Compare commits
305 Commits
test-varia
...
travis-tes
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
57a1519584 | ||
|
|
aedbfdc487 | ||
|
|
ef9e892889 | ||
|
|
f66314926a | ||
|
|
9c345ac301 | ||
|
|
af21d1d56e | ||
|
|
49912732ac | ||
|
|
8fb9a395ab | ||
|
|
35fb99b86f | ||
|
|
127d2dc307 | ||
|
|
569df2d37a | ||
|
|
960d66ff41 | ||
|
|
ff732bf975 | ||
|
|
77871ba71c | ||
|
|
cd0acf5dcc | ||
|
|
8e4dc0a48c | ||
|
|
316e4640f8 | ||
|
|
537bee0994 | ||
|
|
e9895d2ec6 | ||
|
|
5992d521e2 | ||
|
|
4ca86d9482 | ||
|
|
bc3088121b | ||
|
|
dbda499b08 | ||
|
|
6df90d17ae | ||
|
|
e4a0edc7af | ||
|
|
1285297b23 | ||
|
|
9e3c348dff | ||
|
|
3bfaf41d3d | ||
|
|
06599a1e18 | ||
|
|
30ec4cafe1 | ||
|
|
c6d35549d6 | ||
|
|
9a256ca4fe | ||
|
|
809cb516c9 | ||
|
|
07abe7a8d6 | ||
|
|
2fd85a4f36 | ||
|
|
44b97df4e9 | ||
|
|
78168a5248 | ||
|
|
69aec55ead | ||
|
|
7f63141e41 | ||
|
|
d72a1a71d2 | ||
|
|
68f4ae12be | ||
|
|
144d4f2b44 | ||
|
|
e362948d45 | ||
|
|
6edb4e1a39 | ||
|
|
b1fb3296e9 | ||
|
|
3147026211 | ||
|
|
9f8e4507ad | ||
|
|
50ea608553 | ||
|
|
fa67b7ba0f | ||
|
|
6309ded92f | ||
|
|
5a4f158c55 | ||
|
|
a2be8e1956 | ||
|
|
2f737ee292 | ||
|
|
8c75a9de9f | ||
|
|
24aa1e9127 | ||
|
|
f4c0a9fd63 | ||
|
|
f169c37153 | ||
|
|
a489079208 | ||
|
|
ddf68aea80 | ||
|
|
2ae090529e | ||
|
|
4ea98d830b | ||
|
|
4fd04366aa | ||
|
|
2633c3ffb6 | ||
|
|
5b29e4616c | ||
|
|
32904d8c9e | ||
|
|
d68f37ae88 | ||
|
|
b3071aab29 | ||
|
|
2aac24c982 | ||
|
|
20df5507ae | ||
|
|
36311a276b | ||
|
|
22685ef86f | ||
|
|
c3cfd412c9 | ||
|
|
0b21e716ca | ||
|
|
8b90b55518 | ||
|
|
247d9cd887 | ||
|
|
d6ef34a03e | ||
|
|
9819443440 | ||
|
|
84b57fac93 | ||
|
|
7d79c91e9b | ||
|
|
c883efde0f | ||
|
|
42dda355c5 | ||
|
|
99b1538d0a | ||
|
|
fd64c8c33b | ||
|
|
3f52695ec2 | ||
|
|
fc7e5e8e60 | ||
|
|
bcaee66b0a | ||
|
|
df584a3b90 | ||
|
|
7d540fc33a | ||
|
|
605ef40656 | ||
|
|
b8856ac810 | ||
|
|
02bf7d7dfc | ||
|
|
e6f050dbe9 | ||
|
|
5607025e9b | ||
|
|
7cc6cf2604 | ||
|
|
86a6cc53cf | ||
|
|
1859fb059d | ||
|
|
c5a2ba03da | ||
|
|
995e70542a | ||
|
|
4f80f8b910 | ||
|
|
0e03f82733 | ||
|
|
5035a510a2 | ||
|
|
ef388a309f | ||
|
|
c98183c998 | ||
|
|
2b051dd197 | ||
|
|
7da5196206 | ||
|
|
cc764b65c1 | ||
|
|
7b35abbcb4 | ||
|
|
6601d03ce8 | ||
|
|
d3a4b8fd8c | ||
|
|
f3ed133744 | ||
|
|
1a2189f4df | ||
|
|
6a4b610269 | ||
|
|
97ae63efa6 | ||
|
|
3907b53b4b | ||
|
|
6c5959d892 | ||
|
|
601a114d1b | ||
|
|
86926dff92 | ||
|
|
9b35dbf2be | ||
|
|
05e35ff2e0 | ||
|
|
7d0651c315 | ||
|
|
174fa0e05c | ||
|
|
8d9943cb08 | ||
|
|
715899d5a8 | ||
|
|
882335c7ec | ||
|
|
35fa4c0457 | ||
|
|
11e402893f | ||
|
|
2338ab36fd | ||
|
|
e3c996de10 | ||
|
|
b8a9dd75eb | ||
|
|
2072599bd7 | ||
|
|
b1a8e7175b | ||
|
|
1e2f70b17a | ||
|
|
896c1e0b66 | ||
|
|
2f24726d4c | ||
|
|
5f315b46e9 | ||
|
|
a342eb5546 | ||
|
|
90fd1afc38 | ||
|
|
4473fd25cb | ||
|
|
a6772043d6 | ||
|
|
7234d8922d | ||
|
|
07dc2400eb | ||
|
|
1702cb90fd | ||
|
|
fcdeaf48f2 | ||
|
|
702ad99090 | ||
|
|
5f0703cbf1 | ||
|
|
9a3186a67e | ||
|
|
91ce42ce9c | ||
|
|
097c76f512 | ||
|
|
6e07e8b5c0 | ||
|
|
fd91643a7f | ||
|
|
619b17753e | ||
|
|
60cd920bcb | ||
|
|
f512b5eaa2 | ||
|
|
9800e5d8fc | ||
|
|
e84ed49c56 | ||
|
|
ceea41c1e2 | ||
|
|
456122e342 | ||
|
|
84c1b912d9 | ||
|
|
0e78436b05 | ||
|
|
9b5b27597c | ||
|
|
3b065238b3 | ||
|
|
0f5bda4ff9 | ||
|
|
70be256c66 | ||
|
|
fda655370a | ||
|
|
887d72fd5d | ||
|
|
6d527bcc42 | ||
|
|
6ca80b7ce8 | ||
|
|
4401eacaac | ||
|
|
f520d482fd | ||
|
|
b5a31bec03 | ||
|
|
6ac7aabaf7 | ||
|
|
24fdea5fd8 | ||
|
|
4a906484ee | ||
|
|
9e5bca4bbf | ||
|
|
5c588a6f8d | ||
|
|
e048da1e38 | ||
|
|
34b568f366 | ||
|
|
b99bfe8ab7 | ||
|
|
5da61564d9 | ||
|
|
b45f79d0ab | ||
|
|
3cfa63483d | ||
|
|
27d6f62a96 | ||
|
|
e32033f1ec | ||
|
|
d2bad803f3 | ||
|
|
5debf7af7e | ||
|
|
6102cc440b | ||
|
|
bc80195a58 | ||
|
|
2008e3cc77 | ||
|
|
6148e5c355 | ||
|
|
4c652b9c82 | ||
|
|
06fdbf2a55 | ||
|
|
84b770b56e | ||
|
|
6c1dfe43c7 | ||
|
|
a8e711d281 | ||
|
|
f36b93267c | ||
|
|
d2b65b47f2 | ||
|
|
b624172f68 | ||
|
|
6d1472bf8c | ||
|
|
5c8083851a | ||
|
|
345bdb46e0 | ||
|
|
e023f889ff | ||
|
|
4abd81e218 | ||
|
|
d56cd4ef01 | ||
|
|
8139689d4c | ||
|
|
a27b1137a5 | ||
|
|
5809aa6a2c | ||
|
|
d8ca555eed | ||
|
|
bd35e71b5c | ||
|
|
70e4cb7853 | ||
|
|
ac1a60ff0b | ||
|
|
b70f9c4744 | ||
|
|
4f3010ef3f | ||
|
|
2692b862d2 | ||
|
|
4d4c83d4d8 | ||
|
|
57148b7593 | ||
|
|
4a8ede2562 | ||
|
|
46d5f7a860 | ||
|
|
595b1b212e | ||
|
|
75acdeb645 | ||
|
|
517ff5cb19 | ||
|
|
d14eec9ecf | ||
|
|
bdf24d2bed | ||
|
|
c26d459d0f | ||
|
|
2bc64183a8 | ||
|
|
4792e1ee21 | ||
|
|
08c1de34bd | ||
|
|
641b60b8f0 | ||
|
|
578ca1c6af | ||
|
|
d290fe464e | ||
|
|
e38aa65cae | ||
|
|
8fb9e9adde | ||
|
|
96e02d614b | ||
|
|
0a48d7bf7e | ||
|
|
4b488614cf | ||
|
|
f4f16605ed | ||
|
|
b84edfd39a | ||
|
|
88d9a31cf9 | ||
|
|
1dff022d05 | ||
|
|
2b4c2a7f55 | ||
|
|
19de05c72f | ||
|
|
baf43a2dbc | ||
|
|
ebce0adb5a | ||
|
|
61f77c35c0 | ||
|
|
1b76faada6 | ||
|
|
b79bcd0bf2 | ||
|
|
5f6ab47a7b | ||
|
|
d87c905c06 | ||
|
|
9b848b1d65 | ||
|
|
f555e4bf1f | ||
|
|
0de2645a8f | ||
|
|
fcecdfbcc5 | ||
|
|
73cd5aa81c | ||
|
|
3d9d212040 | ||
|
|
78deca4f60 | ||
|
|
3c24ff88cc | ||
|
|
08d91b456b | ||
|
|
1c05b9bd07 | ||
|
|
fffa74edb2 | ||
|
|
8956de6bee | ||
|
|
9bc4286a27 | ||
|
|
3e848b8fce | ||
|
|
fb1aafb5d2 | ||
|
|
f8ff881d23 | ||
|
|
ef3f8888b5 | ||
|
|
a45efcd40d | ||
|
|
63d673a3e0 | ||
|
|
9796128fee | ||
|
|
de6b56bec0 | ||
|
|
6f711d9ae8 | ||
|
|
d645574839 | ||
|
|
6fcdfb0e50 | ||
|
|
e19b2e04c7 | ||
|
|
2dbe47f3a7 | ||
|
|
9b2322a573 | ||
|
|
79caaa8e6f | ||
|
|
8620dcf06f | ||
|
|
0f31d9b7ac | ||
|
|
60673e8a81 | ||
|
|
3132c32c26 | ||
|
|
db46326e95 | ||
|
|
44cc8d7a3c | ||
|
|
3f36298716 | ||
|
|
f8e097a061 | ||
|
|
37b3c22dee | ||
|
|
032178bea0 | ||
|
|
63c7dd109c | ||
|
|
118cb3c9b1 | ||
|
|
717afebcff | ||
|
|
8f6fc67378 | ||
|
|
ec3ec9068c | ||
|
|
f755cfef48 | ||
|
|
c1f4b86d34 | ||
|
|
a156d37ee1 | ||
|
|
1756ef8620 | ||
|
|
feacbe9671 | ||
|
|
c224340330 | ||
|
|
23fb6d2877 | ||
|
|
9620cc75d4 | ||
|
|
af1c66b28f | ||
|
|
270754deff | ||
|
|
a83f9eb4e4 | ||
|
|
fed2264dac | ||
|
|
31a8d086fc | ||
|
|
b27e5804b9 | ||
|
|
4ca03aec8d |
@@ -69,12 +69,12 @@ Access can be defined for all or only selected repositories, which is nice.
|
|||||||
```
|
```
|
||||||
|
|
||||||
- Redirected to Azure DevOps, select the account created in _Having an Azure DevOps account_ section.
|
- Redirected to Azure DevOps, select the account created in _Having an Azure DevOps account_ section.
|
||||||
- Select the organization, and click "Create a new project" (let's name it the same than the targetted github repo)
|
- Select the organization, and click "Create a new project" (let's name it the same than the targeted github repo)
|
||||||
- The Visibility is public, to profit from 10 parallel jobs
|
- The Visibility is public, to profit from 10 parallel jobs
|
||||||
|
|
||||||
```
|
```
|
||||||
!!! ACCESS !!!
|
!!! ACCESS !!!
|
||||||
Azure Pipelines needs access to the GitHub account (in term of beeing able to check it is valid), and the Resources shared between the GitHub account and Azure Pipelines.
|
Azure Pipelines needs access to the GitHub account (in term of being able to check it is valid), and the Resources shared between the GitHub account and Azure Pipelines.
|
||||||
```
|
```
|
||||||
|
|
||||||
_Done. We can move to pipelines configuration._
|
_Done. We can move to pipelines configuration._
|
||||||
@@ -112,6 +112,8 @@ steps:
|
|||||||
CODECOV_TOKEN: $(codecov_token)
|
CODECOV_TOKEN: $(codecov_token)
|
||||||
```
|
```
|
||||||
|
|
||||||
- On Azure DevOps, go to you organization, project, pipeline tab
|
To set up a variable that is shared between pipelines, follow the instructions
|
||||||
- Select the pipeline, click "Edit" button, then click "Variables" button
|
at
|
||||||
- Set name (eg `codecov_token`), value, tick "Keep this value secret"
|
https://docs.microsoft.com/en-us/azure/devops/pipelines/library/variable-groups.
|
||||||
|
When adding variables to a group, don't forget to tick "Keep this value secret"
|
||||||
|
if it shouldn't be shared publcily.
|
||||||
|
|||||||
13
.azure-pipelines/advanced-test.yml
Normal file
13
.azure-pipelines/advanced-test.yml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Advanced pipeline for running our full test suite on demand.
|
||||||
|
trigger:
|
||||||
|
# When changing these triggers, please ensure the documentation under
|
||||||
|
# "Running tests in CI" is still correct.
|
||||||
|
- azure-test-*
|
||||||
|
- test-*
|
||||||
|
pr: none
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Any addition here should be reflected in the advanced and release pipelines.
|
||||||
|
# It is advised to declare all jobs here as templates to improve maintainability.
|
||||||
|
- template: templates/tests-suite.yml
|
||||||
|
- template: templates/installer-tests.yml
|
||||||
@@ -1,18 +1,18 @@
|
|||||||
# Advanced pipeline for isolated checks and release purpose
|
# Advanced pipeline for running our full test suite on protected branches.
|
||||||
trigger:
|
trigger:
|
||||||
- test-*
|
|
||||||
- '*.x'
|
|
||||||
pr:
|
|
||||||
- test-*
|
|
||||||
- '*.x'
|
- '*.x'
|
||||||
|
pr: none
|
||||||
# This pipeline is also nightly run on master
|
# This pipeline is also nightly run on master
|
||||||
schedules:
|
schedules:
|
||||||
- cron: "4 0 * * *"
|
- cron: "0 4 * * *"
|
||||||
displayName: Nightly build
|
displayName: Nightly build
|
||||||
branches:
|
branches:
|
||||||
include:
|
include:
|
||||||
- master
|
- master
|
||||||
|
always: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
# Any addition here should be reflected in the advanced-test and release pipelines.
|
||||||
|
# It is advised to declare all jobs here as templates to improve maintainability.
|
||||||
- template: templates/tests-suite.yml
|
- template: templates/tests-suite.yml
|
||||||
- template: templates/installer-tests.yml
|
- template: templates/installer-tests.yml
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ trigger:
|
|||||||
pr:
|
pr:
|
||||||
- apache-parser-v2
|
- apache-parser-v2
|
||||||
- master
|
- master
|
||||||
|
- '*.x'
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/tests-suite.yml
|
- template: templates/tests-suite.yml
|
||||||
|
|||||||
13
.azure-pipelines/release.yml
Normal file
13
.azure-pipelines/release.yml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Release pipeline to build and deploy Certbot for Windows for GitHub release tags
|
||||||
|
trigger:
|
||||||
|
tags:
|
||||||
|
include:
|
||||||
|
- v*
|
||||||
|
pr: none
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Any addition here should be reflected in the advanced and advanced-test pipelines.
|
||||||
|
# It is advised to declare all jobs here as templates to improve maintainability.
|
||||||
|
- template: templates/tests-suite.yml
|
||||||
|
- template: templates/installer-tests.yml
|
||||||
|
- template: templates/changelog.yml
|
||||||
14
.azure-pipelines/templates/changelog.yml
Normal file
14
.azure-pipelines/templates/changelog.yml
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
jobs:
|
||||||
|
- job: changelog
|
||||||
|
pool:
|
||||||
|
vmImage: vs2017-win2016
|
||||||
|
steps:
|
||||||
|
- bash: |
|
||||||
|
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
|
||||||
|
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
|
||||||
|
displayName: Prepare changelog
|
||||||
|
- task: PublishPipelineArtifact@1
|
||||||
|
inputs:
|
||||||
|
path: $(Build.ArtifactStagingDirectory)
|
||||||
|
artifact: changelog
|
||||||
|
displayName: Publish changelog
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
jobs:
|
jobs:
|
||||||
- job: installer
|
- job: installer_build
|
||||||
pool:
|
pool:
|
||||||
vmImage: vs2017-win2016
|
vmImage: vs2017-win2016
|
||||||
steps:
|
steps:
|
||||||
@@ -18,14 +18,44 @@ jobs:
|
|||||||
- task: PublishPipelineArtifact@1
|
- task: PublishPipelineArtifact@1
|
||||||
inputs:
|
inputs:
|
||||||
path: $(Build.ArtifactStagingDirectory)
|
path: $(Build.ArtifactStagingDirectory)
|
||||||
artifact: WindowsInstaller
|
artifact: windows-installer
|
||||||
- script: $(Build.ArtifactStagingDirectory)\certbot-installer-win32.exe /S
|
displayName: Publish Windows installer
|
||||||
displayName: Install Certbot
|
- job: installer_run
|
||||||
|
dependsOn: installer_build
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
win2019:
|
||||||
|
imageName: windows-2019
|
||||||
|
win2016:
|
||||||
|
imageName: vs2017-win2016
|
||||||
|
pool:
|
||||||
|
vmImage: $(imageName)
|
||||||
|
steps:
|
||||||
|
- powershell: |
|
||||||
|
$currentVersion = $PSVersionTable.PSVersion
|
||||||
|
if ($currentVersion.Major -ne 5) {
|
||||||
|
throw "Powershell version is not 5.x"
|
||||||
|
}
|
||||||
|
condition: eq(variables['imageName'], 'vs2017-win2016')
|
||||||
|
displayName: Check Powershell 5.x is used in vs2017-win2016
|
||||||
|
- task: UsePythonVersion@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: 3.8
|
||||||
|
addToPath: true
|
||||||
|
- task: DownloadPipelineArtifact@2
|
||||||
|
inputs:
|
||||||
|
artifact: windows-installer
|
||||||
|
path: $(Build.SourcesDirectory)/bin
|
||||||
|
displayName: Retrieve Windows installer
|
||||||
- script: |
|
- script: |
|
||||||
python -m venv venv
|
py -3 -m venv venv
|
||||||
venv\Scripts\python tools\pip_install.py -e certbot-ci
|
venv\Scripts\python tools\pip_install.py -e certbot-ci
|
||||||
displayName: Prepare Certbot-CI
|
displayName: Prepare Certbot-CI
|
||||||
|
- script: |
|
||||||
|
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
||||||
|
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
|
||||||
|
displayName: Run windows installer integration tests
|
||||||
- script: |
|
- script: |
|
||||||
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
||||||
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
|
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
|
||||||
displayName: Run integration tests
|
displayName: Run certbot integration tests
|
||||||
|
|||||||
@@ -1,20 +1,34 @@
|
|||||||
jobs:
|
jobs:
|
||||||
- job: test
|
- job: test
|
||||||
pool:
|
|
||||||
vmImage: vs2017-win2016
|
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
py35:
|
macos-py27:
|
||||||
|
IMAGE_NAME: macOS-10.14
|
||||||
|
PYTHON_VERSION: 2.7
|
||||||
|
TOXENV: py27
|
||||||
|
macos-py38:
|
||||||
|
IMAGE_NAME: macOS-10.14
|
||||||
|
PYTHON_VERSION: 3.8
|
||||||
|
TOXENV: py38
|
||||||
|
windows-py35:
|
||||||
|
IMAGE_NAME: vs2017-win2016
|
||||||
PYTHON_VERSION: 3.5
|
PYTHON_VERSION: 3.5
|
||||||
TOXENV: py35
|
TOXENV: py35
|
||||||
py37-cover:
|
windows-py37-cover:
|
||||||
|
IMAGE_NAME: vs2017-win2016
|
||||||
PYTHON_VERSION: 3.7
|
PYTHON_VERSION: 3.7
|
||||||
TOXENV: py37-cover
|
TOXENV: py37-cover
|
||||||
integration-certbot:
|
windows-integration-certbot:
|
||||||
|
IMAGE_NAME: vs2017-win2016
|
||||||
PYTHON_VERSION: 3.7
|
PYTHON_VERSION: 3.7
|
||||||
TOXENV: integration-certbot
|
TOXENV: integration-certbot
|
||||||
PYTEST_ADDOPTS: --numprocesses 4
|
PYTEST_ADDOPTS: --numprocesses 4
|
||||||
|
pool:
|
||||||
|
vmImage: $(IMAGE_NAME)
|
||||||
steps:
|
steps:
|
||||||
|
- bash: brew install augeas
|
||||||
|
condition: startswith(variables['IMAGE_NAME'], 'macOS')
|
||||||
|
displayName: Install Augeas
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: $(PYTHON_VERSION)
|
versionSpec: $(PYTHON_VERSION)
|
||||||
@@ -23,14 +37,3 @@ jobs:
|
|||||||
displayName: Install dependencies
|
displayName: Install dependencies
|
||||||
- script: python -m tox
|
- script: python -m tox
|
||||||
displayName: Run tox
|
displayName: Run tox
|
||||||
# We do not require codecov report upload to succeed. So to avoid to break the pipeline if
|
|
||||||
# something goes wrong, each command is suffixed with a command that hides any non zero exit
|
|
||||||
# codes and echoes an informative message instead.
|
|
||||||
- bash: |
|
|
||||||
curl -s https://codecov.io/bash -o codecov-bash || echo "Failed to download codecov-bash"
|
|
||||||
chmod +x codecov-bash || echo "Failed to apply execute permissions on codecov-bash"
|
|
||||||
./codecov-bash -F windows || echo "Codecov did not collect coverage reports"
|
|
||||||
condition: eq(variables['TOXENV'], 'py37-cover')
|
|
||||||
env:
|
|
||||||
CODECOV_TOKEN: $(codecov_token)
|
|
||||||
displayName: Publish coverage
|
|
||||||
|
|||||||
18
.codecov.yml
18
.codecov.yml
@@ -1,18 +0,0 @@
|
|||||||
coverage:
|
|
||||||
status:
|
|
||||||
project:
|
|
||||||
default: off
|
|
||||||
linux:
|
|
||||||
flags: linux
|
|
||||||
# Fixed target instead of auto set by #7173, can
|
|
||||||
# be removed when flags in Codecov are added back.
|
|
||||||
target: 97.4
|
|
||||||
threshold: 0.1
|
|
||||||
base: auto
|
|
||||||
windows:
|
|
||||||
flags: windows
|
|
||||||
# Fixed target instead of auto set by #7173, can
|
|
||||||
# be removed when flags in Codecov are added back.
|
|
||||||
target: 97.7
|
|
||||||
threshold: 0.1
|
|
||||||
base: auto
|
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -26,6 +26,7 @@ tags
|
|||||||
\#*#
|
\#*#
|
||||||
.idea
|
.idea
|
||||||
.ropeproject
|
.ropeproject
|
||||||
|
.vscode
|
||||||
|
|
||||||
# auth --cert-path --chain-path
|
# auth --cert-path --chain-path
|
||||||
/*.pem
|
/*.pem
|
||||||
@@ -34,6 +35,7 @@ tags
|
|||||||
tests/letstest/letest-*/
|
tests/letstest/letest-*/
|
||||||
tests/letstest/*.pem
|
tests/letstest/*.pem
|
||||||
tests/letstest/venv/
|
tests/letstest/venv/
|
||||||
|
tests/letstest/venv3/
|
||||||
|
|
||||||
.venv
|
.venv
|
||||||
|
|
||||||
|
|||||||
7
.isort.cfg
Normal file
7
.isort.cfg
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
[settings]
|
||||||
|
skip_glob=venv*
|
||||||
|
skip=letsencrypt-auto-source
|
||||||
|
force_sort_within_sections=True
|
||||||
|
force_single_line=True
|
||||||
|
order_by_type=False
|
||||||
|
line_length=400
|
||||||
51
.pylintrc
51
.pylintrc
@@ -24,6 +24,11 @@ persistent=yes
|
|||||||
# usually to register additional checkers.
|
# usually to register additional checkers.
|
||||||
load-plugins=linter_plugin
|
load-plugins=linter_plugin
|
||||||
|
|
||||||
|
# A comma-separated list of package or module names from where C extensions may
|
||||||
|
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||||
|
# run arbitrary code.
|
||||||
|
extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
|
||||||
|
|
||||||
|
|
||||||
[MESSAGES CONTROL]
|
[MESSAGES CONTROL]
|
||||||
|
|
||||||
@@ -41,10 +46,14 @@ load-plugins=linter_plugin
|
|||||||
# --enable=similarities". If you want to run only the classes checker, but have
|
# --enable=similarities". If you want to run only the classes checker, but have
|
||||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||||
# --disable=W"
|
# --disable=W"
|
||||||
disable=fixme,locally-disabled,locally-enabled,abstract-class-not-used,abstract-class-little-used,bad-continuation,too-few-public-methods,no-self-use,invalid-name,too-many-instance-attributes,cyclic-import,duplicate-code
|
# CERTBOT COMMENT
|
||||||
# abstract-class-not-used cannot be disabled locally (at least in
|
# 1) Once certbot codebase is claimed to be compatible exclusively with Python 3,
|
||||||
# pylint 1.4.1), same for abstract-class-little-used
|
# the useless-object-inheritance check can be enabled again, and code fixed accordingly.
|
||||||
|
# 2) Check unsubscriptable-object tends to create a lot of false positives. Let's disable it.
|
||||||
|
# See https://github.com/PyCQA/pylint/issues/1498.
|
||||||
|
# 3) Same as point 2 for no-value-for-parameter.
|
||||||
|
# See https://github.com/PyCQA/pylint/issues/2820.
|
||||||
|
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue
|
||||||
|
|
||||||
[REPORTS]
|
[REPORTS]
|
||||||
|
|
||||||
@@ -297,40 +306,6 @@ valid-classmethod-first-arg=cls
|
|||||||
valid-metaclass-classmethod-first-arg=mcs
|
valid-metaclass-classmethod-first-arg=mcs
|
||||||
|
|
||||||
|
|
||||||
[DESIGN]
|
|
||||||
|
|
||||||
# Maximum number of arguments for function / method
|
|
||||||
max-args=6
|
|
||||||
|
|
||||||
# Argument names that match this expression will be ignored. Default to name
|
|
||||||
# with leading underscore
|
|
||||||
ignored-argument-names=(unused)?_.*|dummy
|
|
||||||
|
|
||||||
# Maximum number of locals for function / method body
|
|
||||||
max-locals=15
|
|
||||||
|
|
||||||
# Maximum number of return / yield for function / method body
|
|
||||||
max-returns=6
|
|
||||||
|
|
||||||
# Maximum number of branch for function / method body
|
|
||||||
max-branches=12
|
|
||||||
|
|
||||||
# Maximum number of statements in function / method body
|
|
||||||
max-statements=50
|
|
||||||
|
|
||||||
# Maximum number of parents for a class (see R0901).
|
|
||||||
max-parents=12
|
|
||||||
|
|
||||||
# Maximum number of attributes for a class (see R0902).
|
|
||||||
max-attributes=7
|
|
||||||
|
|
||||||
# Minimum number of public methods for a class (see R0903).
|
|
||||||
min-public-methods=2
|
|
||||||
|
|
||||||
# Maximum number of public methods for a class (see R0904).
|
|
||||||
max-public-methods=20
|
|
||||||
|
|
||||||
|
|
||||||
[EXCEPTIONS]
|
[EXCEPTIONS]
|
||||||
|
|
||||||
# Exceptions that will emit a warning when being caught. Defaults to
|
# Exceptions that will emit a warning when being caught. Defaults to
|
||||||
|
|||||||
101
.travis.yml
101
.travis.yml
@@ -6,7 +6,6 @@ cache:
|
|||||||
- $HOME/.cache/pip
|
- $HOME/.cache/pip
|
||||||
|
|
||||||
before_script:
|
before_script:
|
||||||
- 'if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then ulimit -n 1024 ; fi'
|
|
||||||
# On Travis, the fastest parallelization for integration tests has proved to be 4.
|
# On Travis, the fastest parallelization for integration tests has proved to be 4.
|
||||||
- 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
|
- 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
|
||||||
# Use Travis retry feature for farm tests since they are flaky
|
# Use Travis retry feature for farm tests since they are flaky
|
||||||
@@ -14,17 +13,19 @@ before_script:
|
|||||||
- export TOX_TESTENV_PASSENV=TRAVIS
|
- export TOX_TESTENV_PASSENV=TRAVIS
|
||||||
|
|
||||||
# Only build pushes to the master branch, PRs, and branches beginning with
|
# Only build pushes to the master branch, PRs, and branches beginning with
|
||||||
# `test-` or of the form `digit(s).digit(s).x`. This reduces the number of
|
# `test-`, `travis-test-`, or of the form `digit(s).digit(s).x`. This reduces
|
||||||
# simultaneous Travis runs, which speeds turnaround time on review since there
|
# the number of simultaneous Travis runs, which speeds turnaround time on
|
||||||
# is a cap of on the number of simultaneous runs.
|
# review since there is a cap of on the number of simultaneous runs.
|
||||||
branches:
|
branches:
|
||||||
|
# When changing these branches, please ensure the documentation under
|
||||||
|
# "Running tests in CI" is still correct.
|
||||||
only:
|
only:
|
||||||
# apache-parser-v2 is a temporary branch for doing work related to
|
# apache-parser-v2 is a temporary branch for doing work related to
|
||||||
# rewriting the parser in the Apache plugin.
|
# rewriting the parser in the Apache plugin.
|
||||||
- apache-parser-v2
|
- apache-parser-v2
|
||||||
- master
|
- master
|
||||||
- /^\d+\.\d+\.x$/
|
- /^\d+\.\d+\.x$/
|
||||||
- /^test-.*$/
|
- /^(travis-)?test-.*$/
|
||||||
|
|
||||||
# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
|
# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
|
||||||
not-on-master: ¬-on-master
|
not-on-master: ¬-on-master
|
||||||
@@ -43,15 +44,12 @@ matrix:
|
|||||||
<<: *not-on-master
|
<<: *not-on-master
|
||||||
|
|
||||||
# This job is always executed, including on master
|
# This job is always executed, including on master
|
||||||
- python: "2.7"
|
- python: "3.8"
|
||||||
env: TOXENV=py27-cover FYI="py27 tests + code coverage"
|
env: TOXENV=py38-cover FYI="py38 tests + code coverage"
|
||||||
|
|
||||||
- python: "2.7"
|
- python: "3.7"
|
||||||
env: TOXENV=lint
|
env: TOXENV=lint
|
||||||
<<: *not-on-master
|
<<: *not-on-master
|
||||||
- python: "3.4"
|
|
||||||
env: TOXENV=mypy
|
|
||||||
<<: *not-on-master
|
|
||||||
- python: "3.5"
|
- python: "3.5"
|
||||||
env: TOXENV=mypy
|
env: TOXENV=mypy
|
||||||
<<: *not-on-master
|
<<: *not-on-master
|
||||||
@@ -60,16 +58,13 @@ matrix:
|
|||||||
# cryptography we support cannot be compiled against the version of
|
# cryptography we support cannot be compiled against the version of
|
||||||
# OpenSSL in Xenial or newer.
|
# OpenSSL in Xenial or newer.
|
||||||
dist: trusty
|
dist: trusty
|
||||||
env: TOXENV='py27-{acme,apache,certbot,dns,nginx}-oldest'
|
env: TOXENV='py27-{acme,apache,apache-v2,certbot,dns,nginx}-oldest'
|
||||||
<<: *not-on-master
|
<<: *not-on-master
|
||||||
- python: "3.4"
|
- python: "2.7"
|
||||||
env: TOXENV=py34
|
env: TOXENV=py27
|
||||||
<<: *not-on-master
|
<<: *not-on-master
|
||||||
- python: "3.7"
|
- python: "3.5"
|
||||||
env: TOXENV=py37
|
env: TOXENV=py35
|
||||||
<<: *not-on-master
|
|
||||||
- python: "3.8-dev"
|
|
||||||
env: TOXENV=py38
|
|
||||||
<<: *not-on-master
|
<<: *not-on-master
|
||||||
- sudo: required
|
- sudo: required
|
||||||
env: TOXENV=apache_compat
|
env: TOXENV=apache_compat
|
||||||
@@ -95,24 +90,24 @@ matrix:
|
|||||||
before_install:
|
before_install:
|
||||||
addons:
|
addons:
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- python: "2.7"
|
- python: "3.7"
|
||||||
env:
|
env:
|
||||||
- TOXENV=travis-test-farm-apache2
|
- TOXENV=travis-test-farm-apache2
|
||||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- python: "2.7"
|
- python: "3.7"
|
||||||
env:
|
env:
|
||||||
- TOXENV=travis-test-farm-leauto-upgrades
|
- TOXENV=travis-test-farm-leauto-upgrades
|
||||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||||
git:
|
git:
|
||||||
depth: false # This is needed to have the history to checkout old versions of certbot-auto.
|
depth: false # This is needed to have the history to checkout old versions of certbot-auto.
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- python: "2.7"
|
- python: "3.7"
|
||||||
env:
|
env:
|
||||||
- TOXENV=travis-test-farm-certonly-standalone
|
- TOXENV=travis-test-farm-certonly-standalone
|
||||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- python: "2.7"
|
- python: "3.7"
|
||||||
env:
|
env:
|
||||||
- TOXENV=travis-test-farm-sdists
|
- TOXENV=travis-test-farm-sdists
|
||||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||||
@@ -166,31 +161,12 @@ matrix:
|
|||||||
sudo: required
|
sudo: required
|
||||||
services: docker
|
services: docker
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- python: "3.4"
|
|
||||||
env: TOXENV=py34
|
|
||||||
<<: *extended-test-suite
|
|
||||||
- python: "3.5"
|
|
||||||
env: TOXENV=py35
|
|
||||||
<<: *extended-test-suite
|
|
||||||
- python: "3.6"
|
- python: "3.6"
|
||||||
env: TOXENV=py36
|
env: TOXENV=py36
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- python: "3.7"
|
- python: "3.7"
|
||||||
env: TOXENV=py37
|
env: TOXENV=py37
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- python: "3.8-dev"
|
|
||||||
env: TOXENV=py38
|
|
||||||
<<: *extended-test-suite
|
|
||||||
- python: "3.4"
|
|
||||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
|
||||||
sudo: required
|
|
||||||
services: docker
|
|
||||||
<<: *extended-test-suite
|
|
||||||
- python: "3.4"
|
|
||||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
|
||||||
sudo: required
|
|
||||||
services: docker
|
|
||||||
<<: *extended-test-suite
|
|
||||||
- python: "3.5"
|
- python: "3.5"
|
||||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||||
sudo: required
|
sudo: required
|
||||||
@@ -221,10 +197,10 @@ matrix:
|
|||||||
sudo: required
|
sudo: required
|
||||||
services: docker
|
services: docker
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- python: "3.8-dev"
|
- python: "3.8"
|
||||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- python: "3.8-dev"
|
- python: "3.8"
|
||||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- sudo: required
|
- sudo: required
|
||||||
@@ -235,6 +211,10 @@ matrix:
|
|||||||
env: TOXENV=le_auto_centos6
|
env: TOXENV=le_auto_centos6
|
||||||
services: docker
|
services: docker
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
|
- sudo: required
|
||||||
|
env: TOXENV=le_auto_oraclelinux6
|
||||||
|
services: docker
|
||||||
|
<<: *extended-test-suite
|
||||||
- sudo: required
|
- sudo: required
|
||||||
env: TOXENV=docker_dev
|
env: TOXENV=docker_dev
|
||||||
services: docker
|
services: docker
|
||||||
@@ -243,30 +223,6 @@ matrix:
|
|||||||
packages: # don't install nginx and apache
|
packages: # don't install nginx and apache
|
||||||
- libaugeas0
|
- libaugeas0
|
||||||
<<: *extended-test-suite
|
<<: *extended-test-suite
|
||||||
- language: generic
|
|
||||||
env: TOXENV=py27
|
|
||||||
os: osx
|
|
||||||
# Using this osx_image is a workaround for
|
|
||||||
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
|
|
||||||
osx_image: xcode10.2
|
|
||||||
addons:
|
|
||||||
homebrew:
|
|
||||||
packages:
|
|
||||||
- augeas
|
|
||||||
- python2
|
|
||||||
<<: *extended-test-suite
|
|
||||||
- language: generic
|
|
||||||
env: TOXENV=py3
|
|
||||||
os: osx
|
|
||||||
# Using this osx_image is a workaround for
|
|
||||||
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
|
|
||||||
osx_image: xcode10.2
|
|
||||||
addons:
|
|
||||||
homebrew:
|
|
||||||
packages:
|
|
||||||
- augeas
|
|
||||||
- python3
|
|
||||||
<<: *extended-test-suite
|
|
||||||
|
|
||||||
# container-based infrastructure
|
# container-based infrastructure
|
||||||
sudo: false
|
sudo: false
|
||||||
@@ -288,19 +244,20 @@ addons:
|
|||||||
# except in tests where the environment variable CERTBOT_NO_PIN is set.
|
# except in tests where the environment variable CERTBOT_NO_PIN is set.
|
||||||
# virtualenv is listed here explicitly to make sure it is upgraded when
|
# virtualenv is listed here explicitly to make sure it is upgraded when
|
||||||
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
|
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
|
||||||
# version of virtualenv.
|
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
|
||||||
install: 'tools/pip_install.py -U codecov tox virtualenv'
|
# set, pip updates dependencies it thinks are already satisfied to avoid some
|
||||||
|
# problems with its lack of real dependency resolution.
|
||||||
|
install: 'tools/pip_install.py -I tox virtualenv'
|
||||||
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
|
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
|
||||||
# script command. It is set only to `travis_retry` during farm tests, in
|
# script command. It is set only to `travis_retry` during farm tests, in
|
||||||
# order to trigger the Travis retry feature, and compensate the inherent
|
# order to trigger the Travis retry feature, and compensate the inherent
|
||||||
# flakiness of these specific tests.
|
# flakiness of these specific tests.
|
||||||
script: '$TRAVIS_RETRY tox'
|
script: '$TRAVIS_RETRY tox'
|
||||||
|
|
||||||
after_success: '[ "$TOXENV" == "py27-cover" ] && codecov -F linux'
|
|
||||||
|
|
||||||
notifications:
|
notifications:
|
||||||
email: false
|
email: false
|
||||||
irc:
|
irc:
|
||||||
|
if: NOT branch =~ ^(travis-)?test-.*$
|
||||||
channels:
|
channels:
|
||||||
# This is set to a secure variable to prevent forks from sending
|
# This is set to a secure variable to prevent forks from sending
|
||||||
# notifications. This value was created by installing
|
# notifications. This value was created by installing
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ Authors
|
|||||||
* [Andrzej Górski](https://github.com/andrzej3393)
|
* [Andrzej Górski](https://github.com/andrzej3393)
|
||||||
* [Anselm Levskaya](https://github.com/levskaya)
|
* [Anselm Levskaya](https://github.com/levskaya)
|
||||||
* [Antoine Jacoutot](https://github.com/ajacoutot)
|
* [Antoine Jacoutot](https://github.com/ajacoutot)
|
||||||
|
* [April King](https://github.com/april)
|
||||||
* [asaph](https://github.com/asaph)
|
* [asaph](https://github.com/asaph)
|
||||||
* [Axel Beckert](https://github.com/xtaran)
|
* [Axel Beckert](https://github.com/xtaran)
|
||||||
* [Bas](https://github.com/Mechazawa)
|
* [Bas](https://github.com/Mechazawa)
|
||||||
@@ -36,6 +37,7 @@ Authors
|
|||||||
* [Brad Warren](https://github.com/bmw)
|
* [Brad Warren](https://github.com/bmw)
|
||||||
* [Brandon Kraft](https://github.com/kraftbj)
|
* [Brandon Kraft](https://github.com/kraftbj)
|
||||||
* [Brandon Kreisel](https://github.com/kraftbj)
|
* [Brandon Kreisel](https://github.com/kraftbj)
|
||||||
|
* [Cameron Steel](https://github.com/Tugzrida)
|
||||||
* [Ceesjan Luiten](https://github.com/quinox)
|
* [Ceesjan Luiten](https://github.com/quinox)
|
||||||
* [Chad Whitacre](https://github.com/whit537)
|
* [Chad Whitacre](https://github.com/whit537)
|
||||||
* [Chhatoi Pritam Baral](https://github.com/pritambaral)
|
* [Chhatoi Pritam Baral](https://github.com/pritambaral)
|
||||||
@@ -100,7 +102,9 @@ Authors
|
|||||||
* [Harlan Lieberman-Berg](https://github.com/hlieberman)
|
* [Harlan Lieberman-Berg](https://github.com/hlieberman)
|
||||||
* [Henri Salo](https://github.com/fgeek)
|
* [Henri Salo](https://github.com/fgeek)
|
||||||
* [Henry Chen](https://github.com/henrychen95)
|
* [Henry Chen](https://github.com/henrychen95)
|
||||||
|
* [Hugo van Kemenade](https://github.com/hugovk)
|
||||||
* [Ingolf Becker](https://github.com/watercrossing)
|
* [Ingolf Becker](https://github.com/watercrossing)
|
||||||
|
* [Ivan Nejgebauer](https://github.com/inejge)
|
||||||
* [Jaap Eldering](https://github.com/eldering)
|
* [Jaap Eldering](https://github.com/eldering)
|
||||||
* [Jacob Hoffman-Andrews](https://github.com/jsha)
|
* [Jacob Hoffman-Andrews](https://github.com/jsha)
|
||||||
* [Jacob Sachs](https://github.com/jsachs)
|
* [Jacob Sachs](https://github.com/jsachs)
|
||||||
@@ -124,6 +128,7 @@ Authors
|
|||||||
* [Jonathan Herlin](https://github.com/Jonher937)
|
* [Jonathan Herlin](https://github.com/Jonher937)
|
||||||
* [Jon Walsh](https://github.com/code-tree)
|
* [Jon Walsh](https://github.com/code-tree)
|
||||||
* [Joona Hoikkala](https://github.com/joohoi)
|
* [Joona Hoikkala](https://github.com/joohoi)
|
||||||
|
* [Josh McCullough](https://github.com/JoshMcCullough)
|
||||||
* [Josh Soref](https://github.com/jsoref)
|
* [Josh Soref](https://github.com/jsoref)
|
||||||
* [Joubin Jabbari](https://github.com/joubin)
|
* [Joubin Jabbari](https://github.com/joubin)
|
||||||
* [Juho Juopperi](https://github.com/jkjuopperi)
|
* [Juho Juopperi](https://github.com/jkjuopperi)
|
||||||
@@ -167,6 +172,7 @@ Authors
|
|||||||
* [Michael Watters](https://github.com/blackknight36)
|
* [Michael Watters](https://github.com/blackknight36)
|
||||||
* [Michal Moravec](https://github.com/https://github.com/Majkl578)
|
* [Michal Moravec](https://github.com/https://github.com/Majkl578)
|
||||||
* [Michal Papis](https://github.com/mpapis)
|
* [Michal Papis](https://github.com/mpapis)
|
||||||
|
* [Mickaël Schoentgen](https://github.com/BoboTiG)
|
||||||
* [Minn Soe](https://github.com/MinnSoe)
|
* [Minn Soe](https://github.com/MinnSoe)
|
||||||
* [Min RK](https://github.com/minrk)
|
* [Min RK](https://github.com/minrk)
|
||||||
* [Miquel Ruiz](https://github.com/miquelruiz)
|
* [Miquel Ruiz](https://github.com/miquelruiz)
|
||||||
@@ -230,6 +236,7 @@ Authors
|
|||||||
* [Stavros Korokithakis](https://github.com/skorokithakis)
|
* [Stavros Korokithakis](https://github.com/skorokithakis)
|
||||||
* [Stefan Weil](https://github.com/stweil)
|
* [Stefan Weil](https://github.com/stweil)
|
||||||
* [Steve Desmond](https://github.com/stevedesmond-ca)
|
* [Steve Desmond](https://github.com/stevedesmond-ca)
|
||||||
|
* [sydneyli](https://github.com/sydneyli)
|
||||||
* [Tan Jay Jun](https://github.com/jayjun)
|
* [Tan Jay Jun](https://github.com/jayjun)
|
||||||
* [Tapple Gao](https://github.com/tapple)
|
* [Tapple Gao](https://github.com/tapple)
|
||||||
* [Telepenin Nikolay](https://github.com/telepenin)
|
* [Telepenin Nikolay](https://github.com/telepenin)
|
||||||
@@ -261,5 +268,6 @@ Authors
|
|||||||
* [Yomna](https://github.com/ynasser)
|
* [Yomna](https://github.com/ynasser)
|
||||||
* [Yoni Jah](https://github.com/yonjah)
|
* [Yoni Jah](https://github.com/yonjah)
|
||||||
* [YourDaddyIsHere](https://github.com/YourDaddyIsHere)
|
* [YourDaddyIsHere](https://github.com/YourDaddyIsHere)
|
||||||
|
* [Yuseong Cho](https://github.com/g6123)
|
||||||
* [Zach Shepherd](https://github.com/zjs)
|
* [Zach Shepherd](https://github.com/zjs)
|
||||||
* [陈三](https://github.com/chenxsan)
|
* [陈三](https://github.com/chenxsan)
|
||||||
|
|||||||
1788
CHANGELOG.md
1788
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
1
CHANGELOG.md
Symbolic link
1
CHANGELOG.md
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
certbot/CHANGELOG.md
|
||||||
@@ -6,16 +6,15 @@ EXPOSE 80 443
|
|||||||
|
|
||||||
WORKDIR /opt/certbot/src
|
WORKDIR /opt/certbot/src
|
||||||
|
|
||||||
# TODO: Install Apache/Nginx for plugin development.
|
|
||||||
COPY . .
|
COPY . .
|
||||||
RUN apt-get update && \
|
RUN apt-get update && \
|
||||||
apt-get install apache2 git nginx-light -y && \
|
apt-get install apache2 git python3-dev python3-venv gcc libaugeas0 \
|
||||||
letsencrypt-auto-source/letsencrypt-auto --os-packages-only && \
|
libssl-dev libffi-dev ca-certificates openssl nginx-light -y && \
|
||||||
apt-get clean && \
|
apt-get clean && \
|
||||||
rm -rf /var/lib/apt/lists/* \
|
rm -rf /var/lib/apt/lists/* \
|
||||||
/tmp/* \
|
/tmp/* \
|
||||||
/var/tmp/*
|
/var/tmp/*
|
||||||
|
|
||||||
RUN VENV_NAME="../venv" python tools/venv.py
|
RUN VENV_NAME="../venv3" python3 tools/venv3.py
|
||||||
|
|
||||||
ENV PATH /opt/certbot/venv/bin:$PATH
|
ENV PATH /opt/certbot/venv3/bin:$PATH
|
||||||
|
|||||||
131
README.rst
131
README.rst
@@ -1,131 +0,0 @@
|
|||||||
.. This file contains a series of comments that are used to include sections of this README in other files. Do not modify these comments unless you know what you are doing. tag:intro-begin
|
|
||||||
|
|
||||||
Certbot is part of EFF’s effort to encrypt the entire Internet. Secure communication over the Web relies on HTTPS, which requires the use of a digital certificate that lets browsers verify the identity of web servers (e.g., is that really google.com?). Web servers obtain their certificates from trusted third parties called certificate authorities (CAs). Certbot is an easy-to-use client that fetches a certificate from Let’s Encrypt—an open certificate authority launched by the EFF, Mozilla, and others—and deploys it to a web server.
|
|
||||||
|
|
||||||
Anyone who has gone through the trouble of setting up a secure website knows what a hassle getting and maintaining a certificate is. Certbot and Let’s Encrypt can automate away the pain and let you turn on and manage HTTPS with simple commands. Using Certbot and Let's Encrypt is free, so there’s no need to arrange payment.
|
|
||||||
|
|
||||||
How you use Certbot depends on the configuration of your web server. The best way to get started is to use our `interactive guide <https://certbot.eff.org>`_. It generates instructions based on your configuration settings. In most cases, you’ll need `root or administrator access <https://certbot.eff.org/faq/#does-certbot-require-root-administrator-privileges>`_ to your web server to run Certbot.
|
|
||||||
|
|
||||||
Certbot is meant to be run directly on your web server, not on your personal computer. If you’re using a hosted service and don’t have direct access to your web server, you might not be able to use Certbot. Check with your hosting provider for documentation about uploading certificates or using certificates issued by Let’s Encrypt.
|
|
||||||
|
|
||||||
Certbot is a fully-featured, extensible client for the Let's
|
|
||||||
Encrypt CA (or any other CA that speaks the `ACME
|
|
||||||
<https://github.com/ietf-wg-acme/acme/blob/master/draft-ietf-acme-acme.md>`_
|
|
||||||
protocol) that can automate the tasks of obtaining certificates and
|
|
||||||
configuring webservers to use them. This client runs on Unix-based operating
|
|
||||||
systems.
|
|
||||||
|
|
||||||
To see the changes made to Certbot between versions please refer to our
|
|
||||||
`changelog <https://github.com/certbot/certbot/blob/master/CHANGELOG.md>`_.
|
|
||||||
|
|
||||||
Until May 2016, Certbot was named simply ``letsencrypt`` or ``letsencrypt-auto``,
|
|
||||||
depending on install method. Instructions on the Internet, and some pieces of the
|
|
||||||
software, may still refer to this older name.
|
|
||||||
|
|
||||||
Contributing
|
|
||||||
------------
|
|
||||||
|
|
||||||
If you'd like to contribute to this project please read `Developer Guide
|
|
||||||
<https://certbot.eff.org/docs/contributing.html>`_.
|
|
||||||
|
|
||||||
This project is governed by `EFF's Public Projects Code of Conduct <https://www.eff.org/pages/eppcode>`_.
|
|
||||||
|
|
||||||
.. _installation:
|
|
||||||
|
|
||||||
How to run the client
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
The easiest way to install and run Certbot is by visiting `certbot.eff.org`_,
|
|
||||||
where you can find the correct instructions for many web server and OS
|
|
||||||
combinations. For more information, see `Get Certbot
|
|
||||||
<https://certbot.eff.org/docs/install.html>`_.
|
|
||||||
|
|
||||||
.. _certbot.eff.org: https://certbot.eff.org/
|
|
||||||
|
|
||||||
Understanding the client in more depth
|
|
||||||
--------------------------------------
|
|
||||||
|
|
||||||
To understand what the client is doing in detail, it's important to
|
|
||||||
understand the way it uses plugins. Please see the `explanation of
|
|
||||||
plugins <https://certbot.eff.org/docs/using.html#plugins>`_ in
|
|
||||||
the User Guide.
|
|
||||||
|
|
||||||
Links
|
|
||||||
=====
|
|
||||||
|
|
||||||
.. Do not modify this comment unless you know what you're doing. tag:links-begin
|
|
||||||
|
|
||||||
Documentation: https://certbot.eff.org/docs
|
|
||||||
|
|
||||||
Software project: https://github.com/certbot/certbot
|
|
||||||
|
|
||||||
Notes for developers: https://certbot.eff.org/docs/contributing.html
|
|
||||||
|
|
||||||
Main Website: https://certbot.eff.org
|
|
||||||
|
|
||||||
Let's Encrypt Website: https://letsencrypt.org
|
|
||||||
|
|
||||||
Community: https://community.letsencrypt.org
|
|
||||||
|
|
||||||
ACME spec: http://ietf-wg-acme.github.io/acme/
|
|
||||||
|
|
||||||
ACME working area in github: https://github.com/ietf-wg-acme/acme
|
|
||||||
|
|
||||||
|build-status| |coverage| |docs| |container|
|
|
||||||
|
|
||||||
.. |build-status| image:: https://travis-ci.com/certbot/certbot.svg?branch=master
|
|
||||||
:target: https://travis-ci.com/certbot/certbot
|
|
||||||
:alt: Travis CI status
|
|
||||||
|
|
||||||
.. |coverage| image:: https://codecov.io/gh/certbot/certbot/branch/master/graph/badge.svg
|
|
||||||
:target: https://codecov.io/gh/certbot/certbot
|
|
||||||
:alt: Coverage status
|
|
||||||
|
|
||||||
.. |docs| image:: https://readthedocs.org/projects/letsencrypt/badge/
|
|
||||||
:target: https://readthedocs.org/projects/letsencrypt/
|
|
||||||
:alt: Documentation status
|
|
||||||
|
|
||||||
.. |container| image:: https://quay.io/repository/letsencrypt/letsencrypt/status
|
|
||||||
:target: https://quay.io/repository/letsencrypt/letsencrypt
|
|
||||||
:alt: Docker Repository on Quay.io
|
|
||||||
|
|
||||||
.. Do not modify this comment unless you know what you're doing. tag:links-end
|
|
||||||
|
|
||||||
System Requirements
|
|
||||||
===================
|
|
||||||
|
|
||||||
See https://certbot.eff.org/docs/install.html#system-requirements.
|
|
||||||
|
|
||||||
.. Do not modify this comment unless you know what you're doing. tag:intro-end
|
|
||||||
|
|
||||||
.. Do not modify this comment unless you know what you're doing. tag:features-begin
|
|
||||||
|
|
||||||
Current Features
|
|
||||||
=====================
|
|
||||||
|
|
||||||
* Supports multiple web servers:
|
|
||||||
|
|
||||||
- apache/2.x
|
|
||||||
- nginx/0.8.48+
|
|
||||||
- webroot (adds files to webroot directories in order to prove control of
|
|
||||||
domains and obtain certs)
|
|
||||||
- standalone (runs its own simple webserver to prove you control a domain)
|
|
||||||
- other server software via `third party plugins <https://certbot.eff.org/docs/using.html#third-party-plugins>`_
|
|
||||||
|
|
||||||
* The private key is generated locally on your system.
|
|
||||||
* Can talk to the Let's Encrypt CA or optionally to other ACME
|
|
||||||
compliant services.
|
|
||||||
* Can get domain-validated (DV) certificates.
|
|
||||||
* Can revoke certificates.
|
|
||||||
* Adjustable RSA key bit-length (2048 (default), 4096, ...).
|
|
||||||
* Can optionally install a http -> https redirect, so your site effectively
|
|
||||||
runs https only (Apache only)
|
|
||||||
* Fully automated.
|
|
||||||
* Configuration changes are logged and can be reverted.
|
|
||||||
* Supports an interactive text UI, or can be driven entirely from the
|
|
||||||
command line.
|
|
||||||
* Free and Open Source Software, made with Python.
|
|
||||||
|
|
||||||
.. Do not modify this comment unless you know what you're doing. tag:features-end
|
|
||||||
|
|
||||||
For extensive documentation on using and contributing to Certbot, go to https://certbot.eff.org/docs. If you would like to contribute to the project or run the latest code from git, you should read our `developer guide <https://certbot.eff.org/docs/contributing.html>`_.
|
|
||||||
1
README.rst
Symbolic link
1
README.rst
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
certbot/README.rst
|
||||||
@@ -3,4 +3,6 @@ include README.rst
|
|||||||
include pytest.ini
|
include pytest.ini
|
||||||
recursive-include docs *
|
recursive-include docs *
|
||||||
recursive-include examples *
|
recursive-include examples *
|
||||||
recursive-include acme/testdata *
|
recursive-include tests *
|
||||||
|
global-exclude __pycache__
|
||||||
|
global-exclude *.py[cod]
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ import warnings
|
|||||||
#
|
#
|
||||||
# It is based on
|
# It is based on
|
||||||
# https://github.com/requests/requests/blob/1278ecdf71a312dc2268f3bfc0aabfab3c006dcf/requests/packages.py
|
# https://github.com/requests/requests/blob/1278ecdf71a312dc2268f3bfc0aabfab3c006dcf/requests/packages.py
|
||||||
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
|
|
||||||
for mod in list(sys.modules):
|
for mod in list(sys.modules):
|
||||||
@@ -21,30 +20,3 @@ for mod in list(sys.modules):
|
|||||||
# preserved (acme.jose.* is josepy.*)
|
# preserved (acme.jose.* is josepy.*)
|
||||||
if mod == 'josepy' or mod.startswith('josepy.'):
|
if mod == 'josepy' or mod.startswith('josepy.'):
|
||||||
sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]
|
sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]
|
||||||
|
|
||||||
|
|
||||||
# This class takes a similar approach to the cryptography project to deprecate attributes
|
|
||||||
# in public modules. See the _ModuleWithDeprecation class here:
|
|
||||||
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
|
|
||||||
class _TLSSNI01DeprecationModule(object):
|
|
||||||
"""
|
|
||||||
Internal class delegating to a module, and displaying warnings when
|
|
||||||
attributes related to TLS-SNI-01 are accessed.
|
|
||||||
"""
|
|
||||||
def __init__(self, module):
|
|
||||||
self.__dict__['_module'] = module
|
|
||||||
|
|
||||||
def __getattr__(self, attr):
|
|
||||||
if 'TLSSNI01' in attr:
|
|
||||||
warnings.warn('{0} attribute is deprecated, and will be removed soon.'.format(attr),
|
|
||||||
DeprecationWarning, stacklevel=2)
|
|
||||||
return getattr(self._module, attr)
|
|
||||||
|
|
||||||
def __setattr__(self, attr, value): # pragma: no cover
|
|
||||||
setattr(self._module, attr, value)
|
|
||||||
|
|
||||||
def __delattr__(self, attr): # pragma: no cover
|
|
||||||
delattr(self._module, attr)
|
|
||||||
|
|
||||||
def __dir__(self): # pragma: no cover
|
|
||||||
return ['_module'] + dir(self._module)
|
|
||||||
|
|||||||
@@ -1,21 +1,22 @@
|
|||||||
"""ACME Identifier Validation Challenges."""
|
"""ACME Identifier Validation Challenges."""
|
||||||
import abc
|
import abc
|
||||||
|
import codecs
|
||||||
import functools
|
import functools
|
||||||
import hashlib
|
import hashlib
|
||||||
import logging
|
import logging
|
||||||
import socket
|
import socket
|
||||||
import sys
|
|
||||||
|
|
||||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
import OpenSSL
|
|
||||||
import requests
|
import requests
|
||||||
import six
|
import six
|
||||||
|
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
||||||
|
from OpenSSL import crypto
|
||||||
|
|
||||||
from acme import errors
|
|
||||||
from acme import crypto_util
|
from acme import crypto_util
|
||||||
|
from acme import errors
|
||||||
from acme import fields
|
from acme import fields
|
||||||
from acme import _TLSSNI01DeprecationModule
|
from acme.mixins import ResourceMixin, TypeMixin
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -34,7 +35,7 @@ class Challenge(jose.TypedJSONObjectWithFields):
|
|||||||
return UnrecognizedChallenge.from_json(jobj)
|
return UnrecognizedChallenge.from_json(jobj)
|
||||||
|
|
||||||
|
|
||||||
class ChallengeResponse(jose.TypedJSONObjectWithFields):
|
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
|
||||||
# _fields_to_partial_json
|
# _fields_to_partial_json
|
||||||
"""ACME challenge response."""
|
"""ACME challenge response."""
|
||||||
TYPES = {} # type: dict
|
TYPES = {} # type: dict
|
||||||
@@ -60,8 +61,7 @@ class UnrecognizedChallenge(Challenge):
|
|||||||
object.__setattr__(self, "jobj", jobj)
|
object.__setattr__(self, "jobj", jobj)
|
||||||
|
|
||||||
def to_partial_json(self):
|
def to_partial_json(self):
|
||||||
# pylint: disable=no-member
|
return self.jobj # pylint: disable=no-member
|
||||||
return self.jobj
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_json(cls, jobj):
|
def from_json(cls, jobj):
|
||||||
@@ -119,7 +119,7 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
|||||||
:rtype: bool
|
:rtype: bool
|
||||||
|
|
||||||
"""
|
"""
|
||||||
parts = self.key_authorization.split('.') # pylint: disable=no-member
|
parts = self.key_authorization.split('.')
|
||||||
if len(parts) != 2:
|
if len(parts) != 2:
|
||||||
logger.debug("Key authorization (%r) is not well formed",
|
logger.debug("Key authorization (%r) is not well formed",
|
||||||
self.key_authorization)
|
self.key_authorization)
|
||||||
@@ -237,7 +237,7 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
|
|||||||
return verified
|
return verified
|
||||||
|
|
||||||
|
|
||||||
@Challenge.register # pylint: disable=too-many-ancestors
|
@Challenge.register
|
||||||
class DNS01(KeyAuthorizationChallenge):
|
class DNS01(KeyAuthorizationChallenge):
|
||||||
"""ACME dns-01 challenge."""
|
"""ACME dns-01 challenge."""
|
||||||
response_cls = DNS01Response
|
response_cls = DNS01Response
|
||||||
@@ -310,7 +310,7 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
|
|||||||
uri = chall.uri(domain)
|
uri = chall.uri(domain)
|
||||||
logger.debug("Verifying %s at %s...", chall.typ, uri)
|
logger.debug("Verifying %s at %s...", chall.typ, uri)
|
||||||
try:
|
try:
|
||||||
http_response = requests.get(uri)
|
http_response = requests.get(uri, verify=False)
|
||||||
except requests.exceptions.RequestException as error:
|
except requests.exceptions.RequestException as error:
|
||||||
logger.error("Unable to reach %s: %s", uri, error)
|
logger.error("Unable to reach %s: %s", uri, error)
|
||||||
return False
|
return False
|
||||||
@@ -327,7 +327,7 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
@Challenge.register # pylint: disable=too-many-ancestors
|
@Challenge.register
|
||||||
class HTTP01(KeyAuthorizationChallenge):
|
class HTTP01(KeyAuthorizationChallenge):
|
||||||
"""ACME http-01 challenge."""
|
"""ACME http-01 challenge."""
|
||||||
response_cls = HTTP01Response
|
response_cls = HTTP01Response
|
||||||
@@ -368,12 +368,9 @@ class HTTP01(KeyAuthorizationChallenge):
|
|||||||
|
|
||||||
|
|
||||||
@ChallengeResponse.register
|
@ChallengeResponse.register
|
||||||
class TLSSNI01Response(KeyAuthorizationChallengeResponse):
|
class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||||
"""ACME tls-sni-01 challenge response."""
|
"""ACME tls-alpn-01 challenge response."""
|
||||||
typ = "tls-sni-01"
|
typ = "tls-alpn-01"
|
||||||
|
|
||||||
DOMAIN_SUFFIX = b".acme.invalid"
|
|
||||||
"""Domain name suffix."""
|
|
||||||
|
|
||||||
PORT = 443
|
PORT = 443
|
||||||
"""Verification port as defined by the protocol.
|
"""Verification port as defined by the protocol.
|
||||||
@@ -383,28 +380,18 @@ class TLSSNI01Response(KeyAuthorizationChallengeResponse):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@property
|
ID_PE_ACME_IDENTIFIER_V1 = b"1.3.6.1.5.5.7.1.30.1"
|
||||||
def z(self): # pylint: disable=invalid-name
|
ACME_TLS_1_PROTOCOL = "acme-tls/1"
|
||||||
"""``z`` value used for verification.
|
|
||||||
|
|
||||||
:rtype bytes:
|
|
||||||
|
|
||||||
"""
|
|
||||||
return hashlib.sha256(
|
|
||||||
self.key_authorization.encode("utf-8")).hexdigest().lower().encode()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def z_domain(self):
|
def h(self):
|
||||||
"""Domain name used for verification, generated from `z`.
|
"""Hash value stored in challenge certificate"""
|
||||||
|
return hashlib.sha256(self.key_authorization.encode('utf-8')).digest()
|
||||||
|
|
||||||
:rtype bytes:
|
def gen_cert(self, domain, key=None, bits=2048):
|
||||||
|
"""Generate tls-alpn-01 certificate.
|
||||||
"""
|
|
||||||
return self.z[:32] + b'.' + self.z[32:] + self.DOMAIN_SUFFIX
|
|
||||||
|
|
||||||
def gen_cert(self, key=None, bits=2048):
|
|
||||||
"""Generate tls-sni-01 certificate.
|
|
||||||
|
|
||||||
|
:param unicode domain: Domain verified by the challenge.
|
||||||
:param OpenSSL.crypto.PKey key: Optional private key used in
|
:param OpenSSL.crypto.PKey key: Optional private key used in
|
||||||
certificate generation. If not provided (``None``), then
|
certificate generation. If not provided (``None``), then
|
||||||
fresh key will be generated.
|
fresh key will be generated.
|
||||||
@@ -414,32 +401,38 @@ class TLSSNI01Response(KeyAuthorizationChallengeResponse):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
if key is None:
|
if key is None:
|
||||||
key = OpenSSL.crypto.PKey()
|
key = crypto.PKey()
|
||||||
key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)
|
key.generate_key(crypto.TYPE_RSA, bits)
|
||||||
return crypto_util.gen_ss_cert(key, [
|
|
||||||
# z_domain is too big to fit into CN, hence first dummy domain
|
|
||||||
'dummy', self.z_domain.decode()], force_san=True), key
|
|
||||||
|
|
||||||
def probe_cert(self, domain, **kwargs):
|
|
||||||
"""Probe tls-sni-01 challenge certificate.
|
|
||||||
|
|
||||||
:param unicode domain:
|
der_value = b"DER:" + codecs.encode(self.h, 'hex')
|
||||||
|
acme_extension = crypto.X509Extension(self.ID_PE_ACME_IDENTIFIER_V1,
|
||||||
|
critical=True, value=der_value)
|
||||||
|
|
||||||
|
return crypto_util.gen_ss_cert(key, [domain], force_san=True,
|
||||||
|
extensions=[acme_extension]), key
|
||||||
|
|
||||||
|
def probe_cert(self, domain, host=None, port=None):
|
||||||
|
"""Probe tls-alpn-01 challenge certificate.
|
||||||
|
|
||||||
|
:param unicode domain: domain being validated, required.
|
||||||
|
:param string host: IP address used to probe the certificate.
|
||||||
|
:param int port: Port used to probe the certificate.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
# TODO: domain is not necessary if host is provided
|
if host is None:
|
||||||
if "host" not in kwargs:
|
|
||||||
host = socket.gethostbyname(domain)
|
host = socket.gethostbyname(domain)
|
||||||
logger.debug('%s resolved to %s', domain, host)
|
logger.debug('%s resolved to %s', domain, host)
|
||||||
kwargs["host"] = host
|
if port is None:
|
||||||
|
port = self.PORT
|
||||||
|
|
||||||
kwargs.setdefault("port", self.PORT)
|
return crypto_util.probe_sni(host=host, port=port, name=domain,
|
||||||
kwargs["name"] = self.z_domain
|
alpn_protocols=[self.ACME_TLS_1_PROTOCOL])
|
||||||
# TODO: try different methods?
|
|
||||||
return crypto_util.probe_sni(**kwargs)
|
|
||||||
|
|
||||||
def verify_cert(self, cert):
|
def verify_cert(self, domain, cert):
|
||||||
"""Verify tls-sni-01 challenge certificate.
|
"""Verify tls-alpn-01 challenge certificate.
|
||||||
|
|
||||||
|
:param unicode domain: Domain name being validated.
|
||||||
:param OpensSSL.crypto.X509 cert: Challenge certificate.
|
:param OpensSSL.crypto.X509 cert: Challenge certificate.
|
||||||
|
|
||||||
:returns: Whether the certificate was successfully verified.
|
:returns: Whether the certificate was successfully verified.
|
||||||
@@ -447,28 +440,40 @@ class TLSSNI01Response(KeyAuthorizationChallengeResponse):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
# pylint: disable=protected-access
|
# pylint: disable=protected-access
|
||||||
sans = crypto_util._pyopenssl_cert_or_req_san(cert)
|
names = crypto_util._pyopenssl_cert_or_req_all_names(cert)
|
||||||
logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), sans)
|
logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), names)
|
||||||
return self.z_domain.decode() in sans
|
if len(names) != 1 or names[0].lower() != domain.lower():
|
||||||
|
return False
|
||||||
|
|
||||||
|
for i in range(cert.get_extension_count()):
|
||||||
|
ext = cert.get_extension(i)
|
||||||
|
# FIXME: assume this is the ACME extension. Currently there is no
|
||||||
|
# way to get full OID of an unknown extension from pyopenssl.
|
||||||
|
if ext.get_short_name() == b'UNDEF':
|
||||||
|
data = ext.get_data()
|
||||||
|
return data == self.h
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
# pylint: disable=too-many-arguments
|
||||||
def simple_verify(self, chall, domain, account_public_key,
|
def simple_verify(self, chall, domain, account_public_key,
|
||||||
cert=None, **kwargs):
|
cert=None, host=None, port=None):
|
||||||
"""Simple verify.
|
"""Simple verify.
|
||||||
|
|
||||||
Verify ``validation`` using ``account_public_key``, optionally
|
Verify ``validation`` using ``account_public_key``, optionally
|
||||||
probe tls-sni-01 certificate and check using `verify_cert`.
|
probe tls-alpn-01 certificate and check using `verify_cert`.
|
||||||
|
|
||||||
:param .challenges.TLSSNI01 chall: Corresponding challenge.
|
:param .challenges.TLSALPN01 chall: Corresponding challenge.
|
||||||
:param str domain: Domain name being validated.
|
:param str domain: Domain name being validated.
|
||||||
:param JWK account_public_key:
|
:param JWK account_public_key:
|
||||||
:param OpenSSL.crypto.X509 cert: Optional certificate. If not
|
:param OpenSSL.crypto.X509 cert: Optional certificate. If not
|
||||||
provided (``None``) certificate will be retrieved using
|
provided (``None``) certificate will be retrieved using
|
||||||
`probe_cert`.
|
`probe_cert`.
|
||||||
|
:param string host: IP address used to probe the certificate.
|
||||||
:param int port: Port used to probe the certificate.
|
:param int port: Port used to probe the certificate.
|
||||||
|
|
||||||
|
|
||||||
:returns: ``True`` iff client's control of the domain has been
|
:returns: ``True`` if and only if client's control of the domain has been verified.
|
||||||
verified.
|
|
||||||
:rtype: bool
|
:rtype: bool
|
||||||
|
|
||||||
"""
|
"""
|
||||||
@@ -478,27 +483,25 @@ class TLSSNI01Response(KeyAuthorizationChallengeResponse):
|
|||||||
|
|
||||||
if cert is None:
|
if cert is None:
|
||||||
try:
|
try:
|
||||||
cert = self.probe_cert(domain=domain, **kwargs)
|
cert = self.probe_cert(domain=domain, host=host, port=port)
|
||||||
except errors.Error as error:
|
except errors.Error as error:
|
||||||
logger.debug(str(error), exc_info=True)
|
logger.debug(str(error), exc_info=True)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return self.verify_cert(cert)
|
return self.verify_cert(domain, cert)
|
||||||
|
|
||||||
|
|
||||||
@Challenge.register # pylint: disable=too-many-ancestors
|
@Challenge.register # pylint: disable=too-many-ancestors
|
||||||
class TLSSNI01(KeyAuthorizationChallenge):
|
class TLSALPN01(KeyAuthorizationChallenge):
|
||||||
"""ACME tls-sni-01 challenge."""
|
"""ACME tls-alpn-01 challenge."""
|
||||||
response_cls = TLSSNI01Response
|
response_cls = TLSALPN01Response
|
||||||
typ = response_cls.typ
|
typ = response_cls.typ
|
||||||
|
|
||||||
# boulder#962, ietf-wg-acme#22
|
|
||||||
#n = jose.Field("n", encoder=int, decoder=int)
|
|
||||||
|
|
||||||
def validation(self, account_key, **kwargs):
|
def validation(self, account_key, **kwargs):
|
||||||
"""Generate validation.
|
"""Generate validation.
|
||||||
|
|
||||||
:param JWK account_key:
|
:param JWK account_key:
|
||||||
|
:param unicode domain: Domain verified by the challenge.
|
||||||
:param OpenSSL.crypto.PKey cert_key: Optional private key used
|
:param OpenSSL.crypto.PKey cert_key: Optional private key used
|
||||||
in certificate generation. If not provided (``None``), then
|
in certificate generation. If not provided (``None``), then
|
||||||
fresh key will be generated.
|
fresh key will be generated.
|
||||||
@@ -506,34 +509,23 @@ class TLSSNI01(KeyAuthorizationChallenge):
|
|||||||
:rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
|
:rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
return self.response(account_key).gen_cert(key=kwargs.get('cert_key'))
|
return self.response(account_key).gen_cert(
|
||||||
|
key=kwargs.get('cert_key'),
|
||||||
|
domain=kwargs.get('domain'))
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_supported():
|
||||||
|
"""
|
||||||
|
Check if TLS-ALPN-01 challenge is supported on this machine.
|
||||||
|
This implies that a recent version of OpenSSL is installed (>= 1.0.2),
|
||||||
|
or a recent cryptography version shipped with the OpenSSL library is installed.
|
||||||
|
|
||||||
@ChallengeResponse.register
|
:returns: ``True`` if TLS-ALPN-01 is supported on this machine, ``False`` otherwise.
|
||||||
class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
:rtype: bool
|
||||||
"""ACME TLS-ALPN-01 challenge response.
|
|
||||||
|
|
||||||
This class only allows initiating a TLS-ALPN-01 challenge returned from the
|
"""
|
||||||
CA. Full support for responding to TLS-ALPN-01 challenges by generating and
|
return (hasattr(SSL.Connection, "set_alpn_protos")
|
||||||
serving the expected response certificate is not currently provided.
|
and hasattr(SSL.Context, "set_alpn_select_callback"))
|
||||||
"""
|
|
||||||
typ = "tls-alpn-01"
|
|
||||||
|
|
||||||
|
|
||||||
@Challenge.register # pylint: disable=too-many-ancestors
|
|
||||||
class TLSALPN01(KeyAuthorizationChallenge):
|
|
||||||
"""ACME tls-alpn-01 challenge.
|
|
||||||
|
|
||||||
This class simply allows parsing the TLS-ALPN-01 challenge returned from
|
|
||||||
the CA. Full TLS-ALPN-01 support is not currently provided.
|
|
||||||
|
|
||||||
"""
|
|
||||||
typ = "tls-alpn-01"
|
|
||||||
response_cls = TLSALPN01Response
|
|
||||||
|
|
||||||
def validation(self, account_key, **kwargs):
|
|
||||||
"""Generate validation for the challenge."""
|
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
|
|
||||||
@Challenge.register
|
@Challenge.register
|
||||||
@@ -617,7 +609,3 @@ class DNSResponse(ChallengeResponse):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
return chall.check_validation(self.validation, account_public_key)
|
return chall.check_validation(self.validation, account_public_key)
|
||||||
|
|
||||||
|
|
||||||
# Patching ourselves to warn about TLS-SNI challenge deprecation and removal.
|
|
||||||
sys.modules[__name__] = _TLSSNI01DeprecationModule(sys.modules[__name__])
|
|
||||||
|
|||||||
@@ -5,25 +5,27 @@ import datetime
|
|||||||
from email.utils import parsedate_tz
|
from email.utils import parsedate_tz
|
||||||
import heapq
|
import heapq
|
||||||
import logging
|
import logging
|
||||||
import time
|
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
import six
|
|
||||||
from six.moves import http_client # pylint: disable=import-error
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
import OpenSSL
|
import OpenSSL
|
||||||
import requests
|
import requests
|
||||||
from requests.adapters import HTTPAdapter
|
from requests.adapters import HTTPAdapter
|
||||||
from requests_toolbelt.adapters.source import SourceAddressAdapter
|
from requests_toolbelt.adapters.source import SourceAddressAdapter
|
||||||
|
import six
|
||||||
|
from six.moves import http_client
|
||||||
|
|
||||||
from acme import crypto_util
|
from acme import crypto_util
|
||||||
from acme import errors
|
from acme import errors
|
||||||
from acme import jws
|
from acme import jws
|
||||||
from acme import messages
|
from acme import messages
|
||||||
# pylint: disable=unused-import, no-name-in-module
|
from acme.magic_typing import Dict
|
||||||
from acme.magic_typing import Dict, List, Set, Text
|
from acme.magic_typing import List
|
||||||
|
from acme.magic_typing import Set
|
||||||
|
from acme.magic_typing import Text
|
||||||
|
from acme.mixins import VersionedLEACMEMixin
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -33,10 +35,9 @@ logger = logging.getLogger(__name__)
|
|||||||
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
|
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
|
||||||
if sys.version_info < (2, 7, 9): # pragma: no cover
|
if sys.version_info < (2, 7, 9): # pragma: no cover
|
||||||
try:
|
try:
|
||||||
# pylint: disable=no-member
|
|
||||||
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3() # type: ignore
|
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3() # type: ignore
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
import urllib3.contrib.pyopenssl # pylint: disable=import-error
|
import urllib3.contrib.pyopenssl
|
||||||
urllib3.contrib.pyopenssl.inject_into_urllib3()
|
urllib3.contrib.pyopenssl.inject_into_urllib3()
|
||||||
|
|
||||||
DEFAULT_NETWORK_TIMEOUT = 45
|
DEFAULT_NETWORK_TIMEOUT = 45
|
||||||
@@ -44,7 +45,7 @@ DEFAULT_NETWORK_TIMEOUT = 45
|
|||||||
DER_CONTENT_TYPE = 'application/pkix-cert'
|
DER_CONTENT_TYPE = 'application/pkix-cert'
|
||||||
|
|
||||||
|
|
||||||
class ClientBase(object): # pylint: disable=too-many-instance-attributes
|
class ClientBase(object):
|
||||||
"""ACME client base object.
|
"""ACME client base object.
|
||||||
|
|
||||||
:ivar messages.Directory directory:
|
:ivar messages.Directory directory:
|
||||||
@@ -136,7 +137,8 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
|
|||||||
"""
|
"""
|
||||||
body = messages.UpdateAuthorization(status='deactivated')
|
body = messages.UpdateAuthorization(status='deactivated')
|
||||||
response = self._post(authzr.uri, body)
|
response = self._post(authzr.uri, body)
|
||||||
return self._authzr_from_response(response)
|
return self._authzr_from_response(response,
|
||||||
|
authzr.body.identifier, authzr.uri)
|
||||||
|
|
||||||
def _authzr_from_response(self, response, identifier=None, uri=None):
|
def _authzr_from_response(self, response, identifier=None, uri=None):
|
||||||
authzr = messages.AuthorizationResource(
|
authzr = messages.AuthorizationResource(
|
||||||
@@ -253,7 +255,6 @@ class Client(ClientBase):
|
|||||||
URI from which the resource will be downloaded.
|
URI from which the resource will be downloaded.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
# pylint: disable=too-many-arguments
|
|
||||||
self.key = key
|
self.key = key
|
||||||
if net is None:
|
if net is None:
|
||||||
net = ClientNetwork(key, alg=alg, verify_ssl=verify_ssl)
|
net = ClientNetwork(key, alg=alg, verify_ssl=verify_ssl)
|
||||||
@@ -279,7 +280,6 @@ class Client(ClientBase):
|
|||||||
assert response.status_code == http_client.CREATED
|
assert response.status_code == http_client.CREATED
|
||||||
|
|
||||||
# "Instance of 'Field' has no key/contact member" bug:
|
# "Instance of 'Field' has no key/contact member" bug:
|
||||||
# pylint: disable=no-member
|
|
||||||
return self._regr_from_response(response)
|
return self._regr_from_response(response)
|
||||||
|
|
||||||
def query_registration(self, regr):
|
def query_registration(self, regr):
|
||||||
@@ -434,7 +434,6 @@ class Client(ClientBase):
|
|||||||
was marked by the CA as invalid
|
was marked by the CA as invalid
|
||||||
|
|
||||||
"""
|
"""
|
||||||
# pylint: disable=too-many-locals
|
|
||||||
assert max_attempts > 0
|
assert max_attempts > 0
|
||||||
attempts = collections.defaultdict(int) # type: Dict[messages.AuthorizationResource, int]
|
attempts = collections.defaultdict(int) # type: Dict[messages.AuthorizationResource, int]
|
||||||
exhausted = set()
|
exhausted = set()
|
||||||
@@ -465,7 +464,6 @@ class Client(ClientBase):
|
|||||||
updated[authzr] = updated_authzr
|
updated[authzr] = updated_authzr
|
||||||
|
|
||||||
attempts[authzr] += 1
|
attempts[authzr] += 1
|
||||||
# pylint: disable=no-member
|
|
||||||
if updated_authzr.body.status not in (
|
if updated_authzr.body.status not in (
|
||||||
messages.STATUS_VALID, messages.STATUS_INVALID):
|
messages.STATUS_VALID, messages.STATUS_INVALID):
|
||||||
if attempts[authzr] < max_attempts:
|
if attempts[authzr] < max_attempts:
|
||||||
@@ -606,7 +604,6 @@ class ClientV2(ClientBase):
|
|||||||
if response.status_code == 200 and 'Location' in response.headers:
|
if response.status_code == 200 and 'Location' in response.headers:
|
||||||
raise errors.ConflictError(response.headers.get('Location'))
|
raise errors.ConflictError(response.headers.get('Location'))
|
||||||
# "Instance of 'Field' has no key/contact member" bug:
|
# "Instance of 'Field' has no key/contact member" bug:
|
||||||
# pylint: disable=no-member
|
|
||||||
regr = self._regr_from_response(response)
|
regr = self._regr_from_response(response)
|
||||||
self.net.account = regr
|
self.net.account = regr
|
||||||
return regr
|
return regr
|
||||||
@@ -670,7 +667,7 @@ class ClientV2(ClientBase):
|
|||||||
response = self._post(self.directory['newOrder'], order)
|
response = self._post(self.directory['newOrder'], order)
|
||||||
body = messages.Order.from_json(response.json())
|
body = messages.Order.from_json(response.json())
|
||||||
authorizations = []
|
authorizations = []
|
||||||
for url in body.authorizations: # pylint: disable=not-an-iterable
|
for url in body.authorizations:
|
||||||
authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
|
authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
|
||||||
return messages.OrderResource(
|
return messages.OrderResource(
|
||||||
body=body,
|
body=body,
|
||||||
@@ -730,7 +727,7 @@ class ClientV2(ClientBase):
|
|||||||
for authzr in responses:
|
for authzr in responses:
|
||||||
if authzr.body.status != messages.STATUS_VALID:
|
if authzr.body.status != messages.STATUS_VALID:
|
||||||
for chall in authzr.body.challenges:
|
for chall in authzr.body.challenges:
|
||||||
if chall.error != None:
|
if chall.error is not None:
|
||||||
failed.append(authzr)
|
failed.append(authzr)
|
||||||
if failed:
|
if failed:
|
||||||
raise errors.ValidationError(failed)
|
raise errors.ValidationError(failed)
|
||||||
@@ -780,29 +777,13 @@ class ClientV2(ClientBase):
|
|||||||
|
|
||||||
def _post_as_get(self, *args, **kwargs):
|
def _post_as_get(self, *args, **kwargs):
|
||||||
"""
|
"""
|
||||||
Send GET request using the POST-as-GET protocol if needed.
|
Send GET request using the POST-as-GET protocol.
|
||||||
The request will be first issued using POST-as-GET for ACME v2. If the ACME CA servers do
|
|
||||||
not support this yet and return an error, request will be retried using GET.
|
|
||||||
For ACME v1, only GET request will be tried, as POST-as-GET is not supported.
|
|
||||||
:param args:
|
:param args:
|
||||||
:param kwargs:
|
:param kwargs:
|
||||||
:return:
|
:return:
|
||||||
"""
|
"""
|
||||||
if self.acme_version >= 2:
|
new_args = args[:1] + (None,) + args[1:]
|
||||||
# We add an empty payload for POST-as-GET requests
|
return self._post(*new_args, **kwargs)
|
||||||
new_args = args[:1] + (None,) + args[1:]
|
|
||||||
try:
|
|
||||||
return self._post(*new_args, **kwargs)
|
|
||||||
except messages.Error as error:
|
|
||||||
if error.code == 'malformed':
|
|
||||||
logger.debug('Error during a POST-as-GET request, '
|
|
||||||
'your ACME CA server may not support it:\n%s', error)
|
|
||||||
logger.debug('Retrying request with GET.')
|
|
||||||
else: # pragma: no cover
|
|
||||||
raise
|
|
||||||
|
|
||||||
# If POST-as-GET is not supported yet, we use a GET instead.
|
|
||||||
return self.net.get(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class BackwardsCompatibleClientV2(object):
|
class BackwardsCompatibleClientV2(object):
|
||||||
@@ -946,7 +927,7 @@ class BackwardsCompatibleClientV2(object):
|
|||||||
return self.client.external_account_required()
|
return self.client.external_account_required()
|
||||||
|
|
||||||
|
|
||||||
class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
class ClientNetwork(object):
|
||||||
"""Wrapper around requests that signs POSTs for authentication.
|
"""Wrapper around requests that signs POSTs for authentication.
|
||||||
|
|
||||||
Also adds user agent, and handles Content-Type.
|
Also adds user agent, and handles Content-Type.
|
||||||
@@ -962,7 +943,7 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
|||||||
:param messages.RegistrationResource account: Account object. Required if you are
|
:param messages.RegistrationResource account: Account object. Required if you are
|
||||||
planning to use .post() with acme_version=2 for anything other than
|
planning to use .post() with acme_version=2 for anything other than
|
||||||
creating a new account; may be set later after registering.
|
creating a new account; may be set later after registering.
|
||||||
:param josepy.JWASignature alg: Algoritm to use in signing JWS.
|
:param josepy.JWASignature alg: Algorithm to use in signing JWS.
|
||||||
:param bool verify_ssl: Whether to verify certificates on SSL connections.
|
:param bool verify_ssl: Whether to verify certificates on SSL connections.
|
||||||
:param str user_agent: String to send as User-Agent header.
|
:param str user_agent: String to send as User-Agent header.
|
||||||
:param float timeout: Timeout for requests.
|
:param float timeout: Timeout for requests.
|
||||||
@@ -972,7 +953,6 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
|||||||
def __init__(self, key, account=None, alg=jose.RS256, verify_ssl=True,
|
def __init__(self, key, account=None, alg=jose.RS256, verify_ssl=True,
|
||||||
user_agent='acme-python', timeout=DEFAULT_NETWORK_TIMEOUT,
|
user_agent='acme-python', timeout=DEFAULT_NETWORK_TIMEOUT,
|
||||||
source_address=None):
|
source_address=None):
|
||||||
# pylint: disable=too-many-arguments
|
|
||||||
self.key = key
|
self.key = key
|
||||||
self.account = account
|
self.account = account
|
||||||
self.alg = alg
|
self.alg = alg
|
||||||
@@ -1008,6 +988,8 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
|||||||
:rtype: `josepy.JWS`
|
:rtype: `josepy.JWS`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
if isinstance(obj, VersionedLEACMEMixin):
|
||||||
|
obj.le_acme_version = acme_version
|
||||||
jobj = obj.json_dumps(indent=2).encode() if obj else b''
|
jobj = obj.json_dumps(indent=2).encode() if obj else b''
|
||||||
logger.debug('JWS payload:\n%s', jobj)
|
logger.debug('JWS payload:\n%s', jobj)
|
||||||
kwargs = {
|
kwargs = {
|
||||||
@@ -1043,6 +1025,9 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
response_ct = response.headers.get('Content-Type')
|
response_ct = response.headers.get('Content-Type')
|
||||||
|
# Strip parameters from the media-type (rfc2616#section-3.7)
|
||||||
|
if response_ct:
|
||||||
|
response_ct = response_ct.split(';')[0].strip()
|
||||||
try:
|
try:
|
||||||
# TODO: response.json() is called twice, once here, and
|
# TODO: response.json() is called twice, once here, and
|
||||||
# once in _get and _post clients
|
# once in _get and _post clients
|
||||||
@@ -1080,7 +1065,6 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
|||||||
return response
|
return response
|
||||||
|
|
||||||
def _send_request(self, method, url, *args, **kwargs):
|
def _send_request(self, method, url, *args, **kwargs):
|
||||||
# pylint: disable=too-many-locals
|
|
||||||
"""Send HTTP request.
|
"""Send HTTP request.
|
||||||
|
|
||||||
Makes sure that `verify_ssl` is respected. Logs request and
|
Makes sure that `verify_ssl` is respected. Logs request and
|
||||||
@@ -1127,10 +1111,9 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
|||||||
err_regex = r".*host='(\S*)'.*Max retries exceeded with url\: (\/\w*).*(\[Errno \d+\])([A-Za-z ]*)"
|
err_regex = r".*host='(\S*)'.*Max retries exceeded with url\: (\/\w*).*(\[Errno \d+\])([A-Za-z ]*)"
|
||||||
m = re.match(err_regex, str(e))
|
m = re.match(err_regex, str(e))
|
||||||
if m is None:
|
if m is None:
|
||||||
raise # pragma: no cover
|
raise # pragma: no cover
|
||||||
else:
|
host, path, _err_no, err_msg = m.groups()
|
||||||
host, path, _err_no, err_msg = m.groups()
|
raise ValueError("Requesting {0}{1}:{2}".format(host, path, err_msg))
|
||||||
raise ValueError("Requesting {0}{1}:{2}".format(host, path, err_msg))
|
|
||||||
|
|
||||||
# If content is DER, log the base64 of it instead of raw bytes, to keep
|
# If content is DER, log the base64 of it instead of raw bytes, to keep
|
||||||
# binary data out of the logs.
|
# binary data out of the logs.
|
||||||
@@ -1140,8 +1123,8 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
|||||||
debug_content = response.content.decode("utf-8")
|
debug_content = response.content.decode("utf-8")
|
||||||
logger.debug('Received response:\nHTTP %d\n%s\n\n%s',
|
logger.debug('Received response:\nHTTP %d\n%s\n\n%s',
|
||||||
response.status_code,
|
response.status_code,
|
||||||
"\n".join(["{0}: {1}".format(k, v)
|
"\n".join("{0}: {1}".format(k, v)
|
||||||
for k, v in response.headers.items()]),
|
for k, v in response.headers.items()),
|
||||||
debug_content)
|
debug_content)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
@@ -1196,8 +1179,7 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
|||||||
if error.code == 'badNonce':
|
if error.code == 'badNonce':
|
||||||
logger.debug('Retrying request after error:\n%s', error)
|
logger.debug('Retrying request after error:\n%s', error)
|
||||||
return self._post_once(*args, **kwargs)
|
return self._post_once(*args, **kwargs)
|
||||||
else:
|
raise
|
||||||
raise
|
|
||||||
|
|
||||||
def _post_once(self, url, obj, content_type=JOSE_CONTENT_TYPE,
|
def _post_once(self, url, obj, content_type=JOSE_CONTENT_TYPE,
|
||||||
acme_version=1, **kwargs):
|
acme_version=1, **kwargs):
|
||||||
|
|||||||
@@ -6,15 +6,14 @@ import os
|
|||||||
import re
|
import re
|
||||||
import socket
|
import socket
|
||||||
|
|
||||||
from OpenSSL import crypto
|
|
||||||
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
|
from OpenSSL import crypto
|
||||||
|
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
||||||
|
|
||||||
from acme import errors
|
from acme import errors
|
||||||
# pylint: disable=unused-import, no-name-in-module
|
from acme.magic_typing import Callable
|
||||||
from acme.magic_typing import Callable, Union, Tuple, Optional
|
from acme.magic_typing import Tuple
|
||||||
# pylint: enable=unused-import, no-name-in-module
|
from acme.magic_typing import Union
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -28,6 +27,15 @@ logger = logging.getLogger(__name__)
|
|||||||
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
|
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
class _DefaultCertSelection(object):
|
||||||
|
def __init__(self, certs):
|
||||||
|
self.certs = certs
|
||||||
|
|
||||||
|
def __call__(self, connection):
|
||||||
|
server_name = connection.get_servername()
|
||||||
|
return self.certs.get(server_name, None)
|
||||||
|
|
||||||
|
|
||||||
class SSLSocket(object): # pylint: disable=too-few-public-methods
|
class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||||
"""SSL wrapper for sockets.
|
"""SSL wrapper for sockets.
|
||||||
|
|
||||||
@@ -35,12 +43,25 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
|||||||
:ivar dict certs: Mapping from domain names (`bytes`) to
|
:ivar dict certs: Mapping from domain names (`bytes`) to
|
||||||
`OpenSSL.crypto.X509`.
|
`OpenSSL.crypto.X509`.
|
||||||
:ivar method: See `OpenSSL.SSL.Context` for allowed values.
|
:ivar method: See `OpenSSL.SSL.Context` for allowed values.
|
||||||
|
:ivar alpn_selection: Hook to select negotiated ALPN protocol for
|
||||||
|
connection.
|
||||||
|
:ivar cert_selection: Hook to select certificate for connection. If given,
|
||||||
|
`certs` parameter would be ignored, and therefore must be empty.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def __init__(self, sock, certs, method=_DEFAULT_SSL_METHOD):
|
def __init__(self, sock, certs=None,
|
||||||
|
method=_DEFAULT_SSL_METHOD, alpn_selection=None,
|
||||||
|
cert_selection=None):
|
||||||
self.sock = sock
|
self.sock = sock
|
||||||
self.certs = certs
|
self.alpn_selection = alpn_selection
|
||||||
self.method = method
|
self.method = method
|
||||||
|
if not cert_selection and not certs:
|
||||||
|
raise ValueError("Neither cert_selection or certs specified.")
|
||||||
|
if cert_selection and certs:
|
||||||
|
raise ValueError("Both cert_selection and certs specified.")
|
||||||
|
if cert_selection is None:
|
||||||
|
cert_selection = _DefaultCertSelection(certs)
|
||||||
|
self.cert_selection = cert_selection
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
return getattr(self.sock, name)
|
return getattr(self.sock, name)
|
||||||
@@ -57,24 +78,25 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
|||||||
:type connection: :class:`OpenSSL.Connection`
|
:type connection: :class:`OpenSSL.Connection`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
server_name = connection.get_servername()
|
pair = self.cert_selection(connection)
|
||||||
try:
|
if pair is None:
|
||||||
key, cert = self.certs[server_name]
|
logger.debug("Certificate selection for server name %s failed, dropping SSL",
|
||||||
except KeyError:
|
connection.get_servername())
|
||||||
logger.debug("Server name (%s) not recognized, dropping SSL",
|
|
||||||
server_name)
|
|
||||||
return
|
return
|
||||||
|
key, cert = pair
|
||||||
new_context = SSL.Context(self.method)
|
new_context = SSL.Context(self.method)
|
||||||
new_context.set_options(SSL.OP_NO_SSLv2)
|
new_context.set_options(SSL.OP_NO_SSLv2)
|
||||||
new_context.set_options(SSL.OP_NO_SSLv3)
|
new_context.set_options(SSL.OP_NO_SSLv3)
|
||||||
new_context.use_privatekey(key)
|
new_context.use_privatekey(key)
|
||||||
new_context.use_certificate(cert)
|
new_context.use_certificate(cert)
|
||||||
|
if self.alpn_selection is not None:
|
||||||
|
new_context.set_alpn_select_callback(self.alpn_selection)
|
||||||
connection.set_context(new_context)
|
connection.set_context(new_context)
|
||||||
|
|
||||||
class FakeConnection(object):
|
class FakeConnection(object):
|
||||||
"""Fake OpenSSL.SSL.Connection."""
|
"""Fake OpenSSL.SSL.Connection."""
|
||||||
|
|
||||||
# pylint: disable=too-few-public-methods,missing-docstring
|
# pylint: disable=missing-function-docstring
|
||||||
|
|
||||||
def __init__(self, connection):
|
def __init__(self, connection):
|
||||||
self._wrapped = connection
|
self._wrapped = connection
|
||||||
@@ -86,13 +108,15 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
|||||||
# OpenSSL.SSL.Connection.shutdown doesn't accept any args
|
# OpenSSL.SSL.Connection.shutdown doesn't accept any args
|
||||||
return self._wrapped.shutdown()
|
return self._wrapped.shutdown()
|
||||||
|
|
||||||
def accept(self): # pylint: disable=missing-docstring
|
def accept(self): # pylint: disable=missing-function-docstring
|
||||||
sock, addr = self.sock.accept()
|
sock, addr = self.sock.accept()
|
||||||
|
|
||||||
context = SSL.Context(self.method)
|
context = SSL.Context(self.method)
|
||||||
context.set_options(SSL.OP_NO_SSLv2)
|
context.set_options(SSL.OP_NO_SSLv2)
|
||||||
context.set_options(SSL.OP_NO_SSLv3)
|
context.set_options(SSL.OP_NO_SSLv3)
|
||||||
context.set_tlsext_servername_callback(self._pick_certificate_cb)
|
context.set_tlsext_servername_callback(self._pick_certificate_cb)
|
||||||
|
if self.alpn_selection is not None:
|
||||||
|
context.set_alpn_select_callback(self.alpn_selection)
|
||||||
|
|
||||||
ssl_sock = self.FakeConnection(SSL.Connection(context, sock))
|
ssl_sock = self.FakeConnection(SSL.Connection(context, sock))
|
||||||
ssl_sock.set_accept_state()
|
ssl_sock.set_accept_state()
|
||||||
@@ -108,8 +132,9 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
|||||||
return ssl_sock, addr
|
return ssl_sock, addr
|
||||||
|
|
||||||
|
|
||||||
def probe_sni(name, host, port=443, timeout=300,
|
def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-arguments
|
||||||
method=_DEFAULT_SSL_METHOD, source_address=('', 0)):
|
method=_DEFAULT_SSL_METHOD, source_address=('', 0),
|
||||||
|
alpn_protocols=None):
|
||||||
"""Probe SNI server for SSL certificate.
|
"""Probe SNI server for SSL certificate.
|
||||||
|
|
||||||
:param bytes name: Byte string to send as the server name in the
|
:param bytes name: Byte string to send as the server name in the
|
||||||
@@ -121,6 +146,8 @@ def probe_sni(name, host, port=443, timeout=300,
|
|||||||
:param tuple source_address: Enables multi-path probing (selection
|
:param tuple source_address: Enables multi-path probing (selection
|
||||||
of source interface). See `socket.creation_connection` for more
|
of source interface). See `socket.creation_connection` for more
|
||||||
info. Available only in Python 2.7+.
|
info. Available only in Python 2.7+.
|
||||||
|
:param alpn_protocols: Protocols to request using ALPN.
|
||||||
|
:type alpn_protocols: `list` of `bytes`
|
||||||
|
|
||||||
:raises acme.errors.Error: In case of any problems.
|
:raises acme.errors.Error: In case of any problems.
|
||||||
|
|
||||||
@@ -150,6 +177,8 @@ def probe_sni(name, host, port=443, timeout=300,
|
|||||||
client_ssl = SSL.Connection(context, client)
|
client_ssl = SSL.Connection(context, client)
|
||||||
client_ssl.set_connect_state()
|
client_ssl.set_connect_state()
|
||||||
client_ssl.set_tlsext_host_name(name) # pyOpenSSL>=0.13
|
client_ssl.set_tlsext_host_name(name) # pyOpenSSL>=0.13
|
||||||
|
if alpn_protocols is not None:
|
||||||
|
client_ssl.set_alpn_protos(alpn_protocols)
|
||||||
try:
|
try:
|
||||||
client_ssl.do_handshake()
|
client_ssl.do_handshake()
|
||||||
client_ssl.shutdown()
|
client_ssl.shutdown()
|
||||||
@@ -240,12 +269,14 @@ def _pyopenssl_cert_or_req_san(cert_or_req):
|
|||||||
|
|
||||||
|
|
||||||
def gen_ss_cert(key, domains, not_before=None,
|
def gen_ss_cert(key, domains, not_before=None,
|
||||||
validity=(7 * 24 * 60 * 60), force_san=True):
|
validity=(7 * 24 * 60 * 60), force_san=True, extensions=None):
|
||||||
"""Generate new self-signed certificate.
|
"""Generate new self-signed certificate.
|
||||||
|
|
||||||
:type domains: `list` of `unicode`
|
:type domains: `list` of `unicode`
|
||||||
:param OpenSSL.crypto.PKey key:
|
:param OpenSSL.crypto.PKey key:
|
||||||
:param bool force_san:
|
:param bool force_san:
|
||||||
|
:param extensions: List of additional extensions to include in the cert.
|
||||||
|
:type extensions: `list` of `OpenSSL.crypto.X509Extension`
|
||||||
|
|
||||||
If more than one domain is provided, all of the domains are put into
|
If more than one domain is provided, all of the domains are put into
|
||||||
``subjectAltName`` X.509 extension and first domain is set as the
|
``subjectAltName`` X.509 extension and first domain is set as the
|
||||||
@@ -258,10 +289,13 @@ def gen_ss_cert(key, domains, not_before=None,
|
|||||||
cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
|
cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
|
||||||
cert.set_version(2)
|
cert.set_version(2)
|
||||||
|
|
||||||
extensions = [
|
if extensions is None:
|
||||||
|
extensions = []
|
||||||
|
|
||||||
|
extensions.append(
|
||||||
crypto.X509Extension(
|
crypto.X509Extension(
|
||||||
b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
|
b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
|
||||||
]
|
)
|
||||||
|
|
||||||
cert.get_subject().CN = domains[0]
|
cert.get_subject().CN = domains[0]
|
||||||
# TODO: what to put into cert.get_subject()?
|
# TODO: what to put into cert.get_subject()?
|
||||||
@@ -298,7 +332,6 @@ def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
|
|||||||
|
|
||||||
def _dump_cert(cert):
|
def _dump_cert(cert):
|
||||||
if isinstance(cert, jose.ComparableX509):
|
if isinstance(cert, jose.ComparableX509):
|
||||||
# pylint: disable=protected-access
|
|
||||||
cert = cert.wrapped
|
cert = cert.wrapped
|
||||||
return crypto.dump_certificate(filetype, cert)
|
return crypto.dump_certificate(filetype, cert)
|
||||||
|
|
||||||
|
|||||||
@@ -29,7 +29,12 @@ class NonceError(ClientError):
|
|||||||
class BadNonce(NonceError):
|
class BadNonce(NonceError):
|
||||||
"""Bad nonce error."""
|
"""Bad nonce error."""
|
||||||
def __init__(self, nonce, error, *args, **kwargs):
|
def __init__(self, nonce, error, *args, **kwargs):
|
||||||
super(BadNonce, self).__init__(*args, **kwargs)
|
# MyPy complains here that there is too many arguments for BaseException constructor.
|
||||||
|
# This is an error fixed in typeshed, see https://github.com/python/mypy/issues/4183
|
||||||
|
# The fix is included in MyPy>=0.740, but upgrading it would bring dozen of errors due to
|
||||||
|
# new types definitions. So we ignore the error until the code base is fixed to match
|
||||||
|
# with MyPy>=0.740 referential.
|
||||||
|
super(BadNonce, self).__init__(*args, **kwargs) # type: ignore
|
||||||
self.nonce = nonce
|
self.nonce = nonce
|
||||||
self.error = error
|
self.error = error
|
||||||
|
|
||||||
@@ -48,7 +53,8 @@ class MissingNonce(NonceError):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
def __init__(self, response, *args, **kwargs):
|
def __init__(self, response, *args, **kwargs):
|
||||||
super(MissingNonce, self).__init__(*args, **kwargs)
|
# See comment in BadNonce constructor above for an explanation of type: ignore here.
|
||||||
|
super(MissingNonce, self).__init__(*args, **kwargs) # type: ignore
|
||||||
self.response = response
|
self.response = response
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
@@ -83,6 +89,7 @@ class PollError(ClientError):
|
|||||||
return '{0}(exhausted={1!r}, updated={2!r})'.format(
|
return '{0}(exhausted={1!r}, updated={2!r})'.format(
|
||||||
self.__class__.__name__, self.exhausted, self.updated)
|
self.__class__.__name__, self.exhausted, self.updated)
|
||||||
|
|
||||||
|
|
||||||
class ValidationError(Error):
|
class ValidationError(Error):
|
||||||
"""Error for authorization failures. Contains a list of authorization
|
"""Error for authorization failures. Contains a list of authorization
|
||||||
resources, each of which is invalid and should have an error field.
|
resources, each of which is invalid and should have an error field.
|
||||||
@@ -91,9 +98,11 @@ class ValidationError(Error):
|
|||||||
self.failed_authzrs = failed_authzrs
|
self.failed_authzrs = failed_authzrs
|
||||||
super(ValidationError, self).__init__()
|
super(ValidationError, self).__init__()
|
||||||
|
|
||||||
class TimeoutError(Error):
|
|
||||||
|
class TimeoutError(Error): # pylint: disable=redefined-builtin
|
||||||
"""Error for when polling an authorization or an order times out."""
|
"""Error for when polling an authorization or an order times out."""
|
||||||
|
|
||||||
|
|
||||||
class IssuanceError(Error):
|
class IssuanceError(Error):
|
||||||
"""Error sent by the server after requesting issuance of a certificate."""
|
"""Error sent by the server after requesting issuance of a certificate."""
|
||||||
|
|
||||||
@@ -105,6 +114,7 @@ class IssuanceError(Error):
|
|||||||
self.error = error
|
self.error = error
|
||||||
super(IssuanceError, self).__init__()
|
super(IssuanceError, self).__init__()
|
||||||
|
|
||||||
|
|
||||||
class ConflictError(ClientError):
|
class ConflictError(ClientError):
|
||||||
"""Error for when the server returns a 409 (Conflict) HTTP status.
|
"""Error for when the server returns a 409 (Conflict) HTTP status.
|
||||||
|
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ import logging
|
|||||||
import josepy as jose
|
import josepy as jose
|
||||||
import pyrfc3339
|
import pyrfc3339
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ class Header(jose.Header):
|
|||||||
url = jose.Field('url', omitempty=True)
|
url = jose.Field('url', omitempty=True)
|
||||||
|
|
||||||
@nonce.decoder
|
@nonce.decoder
|
||||||
def nonce(value): # pylint: disable=missing-docstring,no-self-argument
|
def nonce(value): # pylint: disable=no-self-argument,missing-function-docstring
|
||||||
try:
|
try:
|
||||||
return jose.decode_b64jose(value)
|
return jose.decode_b64jose(value)
|
||||||
except jose.DeserializationError as error:
|
except jose.DeserializationError as error:
|
||||||
@@ -40,10 +40,10 @@ class Signature(jose.Signature):
|
|||||||
class JWS(jose.JWS):
|
class JWS(jose.JWS):
|
||||||
"""ACME-specific JWS. Includes none, url, and kid in protected header."""
|
"""ACME-specific JWS. Includes none, url, and kid in protected header."""
|
||||||
signature_cls = Signature
|
signature_cls = Signature
|
||||||
__slots__ = jose.JWS._orig_slots # pylint: disable=no-member
|
__slots__ = jose.JWS._orig_slots
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
# pylint: disable=arguments-differ,too-many-arguments
|
# pylint: disable=arguments-differ
|
||||||
def sign(cls, payload, key, alg, nonce, url=None, kid=None):
|
def sign(cls, payload, key, alg, nonce, url=None, kid=None):
|
||||||
# Per ACME spec, jwk and kid are mutually exclusive, so only include a
|
# Per ACME spec, jwk and kid are mutually exclusive, so only include a
|
||||||
# jwk field if kid is not provided.
|
# jwk field if kid is not provided.
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
"""Shim class to not have to depend on typing module in prod."""
|
"""Shim class to not have to depend on typing module in prod."""
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
|
||||||
class TypingClass(object):
|
class TypingClass(object):
|
||||||
"""Ignore import errors by getting anything"""
|
"""Ignore import errors by getting anything"""
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
@@ -9,8 +10,6 @@ class TypingClass(object):
|
|||||||
try:
|
try:
|
||||||
# mypy doesn't respect modifying sys.modules
|
# mypy doesn't respect modifying sys.modules
|
||||||
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
|
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
|
||||||
# pylint: disable=unused-import
|
|
||||||
from typing import Collection, IO # type: ignore
|
from typing import Collection, IO # type: ignore
|
||||||
# pylint: enable=unused-import
|
|
||||||
except ImportError:
|
except ImportError:
|
||||||
sys.modules[__name__] = TypingClass()
|
sys.modules[__name__] = TypingClass()
|
||||||
|
|||||||
@@ -1,18 +1,22 @@
|
|||||||
"""ACME protocol messages."""
|
"""ACME protocol messages."""
|
||||||
import json
|
import json
|
||||||
import six
|
|
||||||
try:
|
|
||||||
from collections.abc import Hashable # pylint: disable=no-name-in-module
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
from collections import Hashable
|
|
||||||
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
|
import six
|
||||||
|
|
||||||
from acme import challenges
|
from acme import challenges
|
||||||
from acme import errors
|
from acme import errors
|
||||||
from acme import fields
|
from acme import fields
|
||||||
from acme import util
|
|
||||||
from acme import jws
|
from acme import jws
|
||||||
|
from acme import util
|
||||||
|
from acme.mixins import ResourceMixin
|
||||||
|
|
||||||
|
try:
|
||||||
|
from collections.abc import Hashable
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
from collections import Hashable
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
OLD_ERROR_PREFIX = "urn:acme:error:"
|
OLD_ERROR_PREFIX = "urn:acme:error:"
|
||||||
ERROR_PREFIX = "urn:ietf:params:acme:error:"
|
ERROR_PREFIX = "urn:ietf:params:acme:error:"
|
||||||
@@ -33,7 +37,7 @@ ERROR_CODES = {
|
|||||||
' domain'),
|
' domain'),
|
||||||
'dns': 'There was a problem with a DNS query during identifier validation',
|
'dns': 'There was a problem with a DNS query during identifier validation',
|
||||||
'dnssec': 'The server could not validate a DNSSEC signed domain',
|
'dnssec': 'The server could not validate a DNSSEC signed domain',
|
||||||
'incorrectResponse': 'Response recieved didn\'t match the challenge\'s requirements',
|
'incorrectResponse': 'Response received didn\'t match the challenge\'s requirements',
|
||||||
# deprecate invalidEmail
|
# deprecate invalidEmail
|
||||||
'invalidEmail': 'The provided email for a registration was invalid',
|
'invalidEmail': 'The provided email for a registration was invalid',
|
||||||
'invalidContact': 'The provided contact URI was invalid',
|
'invalidContact': 'The provided contact URI was invalid',
|
||||||
@@ -143,7 +147,7 @@ class _Constant(jose.JSONDeSerializable, Hashable): # type: ignore
|
|||||||
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
|
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
|
||||||
raise jose.DeserializationError(
|
raise jose.DeserializationError(
|
||||||
'{0} not recognized'.format(cls.__name__))
|
'{0} not recognized'.format(cls.__name__))
|
||||||
return cls.POSSIBLE_NAMES[jobj] # pylint: disable=unsubscriptable-object
|
return cls.POSSIBLE_NAMES[jobj]
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return '{0}({1})'.format(self.__class__.__name__, self.name)
|
return '{0}({1})'.format(self.__class__.__name__, self.name)
|
||||||
@@ -242,13 +246,13 @@ class Directory(jose.JSONDeSerializable):
|
|||||||
try:
|
try:
|
||||||
return self[name.replace('_', '-')]
|
return self[name.replace('_', '-')]
|
||||||
except KeyError as error:
|
except KeyError as error:
|
||||||
raise AttributeError(str(error) + ': ' + name)
|
raise AttributeError(str(error))
|
||||||
|
|
||||||
def __getitem__(self, name):
|
def __getitem__(self, name):
|
||||||
try:
|
try:
|
||||||
return self._jobj[self._canon_key(name)]
|
return self._jobj[self._canon_key(name)]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise KeyError('Directory field not found')
|
raise KeyError('Directory field "' + self._canon_key(name) + '" not found')
|
||||||
|
|
||||||
def to_partial_json(self):
|
def to_partial_json(self):
|
||||||
return self._jobj
|
return self._jobj
|
||||||
@@ -353,13 +357,13 @@ class Registration(ResourceBody):
|
|||||||
|
|
||||||
|
|
||||||
@Directory.register
|
@Directory.register
|
||||||
class NewRegistration(Registration):
|
class NewRegistration(ResourceMixin, Registration):
|
||||||
"""New registration."""
|
"""New registration."""
|
||||||
resource_type = 'new-reg'
|
resource_type = 'new-reg'
|
||||||
resource = fields.Resource(resource_type)
|
resource = fields.Resource(resource_type)
|
||||||
|
|
||||||
|
|
||||||
class UpdateRegistration(Registration):
|
class UpdateRegistration(ResourceMixin, Registration):
|
||||||
"""Update registration."""
|
"""Update registration."""
|
||||||
resource_type = 'reg'
|
resource_type = 'reg'
|
||||||
resource = fields.Resource(resource_type)
|
resource = fields.Resource(resource_type)
|
||||||
@@ -457,7 +461,6 @@ class ChallengeResource(Resource):
|
|||||||
@property
|
@property
|
||||||
def uri(self):
|
def uri(self):
|
||||||
"""The URL of the challenge body."""
|
"""The URL of the challenge body."""
|
||||||
# pylint: disable=function-redefined,no-member
|
|
||||||
return self.body.uri
|
return self.body.uri
|
||||||
|
|
||||||
|
|
||||||
@@ -485,7 +488,7 @@ class Authorization(ResourceBody):
|
|||||||
wildcard = jose.Field('wildcard', omitempty=True)
|
wildcard = jose.Field('wildcard', omitempty=True)
|
||||||
|
|
||||||
@challenges.decoder
|
@challenges.decoder
|
||||||
def challenges(value): # pylint: disable=missing-docstring,no-self-argument
|
def challenges(value): # pylint: disable=no-self-argument,missing-function-docstring
|
||||||
return tuple(ChallengeBody.from_json(chall) for chall in value)
|
return tuple(ChallengeBody.from_json(chall) for chall in value)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -496,13 +499,13 @@ class Authorization(ResourceBody):
|
|||||||
|
|
||||||
|
|
||||||
@Directory.register
|
@Directory.register
|
||||||
class NewAuthorization(Authorization):
|
class NewAuthorization(ResourceMixin, Authorization):
|
||||||
"""New authorization."""
|
"""New authorization."""
|
||||||
resource_type = 'new-authz'
|
resource_type = 'new-authz'
|
||||||
resource = fields.Resource(resource_type)
|
resource = fields.Resource(resource_type)
|
||||||
|
|
||||||
|
|
||||||
class UpdateAuthorization(Authorization):
|
class UpdateAuthorization(ResourceMixin, Authorization):
|
||||||
"""Update authorization."""
|
"""Update authorization."""
|
||||||
resource_type = 'authz'
|
resource_type = 'authz'
|
||||||
resource = fields.Resource(resource_type)
|
resource = fields.Resource(resource_type)
|
||||||
@@ -520,7 +523,7 @@ class AuthorizationResource(ResourceWithURI):
|
|||||||
|
|
||||||
|
|
||||||
@Directory.register
|
@Directory.register
|
||||||
class CertificateRequest(jose.JSONObjectWithFields):
|
class CertificateRequest(ResourceMixin, jose.JSONObjectWithFields):
|
||||||
"""ACME new-cert request.
|
"""ACME new-cert request.
|
||||||
|
|
||||||
:ivar josepy.util.ComparableX509 csr:
|
:ivar josepy.util.ComparableX509 csr:
|
||||||
@@ -546,7 +549,7 @@ class CertificateResource(ResourceWithURI):
|
|||||||
|
|
||||||
|
|
||||||
@Directory.register
|
@Directory.register
|
||||||
class Revocation(jose.JSONObjectWithFields):
|
class Revocation(ResourceMixin, jose.JSONObjectWithFields):
|
||||||
"""Revocation message.
|
"""Revocation message.
|
||||||
|
|
||||||
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
|
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
|
||||||
@@ -582,7 +585,7 @@ class Order(ResourceBody):
|
|||||||
error = jose.Field('error', omitempty=True, decoder=Error.from_json)
|
error = jose.Field('error', omitempty=True, decoder=Error.from_json)
|
||||||
|
|
||||||
@identifiers.decoder
|
@identifiers.decoder
|
||||||
def identifiers(value): # pylint: disable=missing-docstring,no-self-argument
|
def identifiers(value): # pylint: disable=no-self-argument,missing-function-docstring
|
||||||
return tuple(Identifier.from_json(identifier) for identifier in value)
|
return tuple(Identifier.from_json(identifier) for identifier in value)
|
||||||
|
|
||||||
class OrderResource(ResourceWithURI):
|
class OrderResource(ResourceWithURI):
|
||||||
|
|||||||
65
acme/acme/mixins.py
Normal file
65
acme/acme/mixins.py
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
"""Useful mixins for Challenge and Resource objects"""
|
||||||
|
|
||||||
|
|
||||||
|
class VersionedLEACMEMixin(object):
|
||||||
|
"""This mixin stores the version of Let's Encrypt's endpoint being used."""
|
||||||
|
@property
|
||||||
|
def le_acme_version(self):
|
||||||
|
"""Define the version of ACME protocol to use"""
|
||||||
|
return getattr(self, '_le_acme_version', 1)
|
||||||
|
|
||||||
|
@le_acme_version.setter
|
||||||
|
def le_acme_version(self, version):
|
||||||
|
# We need to use object.__setattr__ to not depend on the specific implementation of
|
||||||
|
# __setattr__ in current class (eg. jose.TypedJSONObjectWithFields raises AttributeError
|
||||||
|
# for any attempt to set an attribute to make objects immutable).
|
||||||
|
object.__setattr__(self, '_le_acme_version', version)
|
||||||
|
|
||||||
|
def __setattr__(self, key, value):
|
||||||
|
if key == 'le_acme_version':
|
||||||
|
# Required for @property to operate properly. See comment above.
|
||||||
|
object.__setattr__(self, key, value)
|
||||||
|
else:
|
||||||
|
super(VersionedLEACMEMixin, self).__setattr__(key, value) # pragma: no cover
|
||||||
|
|
||||||
|
|
||||||
|
class ResourceMixin(VersionedLEACMEMixin):
|
||||||
|
"""
|
||||||
|
This mixin generates a RFC8555 compliant JWS payload
|
||||||
|
by removing the `resource` field if needed (eg. ACME v2 protocol).
|
||||||
|
"""
|
||||||
|
def to_partial_json(self):
|
||||||
|
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||||
|
return _safe_jobj_compliance(super(ResourceMixin, self),
|
||||||
|
'to_partial_json', 'resource')
|
||||||
|
|
||||||
|
def fields_to_partial_json(self):
|
||||||
|
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||||
|
return _safe_jobj_compliance(super(ResourceMixin, self),
|
||||||
|
'fields_to_partial_json', 'resource')
|
||||||
|
|
||||||
|
|
||||||
|
class TypeMixin(VersionedLEACMEMixin):
|
||||||
|
"""
|
||||||
|
This mixin allows generation of a RFC8555 compliant JWS payload
|
||||||
|
by removing the `type` field if needed (eg. ACME v2 protocol).
|
||||||
|
"""
|
||||||
|
def to_partial_json(self):
|
||||||
|
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||||
|
return _safe_jobj_compliance(super(TypeMixin, self),
|
||||||
|
'to_partial_json', 'type')
|
||||||
|
|
||||||
|
def fields_to_partial_json(self):
|
||||||
|
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||||
|
return _safe_jobj_compliance(super(TypeMixin, self),
|
||||||
|
'fields_to_partial_json', 'type')
|
||||||
|
|
||||||
|
|
||||||
|
def _safe_jobj_compliance(instance, jobj_method, uncompliant_field):
|
||||||
|
if hasattr(instance, jobj_method):
|
||||||
|
jobj = getattr(instance, jobj_method)()
|
||||||
|
if instance.le_acme_version == 2:
|
||||||
|
jobj.pop(uncompliant_field, None)
|
||||||
|
return jobj
|
||||||
|
|
||||||
|
raise AttributeError('Method {0}() is not implemented.'.format(jobj_method)) # pragma: no cover
|
||||||
@@ -1,30 +1,20 @@
|
|||||||
"""Support for standalone client challenge solvers. """
|
"""Support for standalone client challenge solvers. """
|
||||||
import argparse
|
|
||||||
import collections
|
import collections
|
||||||
import functools
|
import functools
|
||||||
import logging
|
import logging
|
||||||
import os
|
|
||||||
import socket
|
import socket
|
||||||
import sys
|
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
from six.moves import BaseHTTPServer # type: ignore # pylint: disable=import-error
|
from six.moves import BaseHTTPServer # type: ignore
|
||||||
from six.moves import http_client # pylint: disable=import-error
|
from six.moves import http_client
|
||||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
from six.moves import socketserver # type: ignore
|
||||||
|
|
||||||
import OpenSSL
|
|
||||||
|
|
||||||
from acme import challenges
|
from acme import challenges
|
||||||
from acme import crypto_util
|
from acme import crypto_util
|
||||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
from acme.magic_typing import List
|
||||||
from acme import _TLSSNI01DeprecationModule
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
# six.moves.* | pylint: disable=no-member,attribute-defined-outside-init
|
|
||||||
# pylint: disable=too-few-public-methods,no-init
|
|
||||||
|
|
||||||
|
|
||||||
class TLSServer(socketserver.TCPServer):
|
class TLSServer(socketserver.TCPServer):
|
||||||
"""Generic TLS Server."""
|
"""Generic TLS Server."""
|
||||||
@@ -37,21 +27,27 @@ class TLSServer(socketserver.TCPServer):
|
|||||||
self.address_family = socket.AF_INET
|
self.address_family = socket.AF_INET
|
||||||
self.certs = kwargs.pop("certs", {})
|
self.certs = kwargs.pop("certs", {})
|
||||||
self.method = kwargs.pop(
|
self.method = kwargs.pop(
|
||||||
# pylint: disable=protected-access
|
|
||||||
"method", crypto_util._DEFAULT_SSL_METHOD)
|
"method", crypto_util._DEFAULT_SSL_METHOD)
|
||||||
self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
|
self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
|
||||||
socketserver.TCPServer.__init__(self, *args, **kwargs)
|
socketserver.TCPServer.__init__(self, *args, **kwargs)
|
||||||
|
|
||||||
def _wrap_sock(self):
|
def _wrap_sock(self):
|
||||||
self.socket = crypto_util.SSLSocket(
|
self.socket = crypto_util.SSLSocket(
|
||||||
self.socket, certs=self.certs, method=self.method)
|
self.socket, cert_selection=self._cert_selection,
|
||||||
|
alpn_selection=getattr(self, '_alpn_selection', None),
|
||||||
|
method=self.method)
|
||||||
|
|
||||||
def server_bind(self): # pylint: disable=missing-docstring
|
def _cert_selection(self, connection): # pragma: no cover
|
||||||
|
"""Callback selecting certificate for connection."""
|
||||||
|
server_name = connection.get_servername()
|
||||||
|
return self.certs.get(server_name, None)
|
||||||
|
|
||||||
|
def server_bind(self):
|
||||||
self._wrap_sock()
|
self._wrap_sock()
|
||||||
return socketserver.TCPServer.server_bind(self)
|
return socketserver.TCPServer.server_bind(self)
|
||||||
|
|
||||||
|
|
||||||
class ACMEServerMixin: # pylint: disable=old-style-class
|
class ACMEServerMixin:
|
||||||
"""ACME server common settings mixin."""
|
"""ACME server common settings mixin."""
|
||||||
# TODO: c.f. #858
|
# TODO: c.f. #858
|
||||||
server_version = "ACME client standalone challenge solver"
|
server_version = "ACME client standalone challenge solver"
|
||||||
@@ -112,7 +108,6 @@ class BaseDualNetworkedServers(object):
|
|||||||
"""Wraps socketserver.TCPServer.serve_forever"""
|
"""Wraps socketserver.TCPServer.serve_forever"""
|
||||||
for server in self.servers:
|
for server in self.servers:
|
||||||
thread = threading.Thread(
|
thread = threading.Thread(
|
||||||
# pylint: disable=no-member
|
|
||||||
target=server.serve_forever)
|
target=server.serve_forever)
|
||||||
thread.start()
|
thread.start()
|
||||||
self.threads.append(thread)
|
self.threads.append(thread)
|
||||||
@@ -132,33 +127,38 @@ class BaseDualNetworkedServers(object):
|
|||||||
self.threads = []
|
self.threads = []
|
||||||
|
|
||||||
|
|
||||||
class TLSSNI01Server(TLSServer, ACMEServerMixin):
|
class TLSALPN01Server(TLSServer, ACMEServerMixin):
|
||||||
"""TLSSNI01 Server."""
|
"""TLSALPN01 Server."""
|
||||||
|
|
||||||
def __init__(self, server_address, certs, ipv6=False):
|
ACME_TLS_1_PROTOCOL = b"acme-tls/1"
|
||||||
|
|
||||||
|
def __init__(self, server_address, certs, challenge_certs, ipv6=False):
|
||||||
TLSServer.__init__(
|
TLSServer.__init__(
|
||||||
self, server_address, BaseRequestHandlerWithLogging, certs=certs, ipv6=ipv6)
|
self, server_address, _BaseRequestHandlerWithLogging, certs=certs,
|
||||||
|
ipv6=ipv6)
|
||||||
|
self.challenge_certs = challenge_certs
|
||||||
|
|
||||||
|
def _cert_selection(self, connection):
|
||||||
|
# TODO: We would like to serve challenge cert only if asked for it via
|
||||||
|
# ALPN. To do this, we need to retrieve the list of protos from client
|
||||||
|
# hello, but this is currently impossible with openssl [0], and ALPN
|
||||||
|
# negotiation is done after cert selection.
|
||||||
|
# Therefore, currently we always return challenge cert, and terminate
|
||||||
|
# handshake in alpn_selection() if ALPN protos are not what we expect.
|
||||||
|
# [0] https://github.com/openssl/openssl/issues/4952
|
||||||
|
server_name = connection.get_servername()
|
||||||
|
logger.debug("Serving challenge cert for server name %s", server_name)
|
||||||
|
return self.challenge_certs.get(server_name, None)
|
||||||
|
|
||||||
class TLSSNI01DualNetworkedServers(BaseDualNetworkedServers):
|
def _alpn_selection(self, _connection, alpn_protos):
|
||||||
"""TLSSNI01Server Wrapper. Tries everything for both. Failures for one don't
|
"""Callback to select alpn protocol."""
|
||||||
affect the other."""
|
if len(alpn_protos) == 1 and alpn_protos[0] == self.ACME_TLS_1_PROTOCOL:
|
||||||
|
logger.debug("Agreed on %s ALPN", self.ACME_TLS_1_PROTOCOL)
|
||||||
def __init__(self, *args, **kwargs):
|
return self.ACME_TLS_1_PROTOCOL
|
||||||
BaseDualNetworkedServers.__init__(self, TLSSNI01Server, *args, **kwargs)
|
logger.debug("Cannot agree on ALPN proto. Got: %s", str(alpn_protos))
|
||||||
|
# Explicitly close the connection now, by returning an empty string.
|
||||||
|
# See https://www.pyopenssl.org/en/stable/api/ssl.html#OpenSSL.SSL.Context.set_alpn_select_callback # pylint: disable=line-too-long
|
||||||
class BaseRequestHandlerWithLogging(socketserver.BaseRequestHandler):
|
return b""
|
||||||
"""BaseRequestHandler with logging."""
|
|
||||||
|
|
||||||
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
|
||||||
"""Log arbitrary message."""
|
|
||||||
logger.debug("%s - - %s", self.client_address[0], format % args)
|
|
||||||
|
|
||||||
def handle(self):
|
|
||||||
"""Handle request."""
|
|
||||||
self.log_message("Incoming request")
|
|
||||||
socketserver.BaseRequestHandler.handle(self)
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPServer(BaseHTTPServer.HTTPServer):
|
class HTTPServer(BaseHTTPServer.HTTPServer):
|
||||||
@@ -176,10 +176,10 @@ class HTTPServer(BaseHTTPServer.HTTPServer):
|
|||||||
class HTTP01Server(HTTPServer, ACMEServerMixin):
|
class HTTP01Server(HTTPServer, ACMEServerMixin):
|
||||||
"""HTTP01 Server."""
|
"""HTTP01 Server."""
|
||||||
|
|
||||||
def __init__(self, server_address, resources, ipv6=False):
|
def __init__(self, server_address, resources, ipv6=False, timeout=30):
|
||||||
HTTPServer.__init__(
|
HTTPServer.__init__(
|
||||||
self, server_address, HTTP01RequestHandler.partial_init(
|
self, server_address, HTTP01RequestHandler.partial_init(
|
||||||
simple_http_resources=resources), ipv6=ipv6)
|
simple_http_resources=resources, timeout=timeout), ipv6=ipv6)
|
||||||
|
|
||||||
|
|
||||||
class HTTP01DualNetworkedServers(BaseDualNetworkedServers):
|
class HTTP01DualNetworkedServers(BaseDualNetworkedServers):
|
||||||
@@ -204,6 +204,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
self.simple_http_resources = kwargs.pop("simple_http_resources", set())
|
self.simple_http_resources = kwargs.pop("simple_http_resources", set())
|
||||||
|
self.timeout = kwargs.pop('timeout', 30)
|
||||||
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
|
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
|
||||||
|
|
||||||
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
||||||
@@ -215,7 +216,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||||||
self.log_message("Incoming request")
|
self.log_message("Incoming request")
|
||||||
BaseHTTPServer.BaseHTTPRequestHandler.handle(self)
|
BaseHTTPServer.BaseHTTPRequestHandler.handle(self)
|
||||||
|
|
||||||
def do_GET(self): # pylint: disable=invalid-name,missing-docstring
|
def do_GET(self): # pylint: disable=invalid-name,missing-function-docstring
|
||||||
if self.path == "/":
|
if self.path == "/":
|
||||||
self.handle_index()
|
self.handle_index()
|
||||||
elif self.path.startswith("/" + challenges.HTTP01.URI_ROOT_PATH):
|
elif self.path.startswith("/" + challenges.HTTP01.URI_ROOT_PATH):
|
||||||
@@ -253,7 +254,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||||||
self.path)
|
self.path)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def partial_init(cls, simple_http_resources):
|
def partial_init(cls, simple_http_resources, timeout):
|
||||||
"""Partially initialize this handler.
|
"""Partially initialize this handler.
|
||||||
|
|
||||||
This is useful because `socketserver.BaseServer` takes
|
This is useful because `socketserver.BaseServer` takes
|
||||||
@@ -262,44 +263,18 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
return functools.partial(
|
return functools.partial(
|
||||||
cls, simple_http_resources=simple_http_resources)
|
cls, simple_http_resources=simple_http_resources,
|
||||||
|
timeout=timeout)
|
||||||
|
|
||||||
|
|
||||||
def simple_tls_sni_01_server(cli_args, forever=True):
|
class _BaseRequestHandlerWithLogging(socketserver.BaseRequestHandler):
|
||||||
"""Run simple standalone TLSSNI01 server."""
|
"""BaseRequestHandler with logging."""
|
||||||
logging.basicConfig(level=logging.DEBUG)
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser()
|
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
||||||
parser.add_argument(
|
"""Log arbitrary message."""
|
||||||
"-p", "--port", default=0, help="Port to serve at. By default "
|
logger.debug("%s - - %s", self.client_address[0], format % args)
|
||||||
"picks random free port.")
|
|
||||||
args = parser.parse_args(cli_args[1:])
|
|
||||||
|
|
||||||
certs = {}
|
def handle(self):
|
||||||
|
"""Handle request."""
|
||||||
_, hosts, _ = next(os.walk('.')) # type: ignore # https://github.com/python/mypy/issues/465
|
self.log_message("Incoming request")
|
||||||
for host in hosts:
|
socketserver.BaseRequestHandler.handle(self)
|
||||||
with open(os.path.join(host, "cert.pem")) as cert_file:
|
|
||||||
cert_contents = cert_file.read()
|
|
||||||
with open(os.path.join(host, "key.pem")) as key_file:
|
|
||||||
key_contents = key_file.read()
|
|
||||||
certs[host.encode()] = (
|
|
||||||
OpenSSL.crypto.load_privatekey(
|
|
||||||
OpenSSL.crypto.FILETYPE_PEM, key_contents),
|
|
||||||
OpenSSL.crypto.load_certificate(
|
|
||||||
OpenSSL.crypto.FILETYPE_PEM, cert_contents))
|
|
||||||
|
|
||||||
server = TLSSNI01Server(('', int(args.port)), certs=certs)
|
|
||||||
logger.info("Serving at https://%s:%s...", *server.socket.getsockname()[:2])
|
|
||||||
if forever: # pragma: no cover
|
|
||||||
server.serve_forever()
|
|
||||||
else:
|
|
||||||
server.handle_request()
|
|
||||||
|
|
||||||
|
|
||||||
# Patching ourselves to warn about TLS-SNI challenge deprecation and removal.
|
|
||||||
sys.modules[__name__] = _TLSSNI01DeprecationModule(sys.modules[__name__])
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
sys.exit(simple_tls_sni_01_server(sys.argv)) # pragma: no cover
|
|
||||||
|
|||||||
@@ -12,10 +12,8 @@
|
|||||||
# All configuration values have a default; values that are commented out
|
# All configuration values have a default; values that are commented out
|
||||||
# serve to show the default.
|
# serve to show the default.
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
import os
|
||||||
import shlex
|
import sys
|
||||||
|
|
||||||
|
|
||||||
here = os.path.abspath(os.path.dirname(__file__))
|
here = os.path.abspath(os.path.dirname(__file__))
|
||||||
|
|
||||||
@@ -42,7 +40,7 @@ extensions = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
autodoc_member_order = 'bysource'
|
autodoc_member_order = 'bysource'
|
||||||
autodoc_default_flags = ['show-inheritance', 'private-members']
|
autodoc_default_flags = ['show-inheritance']
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
templates_path = ['_templates']
|
templates_path = ['_templates']
|
||||||
@@ -114,7 +112,7 @@ pygments_style = 'sphinx'
|
|||||||
#keep_warnings = False
|
#keep_warnings = False
|
||||||
|
|
||||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||||
todo_include_todos = True
|
todo_include_todos = False
|
||||||
|
|
||||||
|
|
||||||
# -- Options for HTML output ----------------------------------------------
|
# -- Options for HTML output ----------------------------------------------
|
||||||
|
|||||||
@@ -26,8 +26,10 @@ Workflow:
|
|||||||
- Deactivate Account
|
- Deactivate Account
|
||||||
"""
|
"""
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
|
||||||
from cryptography.hazmat.backends import default_backend
|
from cryptography.hazmat.backends import default_backend
|
||||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||||
|
import josepy as jose
|
||||||
import OpenSSL
|
import OpenSSL
|
||||||
|
|
||||||
from acme import challenges
|
from acme import challenges
|
||||||
@@ -36,7 +38,6 @@ from acme import crypto_util
|
|||||||
from acme import errors
|
from acme import errors
|
||||||
from acme import messages
|
from acme import messages
|
||||||
from acme import standalone
|
from acme import standalone
|
||||||
import josepy as jose
|
|
||||||
|
|
||||||
# Constants:
|
# Constants:
|
||||||
|
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
# readthedocs.org gives no way to change the install command to "pip
|
# readthedocs.org gives no way to change the install command to "pip
|
||||||
# install -e .[docs]" (that would in turn install documentation
|
# install -e acme[docs]" (that would in turn install documentation
|
||||||
# dependencies), but it allows to specify a requirements.txt file at
|
# dependencies), but it allows to specify a requirements.txt file at
|
||||||
# https://readthedocs.org/dashboard/letsencrypt/advanced/ (c.f. #259)
|
# https://readthedocs.org/dashboard/letsencrypt/advanced/ (c.f. #259)
|
||||||
|
|
||||||
# Although ReadTheDocs certainly doesn't need to install the project
|
# Although ReadTheDocs certainly doesn't need to install the project
|
||||||
# in --editable mode (-e), just "pip install .[docs]" does not work as
|
# in --editable mode (-e), just "pip install acme[docs]" does not work as
|
||||||
# expected and "pip install -e .[docs]" must be used instead
|
# expected and "pip install -e acme[docs]" must be used instead
|
||||||
|
|
||||||
-e acme[docs]
|
-e acme[docs]
|
||||||
|
|||||||
@@ -1,9 +1,12 @@
|
|||||||
from setuptools import setup
|
from distutils.version import StrictVersion
|
||||||
from setuptools import find_packages
|
|
||||||
from setuptools.command.test import test as TestCommand
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
version = '0.40.0.dev0'
|
from setuptools import __version__ as setuptools_version
|
||||||
|
from setuptools import find_packages
|
||||||
|
from setuptools import setup
|
||||||
|
from setuptools.command.test import test as TestCommand
|
||||||
|
|
||||||
|
version = '1.4.0.dev0'
|
||||||
|
|
||||||
# Please update tox.ini when modifying dependency version requirements
|
# Please update tox.ini when modifying dependency version requirements
|
||||||
install_requires = [
|
install_requires = [
|
||||||
@@ -15,7 +18,6 @@ install_requires = [
|
|||||||
# https://github.com/certbot/josepy/issues/13.
|
# https://github.com/certbot/josepy/issues/13.
|
||||||
'josepy>=1.1.0',
|
'josepy>=1.1.0',
|
||||||
# Connection.set_tlsext_host_name (>=0.13)
|
# Connection.set_tlsext_host_name (>=0.13)
|
||||||
'mock',
|
|
||||||
'PyOpenSSL>=0.13.1',
|
'PyOpenSSL>=0.13.1',
|
||||||
'pyrfc3339',
|
'pyrfc3339',
|
||||||
'pytz',
|
'pytz',
|
||||||
@@ -25,6 +27,15 @@ install_requires = [
|
|||||||
'six>=1.9.0', # needed for python_2_unicode_compatible
|
'six>=1.9.0', # needed for python_2_unicode_compatible
|
||||||
]
|
]
|
||||||
|
|
||||||
|
setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
|
||||||
|
if setuptools_known_environment_markers:
|
||||||
|
install_requires.append('mock ; python_version < "3.3"')
|
||||||
|
elif 'bdist_wheel' in sys.argv[1:]:
|
||||||
|
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
|
||||||
|
'of setuptools. Version 36.2+ of setuptools is required.')
|
||||||
|
elif sys.version_info < (3,3):
|
||||||
|
install_requires.append('mock')
|
||||||
|
|
||||||
dev_extras = [
|
dev_extras = [
|
||||||
'pytest',
|
'pytest',
|
||||||
'pytest-xdist',
|
'pytest-xdist',
|
||||||
@@ -60,7 +71,7 @@ setup(
|
|||||||
author="Certbot Project",
|
author="Certbot Project",
|
||||||
author_email='client-dev@letsencrypt.org',
|
author_email='client-dev@letsencrypt.org',
|
||||||
license='Apache License 2.0',
|
license='Apache License 2.0',
|
||||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
|
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
|
||||||
classifiers=[
|
classifiers=[
|
||||||
'Development Status :: 5 - Production/Stable',
|
'Development Status :: 5 - Production/Stable',
|
||||||
'Intended Audience :: Developers',
|
'Intended Audience :: Developers',
|
||||||
@@ -69,7 +80,6 @@ setup(
|
|||||||
'Programming Language :: Python :: 2',
|
'Programming Language :: Python :: 2',
|
||||||
'Programming Language :: Python :: 2.7',
|
'Programming Language :: Python :: 2.7',
|
||||||
'Programming Language :: Python :: 3',
|
'Programming Language :: Python :: 3',
|
||||||
'Programming Language :: Python :: 3.4',
|
|
||||||
'Programming Language :: Python :: 3.5',
|
'Programming Language :: Python :: 3.5',
|
||||||
'Programming Language :: Python :: 3.6',
|
'Programming Language :: Python :: 3.6',
|
||||||
'Programming Language :: Python :: 3.7',
|
'Programming Language :: Python :: 3.7',
|
||||||
|
|||||||
@@ -2,14 +2,17 @@
|
|||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
import mock
|
|
||||||
import OpenSSL
|
import OpenSSL
|
||||||
|
try:
|
||||||
|
import mock
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
from unittest import mock # type: ignore
|
||||||
import requests
|
import requests
|
||||||
|
from six.moves.urllib import parse as urllib_parse
|
||||||
from six.moves.urllib import parse as urllib_parse # pylint: disable=relative-import
|
|
||||||
|
|
||||||
from acme import errors
|
from acme import errors
|
||||||
from acme import test_util
|
|
||||||
|
import test_util
|
||||||
|
|
||||||
CERT = test_util.load_comparable_cert('cert.pem')
|
CERT = test_util.load_comparable_cert('cert.pem')
|
||||||
KEY = jose.JWKRSA(key=test_util.load_rsa_private_key('rsa512_key.pem'))
|
KEY = jose.JWKRSA(key=test_util.load_rsa_private_key('rsa512_key.pem'))
|
||||||
@@ -21,7 +24,6 @@ class ChallengeTest(unittest.TestCase):
|
|||||||
from acme.challenges import Challenge
|
from acme.challenges import Challenge
|
||||||
from acme.challenges import UnrecognizedChallenge
|
from acme.challenges import UnrecognizedChallenge
|
||||||
chall = UnrecognizedChallenge({"type": "foo"})
|
chall = UnrecognizedChallenge({"type": "foo"})
|
||||||
# pylint: disable=no-member
|
|
||||||
self.assertEqual(chall, Challenge.from_json(chall.jobj))
|
self.assertEqual(chall, Challenge.from_json(chall.jobj))
|
||||||
|
|
||||||
|
|
||||||
@@ -77,7 +79,6 @@ class KeyAuthorizationChallengeResponseTest(unittest.TestCase):
|
|||||||
|
|
||||||
|
|
||||||
class DNS01ResponseTest(unittest.TestCase):
|
class DNS01ResponseTest(unittest.TestCase):
|
||||||
# pylint: disable=too-many-instance-attributes
|
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
from acme.challenges import DNS01Response
|
from acme.challenges import DNS01Response
|
||||||
@@ -149,7 +150,6 @@ class DNS01Test(unittest.TestCase):
|
|||||||
|
|
||||||
|
|
||||||
class HTTP01ResponseTest(unittest.TestCase):
|
class HTTP01ResponseTest(unittest.TestCase):
|
||||||
# pylint: disable=too-many-instance-attributes
|
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
from acme.challenges import HTTP01Response
|
from acme.challenges import HTTP01Response
|
||||||
@@ -187,7 +187,7 @@ class HTTP01ResponseTest(unittest.TestCase):
|
|||||||
mock_get.return_value = mock.MagicMock(text=validation)
|
mock_get.return_value = mock.MagicMock(text=validation)
|
||||||
self.assertTrue(self.response.simple_verify(
|
self.assertTrue(self.response.simple_verify(
|
||||||
self.chall, "local", KEY.public_key()))
|
self.chall, "local", KEY.public_key()))
|
||||||
mock_get.assert_called_once_with(self.chall.uri("local"))
|
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)
|
||||||
|
|
||||||
@mock.patch("acme.challenges.requests.get")
|
@mock.patch("acme.challenges.requests.get")
|
||||||
def test_simple_verify_bad_validation(self, mock_get):
|
def test_simple_verify_bad_validation(self, mock_get):
|
||||||
@@ -203,7 +203,7 @@ class HTTP01ResponseTest(unittest.TestCase):
|
|||||||
HTTP01Response.WHITESPACE_CUTSET))
|
HTTP01Response.WHITESPACE_CUTSET))
|
||||||
self.assertTrue(self.response.simple_verify(
|
self.assertTrue(self.response.simple_verify(
|
||||||
self.chall, "local", KEY.public_key()))
|
self.chall, "local", KEY.public_key()))
|
||||||
mock_get.assert_called_once_with(self.chall.uri("local"))
|
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)
|
||||||
|
|
||||||
@mock.patch("acme.challenges.requests.get")
|
@mock.patch("acme.challenges.requests.get")
|
||||||
def test_simple_verify_connection_error(self, mock_get):
|
def test_simple_verify_connection_error(self, mock_get):
|
||||||
@@ -259,43 +259,68 @@ class HTTP01Test(unittest.TestCase):
|
|||||||
self.msg.update(token=b'..').good_token)
|
self.msg.update(token=b'..').good_token)
|
||||||
|
|
||||||
|
|
||||||
class TLSSNI01ResponseTest(unittest.TestCase):
|
class TLSALPN01ResponseTest(unittest.TestCase):
|
||||||
# pylint: disable=too-many-instance-attributes
|
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
from acme.challenges import TLSSNI01
|
from acme.challenges import TLSALPN01
|
||||||
self.chall = TLSSNI01(
|
self.chall = TLSALPN01(
|
||||||
token=jose.b64decode(b'a82d5ff8ef740d12881f6d3c2277ab2e'))
|
token=jose.b64decode(b'a82d5ff8ef740d12881f6d3c2277ab2e'))
|
||||||
|
self.domain = u'example.com'
|
||||||
|
self.domain2 = u'example2.com'
|
||||||
|
|
||||||
self.response = self.chall.response(KEY)
|
self.response = self.chall.response(KEY)
|
||||||
self.jmsg = {
|
self.jmsg = {
|
||||||
'resource': 'challenge',
|
'resource': 'challenge',
|
||||||
'type': 'tls-sni-01',
|
'type': 'tls-alpn-01',
|
||||||
'keyAuthorization': self.response.key_authorization,
|
'keyAuthorization': self.response.key_authorization,
|
||||||
}
|
}
|
||||||
|
|
||||||
# pylint: disable=invalid-name
|
|
||||||
label1 = b'dc38d9c3fa1a4fdcc3a5501f2d38583f'
|
|
||||||
label2 = b'b7793728f084394f2a1afd459556bb5c'
|
|
||||||
self.z = label1 + label2
|
|
||||||
self.z_domain = label1 + b'.' + label2 + b'.acme.invalid'
|
|
||||||
self.domain = 'foo.com'
|
|
||||||
|
|
||||||
def test_z_and_domain(self):
|
|
||||||
self.assertEqual(self.z, self.response.z)
|
|
||||||
self.assertEqual(self.z_domain, self.response.z_domain)
|
|
||||||
|
|
||||||
def test_to_partial_json(self):
|
def test_to_partial_json(self):
|
||||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||||
self.response.to_partial_json())
|
self.response.to_partial_json())
|
||||||
|
|
||||||
def test_from_json(self):
|
def test_from_json(self):
|
||||||
from acme.challenges import TLSSNI01Response
|
from acme.challenges import TLSALPN01Response
|
||||||
self.assertEqual(self.response, TLSSNI01Response.from_json(self.jmsg))
|
self.assertEqual(self.response, TLSALPN01Response.from_json(self.jmsg))
|
||||||
|
|
||||||
def test_from_json_hashable(self):
|
def test_from_json_hashable(self):
|
||||||
from acme.challenges import TLSSNI01Response
|
from acme.challenges import TLSALPN01Response
|
||||||
hash(TLSSNI01Response.from_json(self.jmsg))
|
hash(TLSALPN01Response.from_json(self.jmsg))
|
||||||
|
|
||||||
|
def test_gen_verify_cert(self):
|
||||||
|
key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
|
||||||
|
cert, key2 = self.response.gen_cert(self.domain, key1)
|
||||||
|
self.assertEqual(key1, key2)
|
||||||
|
self.assertTrue(self.response.verify_cert(self.domain, cert))
|
||||||
|
|
||||||
|
def test_gen_verify_cert_gen_key(self):
|
||||||
|
cert, key = self.response.gen_cert(self.domain)
|
||||||
|
self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
|
||||||
|
self.assertTrue(self.response.verify_cert(self.domain, cert))
|
||||||
|
|
||||||
|
def test_verify_bad_cert(self):
|
||||||
|
self.assertFalse(self.response.verify_cert(self.domain,
|
||||||
|
test_util.load_cert('cert.pem')))
|
||||||
|
|
||||||
|
def test_verify_bad_domain(self):
|
||||||
|
key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
|
||||||
|
cert, key2 = self.response.gen_cert(self.domain, key1)
|
||||||
|
self.assertEqual(key1, key2)
|
||||||
|
self.assertFalse(self.response.verify_cert(self.domain2, cert))
|
||||||
|
|
||||||
|
def test_simple_verify_bad_key_authorization(self):
|
||||||
|
key2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
|
||||||
|
self.response.simple_verify(self.chall, "local", key2.public_key())
|
||||||
|
|
||||||
|
@mock.patch('acme.challenges.TLSALPN01Response.verify_cert', autospec=True)
|
||||||
|
def test_simple_verify(self, mock_verify_cert):
|
||||||
|
mock_verify_cert.return_value = mock.sentinel.verification
|
||||||
|
self.assertEqual(
|
||||||
|
mock.sentinel.verification, self.response.simple_verify(
|
||||||
|
self.chall, self.domain, KEY.public_key(),
|
||||||
|
cert=mock.sentinel.cert))
|
||||||
|
mock_verify_cert.assert_called_once_with(
|
||||||
|
self.response, self.domain, mock.sentinel.cert)
|
||||||
|
|
||||||
@mock.patch('acme.challenges.socket.gethostbyname')
|
@mock.patch('acme.challenges.socket.gethostbyname')
|
||||||
@mock.patch('acme.challenges.crypto_util.probe_sni')
|
@mock.patch('acme.challenges.crypto_util.probe_sni')
|
||||||
@@ -304,134 +329,21 @@ class TLSSNI01ResponseTest(unittest.TestCase):
|
|||||||
self.response.probe_cert('foo.com')
|
self.response.probe_cert('foo.com')
|
||||||
mock_gethostbyname.assert_called_once_with('foo.com')
|
mock_gethostbyname.assert_called_once_with('foo.com')
|
||||||
mock_probe_sni.assert_called_once_with(
|
mock_probe_sni.assert_called_once_with(
|
||||||
host='127.0.0.1', port=self.response.PORT,
|
host='127.0.0.1', port=self.response.PORT, name='foo.com',
|
||||||
name=self.z_domain)
|
alpn_protocols=['acme-tls/1'])
|
||||||
|
|
||||||
self.response.probe_cert('foo.com', host='8.8.8.8')
|
self.response.probe_cert('foo.com', host='8.8.8.8')
|
||||||
mock_probe_sni.assert_called_with(
|
mock_probe_sni.assert_called_with(
|
||||||
host='8.8.8.8', port=mock.ANY, name=mock.ANY)
|
host='8.8.8.8', port=mock.ANY, name='foo.com',
|
||||||
|
alpn_protocols=['acme-tls/1'])
|
||||||
|
|
||||||
self.response.probe_cert('foo.com', port=1234)
|
@mock.patch('acme.challenges.TLSALPN01Response.probe_cert')
|
||||||
mock_probe_sni.assert_called_with(
|
|
||||||
host=mock.ANY, port=1234, name=mock.ANY)
|
|
||||||
|
|
||||||
self.response.probe_cert('foo.com', bar='baz')
|
|
||||||
mock_probe_sni.assert_called_with(
|
|
||||||
host=mock.ANY, port=mock.ANY, name=mock.ANY, bar='baz')
|
|
||||||
|
|
||||||
self.response.probe_cert('foo.com', name=b'xxx')
|
|
||||||
mock_probe_sni.assert_called_with(
|
|
||||||
host=mock.ANY, port=mock.ANY,
|
|
||||||
name=self.z_domain)
|
|
||||||
|
|
||||||
def test_gen_verify_cert(self):
|
|
||||||
key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
|
|
||||||
cert, key2 = self.response.gen_cert(key1)
|
|
||||||
self.assertEqual(key1, key2)
|
|
||||||
self.assertTrue(self.response.verify_cert(cert))
|
|
||||||
|
|
||||||
def test_gen_verify_cert_gen_key(self):
|
|
||||||
cert, key = self.response.gen_cert()
|
|
||||||
self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
|
|
||||||
self.assertTrue(self.response.verify_cert(cert))
|
|
||||||
|
|
||||||
def test_verify_bad_cert(self):
|
|
||||||
self.assertFalse(self.response.verify_cert(
|
|
||||||
test_util.load_cert('cert.pem')))
|
|
||||||
|
|
||||||
def test_simple_verify_bad_key_authorization(self):
|
|
||||||
key2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
|
|
||||||
self.response.simple_verify(self.chall, "local", key2.public_key())
|
|
||||||
|
|
||||||
@mock.patch('acme.challenges.TLSSNI01Response.verify_cert', autospec=True)
|
|
||||||
def test_simple_verify(self, mock_verify_cert):
|
|
||||||
mock_verify_cert.return_value = mock.sentinel.verification
|
|
||||||
self.assertEqual(
|
|
||||||
mock.sentinel.verification, self.response.simple_verify(
|
|
||||||
self.chall, self.domain, KEY.public_key(),
|
|
||||||
cert=mock.sentinel.cert))
|
|
||||||
mock_verify_cert.assert_called_once_with(
|
|
||||||
self.response, mock.sentinel.cert)
|
|
||||||
|
|
||||||
@mock.patch('acme.challenges.TLSSNI01Response.probe_cert')
|
|
||||||
def test_simple_verify_false_on_probe_error(self, mock_probe_cert):
|
def test_simple_verify_false_on_probe_error(self, mock_probe_cert):
|
||||||
mock_probe_cert.side_effect = errors.Error
|
mock_probe_cert.side_effect = errors.Error
|
||||||
self.assertFalse(self.response.simple_verify(
|
self.assertFalse(self.response.simple_verify(
|
||||||
self.chall, self.domain, KEY.public_key()))
|
self.chall, self.domain, KEY.public_key()))
|
||||||
|
|
||||||
|
|
||||||
class TLSSNI01Test(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
self.jmsg = {
|
|
||||||
'type': 'tls-sni-01',
|
|
||||||
'token': 'a82d5ff8ef740d12881f6d3c2277ab2e',
|
|
||||||
}
|
|
||||||
from acme.challenges import TLSSNI01
|
|
||||||
self.msg = TLSSNI01(
|
|
||||||
token=jose.b64decode('a82d5ff8ef740d12881f6d3c2277ab2e'))
|
|
||||||
|
|
||||||
def test_to_partial_json(self):
|
|
||||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
|
||||||
|
|
||||||
def test_from_json(self):
|
|
||||||
from acme.challenges import TLSSNI01
|
|
||||||
self.assertEqual(self.msg, TLSSNI01.from_json(self.jmsg))
|
|
||||||
|
|
||||||
def test_from_json_hashable(self):
|
|
||||||
from acme.challenges import TLSSNI01
|
|
||||||
hash(TLSSNI01.from_json(self.jmsg))
|
|
||||||
|
|
||||||
def test_from_json_invalid_token_length(self):
|
|
||||||
from acme.challenges import TLSSNI01
|
|
||||||
self.jmsg['token'] = jose.encode_b64jose(b'abcd')
|
|
||||||
self.assertRaises(
|
|
||||||
jose.DeserializationError, TLSSNI01.from_json, self.jmsg)
|
|
||||||
|
|
||||||
@mock.patch('acme.challenges.TLSSNI01Response.gen_cert')
|
|
||||||
def test_validation(self, mock_gen_cert):
|
|
||||||
mock_gen_cert.return_value = ('cert', 'key')
|
|
||||||
self.assertEqual(('cert', 'key'), self.msg.validation(
|
|
||||||
KEY, cert_key=mock.sentinel.cert_key))
|
|
||||||
mock_gen_cert.assert_called_once_with(key=mock.sentinel.cert_key)
|
|
||||||
|
|
||||||
def test_deprecation_message(self):
|
|
||||||
with mock.patch('acme.warnings.warn') as mock_warn:
|
|
||||||
from acme.challenges import TLSSNI01
|
|
||||||
assert TLSSNI01
|
|
||||||
self.assertEqual(mock_warn.call_count, 1)
|
|
||||||
self.assertTrue('deprecated' in mock_warn.call_args[0][0])
|
|
||||||
|
|
||||||
|
|
||||||
class TLSALPN01ResponseTest(unittest.TestCase):
|
|
||||||
# pylint: disable=too-many-instance-attributes
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
from acme.challenges import TLSALPN01Response
|
|
||||||
self.msg = TLSALPN01Response(key_authorization=u'foo')
|
|
||||||
self.jmsg = {
|
|
||||||
'resource': 'challenge',
|
|
||||||
'type': 'tls-alpn-01',
|
|
||||||
'keyAuthorization': u'foo',
|
|
||||||
}
|
|
||||||
|
|
||||||
from acme.challenges import TLSALPN01
|
|
||||||
self.chall = TLSALPN01(token=(b'x' * 16))
|
|
||||||
self.response = self.chall.response(KEY)
|
|
||||||
|
|
||||||
def test_to_partial_json(self):
|
|
||||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
|
||||||
self.msg.to_partial_json())
|
|
||||||
|
|
||||||
def test_from_json(self):
|
|
||||||
from acme.challenges import TLSALPN01Response
|
|
||||||
self.assertEqual(self.msg, TLSALPN01Response.from_json(self.jmsg))
|
|
||||||
|
|
||||||
def test_from_json_hashable(self):
|
|
||||||
from acme.challenges import TLSALPN01Response
|
|
||||||
hash(TLSALPN01Response.from_json(self.jmsg))
|
|
||||||
|
|
||||||
|
|
||||||
class TLSALPN01Test(unittest.TestCase):
|
class TLSALPN01Test(unittest.TestCase):
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
@@ -460,8 +372,13 @@ class TLSALPN01Test(unittest.TestCase):
|
|||||||
self.assertRaises(
|
self.assertRaises(
|
||||||
jose.DeserializationError, TLSALPN01.from_json, self.jmsg)
|
jose.DeserializationError, TLSALPN01.from_json, self.jmsg)
|
||||||
|
|
||||||
def test_validation(self):
|
@mock.patch('acme.challenges.TLSALPN01Response.gen_cert')
|
||||||
self.assertRaises(NotImplementedError, self.msg.validation, KEY)
|
def test_validation(self, mock_gen_cert):
|
||||||
|
mock_gen_cert.return_value = ('cert', 'key')
|
||||||
|
self.assertEqual(('cert', 'key'), self.msg.validation(
|
||||||
|
KEY, cert_key=mock.sentinel.cert_key, domain=mock.sentinel.domain))
|
||||||
|
mock_gen_cert.assert_called_once_with(key=mock.sentinel.cert_key,
|
||||||
|
domain=mock.sentinel.domain)
|
||||||
|
|
||||||
|
|
||||||
class DNSTest(unittest.TestCase):
|
class DNSTest(unittest.TestCase):
|
||||||
@@ -564,5 +481,18 @@ class DNSResponseTest(unittest.TestCase):
|
|||||||
self.msg.check_validation(self.chall, KEY.public_key()))
|
self.msg.check_validation(self.chall, KEY.public_key()))
|
||||||
|
|
||||||
|
|
||||||
|
class JWSPayloadRFC8555Compliant(unittest.TestCase):
|
||||||
|
"""Test for RFC8555 compliance of JWS generated from resources/challenges"""
|
||||||
|
def test_challenge_payload(self):
|
||||||
|
from acme.challenges import HTTP01Response
|
||||||
|
|
||||||
|
challenge_body = HTTP01Response()
|
||||||
|
challenge_body.le_acme_version = 2
|
||||||
|
|
||||||
|
jobj = challenge_body.json_dumps(indent=2).encode()
|
||||||
|
# RFC8555 states that challenge responses must have an empty payload.
|
||||||
|
self.assertEqual(jobj, b'{}')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main() # pragma: no cover
|
unittest.main() # pragma: no cover
|
||||||
@@ -5,21 +5,22 @@ import datetime
|
|||||||
import json
|
import json
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from six.moves import http_client # pylint: disable=import-error
|
|
||||||
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
import mock
|
try:
|
||||||
|
import mock
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
from unittest import mock # type: ignore
|
||||||
import OpenSSL
|
import OpenSSL
|
||||||
import requests
|
import requests
|
||||||
|
from six.moves import http_client # pylint: disable=import-error
|
||||||
|
|
||||||
from acme import challenges
|
from acme import challenges
|
||||||
from acme import errors
|
from acme import errors
|
||||||
from acme import jws as acme_jws
|
from acme import jws as acme_jws
|
||||||
from acme import messages
|
from acme import messages
|
||||||
from acme import messages_test
|
from acme.mixins import VersionedLEACMEMixin
|
||||||
from acme import test_util
|
import messages_test
|
||||||
from acme.magic_typing import Dict # pylint: disable=unused-import, no-name-in-module
|
import test_util
|
||||||
|
|
||||||
|
|
||||||
CERT_DER = test_util.load_vector('cert.der')
|
CERT_DER = test_util.load_vector('cert.der')
|
||||||
CERT_SAN_PEM = test_util.load_vector('cert-san.pem')
|
CERT_SAN_PEM = test_util.load_vector('cert-san.pem')
|
||||||
@@ -63,7 +64,7 @@ class ClientTestBase(unittest.TestCase):
|
|||||||
self.contact = ('mailto:cert-admin@example.com', 'tel:+12025551212')
|
self.contact = ('mailto:cert-admin@example.com', 'tel:+12025551212')
|
||||||
reg = messages.Registration(
|
reg = messages.Registration(
|
||||||
contact=self.contact, key=KEY.public_key())
|
contact=self.contact, key=KEY.public_key())
|
||||||
the_arg = dict(reg) # type: Dict
|
the_arg = dict(reg) # type: Dict
|
||||||
self.new_reg = messages.NewRegistration(**the_arg)
|
self.new_reg = messages.NewRegistration(**the_arg)
|
||||||
self.regr = messages.RegistrationResource(
|
self.regr = messages.RegistrationResource(
|
||||||
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
|
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
|
||||||
@@ -318,7 +319,6 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
|
|||||||
|
|
||||||
class ClientTest(ClientTestBase):
|
class ClientTest(ClientTestBase):
|
||||||
"""Tests for acme.client.Client."""
|
"""Tests for acme.client.Client."""
|
||||||
# pylint: disable=too-many-instance-attributes,too-many-public-methods
|
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
super(ClientTest, self).setUp()
|
super(ClientTest, self).setUp()
|
||||||
@@ -888,21 +888,8 @@ class ClientV2Test(ClientTestBase):
|
|||||||
new_nonce_url='https://www.letsencrypt-demo.org/acme/new-nonce')
|
new_nonce_url='https://www.letsencrypt-demo.org/acme/new-nonce')
|
||||||
self.client.net.get.assert_not_called()
|
self.client.net.get.assert_not_called()
|
||||||
|
|
||||||
class FakeError(messages.Error): # pylint: disable=too-many-ancestors
|
|
||||||
"""Fake error to reproduce a malformed request ACME error"""
|
|
||||||
def __init__(self): # pylint: disable=super-init-not-called
|
|
||||||
pass
|
|
||||||
@property
|
|
||||||
def code(self):
|
|
||||||
return 'malformed'
|
|
||||||
self.client.net.post.side_effect = FakeError()
|
|
||||||
|
|
||||||
self.client.poll(self.authzr2) # pylint: disable=protected-access
|
class MockJSONDeSerializable(VersionedLEACMEMixin, jose.JSONDeSerializable):
|
||||||
|
|
||||||
self.client.net.get.assert_called_once_with(self.authzr2.uri)
|
|
||||||
|
|
||||||
|
|
||||||
class MockJSONDeSerializable(jose.JSONDeSerializable):
|
|
||||||
# pylint: disable=missing-docstring
|
# pylint: disable=missing-docstring
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
@@ -917,7 +904,6 @@ class MockJSONDeSerializable(jose.JSONDeSerializable):
|
|||||||
|
|
||||||
class ClientNetworkTest(unittest.TestCase):
|
class ClientNetworkTest(unittest.TestCase):
|
||||||
"""Tests for acme.client.ClientNetwork."""
|
"""Tests for acme.client.ClientNetwork."""
|
||||||
# pylint: disable=too-many-public-methods
|
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.verify_ssl = mock.MagicMock()
|
self.verify_ssl = mock.MagicMock()
|
||||||
@@ -967,8 +953,8 @@ class ClientNetworkTest(unittest.TestCase):
|
|||||||
|
|
||||||
def test_check_response_not_ok_jobj_error(self):
|
def test_check_response_not_ok_jobj_error(self):
|
||||||
self.response.ok = False
|
self.response.ok = False
|
||||||
self.response.json.return_value = messages.Error(
|
self.response.json.return_value = messages.Error.with_code(
|
||||||
detail='foo', typ='serverInternal', title='some title').to_json()
|
'serverInternal', detail='foo', title='some title').to_json()
|
||||||
# pylint: disable=protected-access
|
# pylint: disable=protected-access
|
||||||
self.assertRaises(
|
self.assertRaises(
|
||||||
messages.Error, self.net._check_response, self.response)
|
messages.Error, self.net._check_response, self.response)
|
||||||
@@ -993,10 +979,39 @@ class ClientNetworkTest(unittest.TestCase):
|
|||||||
self.response.json.side_effect = ValueError
|
self.response.json.side_effect = ValueError
|
||||||
for response_ct in [self.net.JSON_CONTENT_TYPE, 'foo']:
|
for response_ct in [self.net.JSON_CONTENT_TYPE, 'foo']:
|
||||||
self.response.headers['Content-Type'] = response_ct
|
self.response.headers['Content-Type'] = response_ct
|
||||||
# pylint: disable=protected-access,no-value-for-parameter
|
# pylint: disable=protected-access
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.response, self.net._check_response(self.response))
|
self.response, self.net._check_response(self.response))
|
||||||
|
|
||||||
|
@mock.patch('acme.client.logger')
|
||||||
|
def test_check_response_ok_ct_with_charset(self, mock_logger):
|
||||||
|
self.response.json.return_value = {}
|
||||||
|
self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
|
||||||
|
# pylint: disable=protected-access
|
||||||
|
self.assertEqual(self.response, self.net._check_response(
|
||||||
|
self.response, content_type='application/json'))
|
||||||
|
try:
|
||||||
|
mock_logger.debug.assert_called_with(
|
||||||
|
'Ignoring wrong Content-Type (%r) for JSON decodable response',
|
||||||
|
'application/json; charset=utf-8'
|
||||||
|
)
|
||||||
|
except AssertionError:
|
||||||
|
return
|
||||||
|
raise AssertionError('Expected Content-Type warning ' #pragma: no cover
|
||||||
|
'to not have been logged')
|
||||||
|
|
||||||
|
@mock.patch('acme.client.logger')
|
||||||
|
def test_check_response_ok_bad_ct(self, mock_logger):
|
||||||
|
self.response.json.return_value = {}
|
||||||
|
self.response.headers['Content-Type'] = 'text/plain'
|
||||||
|
# pylint: disable=protected-access
|
||||||
|
self.assertEqual(self.response, self.net._check_response(
|
||||||
|
self.response, content_type='application/json'))
|
||||||
|
mock_logger.debug.assert_called_with(
|
||||||
|
'Ignoring wrong Content-Type (%r) for JSON decodable response',
|
||||||
|
'text/plain'
|
||||||
|
)
|
||||||
|
|
||||||
def test_check_response_conflict(self):
|
def test_check_response_conflict(self):
|
||||||
self.response.ok = False
|
self.response.ok = False
|
||||||
self.response.status_code = 409
|
self.response.status_code = 409
|
||||||
@@ -1007,7 +1022,7 @@ class ClientNetworkTest(unittest.TestCase):
|
|||||||
self.response.json.return_value = {}
|
self.response.json.return_value = {}
|
||||||
for response_ct in [self.net.JSON_CONTENT_TYPE, 'foo']:
|
for response_ct in [self.net.JSON_CONTENT_TYPE, 'foo']:
|
||||||
self.response.headers['Content-Type'] = response_ct
|
self.response.headers['Content-Type'] = response_ct
|
||||||
# pylint: disable=protected-access,no-value-for-parameter
|
# pylint: disable=protected-access
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.response, self.net._check_response(self.response))
|
self.response, self.net._check_response(self.response))
|
||||||
|
|
||||||
@@ -1123,7 +1138,6 @@ class ClientNetworkTest(unittest.TestCase):
|
|||||||
|
|
||||||
class ClientNetworkWithMockedResponseTest(unittest.TestCase):
|
class ClientNetworkWithMockedResponseTest(unittest.TestCase):
|
||||||
"""Tests for acme.client.ClientNetwork which mock out response."""
|
"""Tests for acme.client.ClientNetwork which mock out response."""
|
||||||
# pylint: disable=too-many-instance-attributes
|
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
from acme.client import ClientNetwork
|
from acme.client import ClientNetwork
|
||||||
@@ -1133,8 +1147,8 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
|
|||||||
self.response.headers = {}
|
self.response.headers = {}
|
||||||
self.response.links = {}
|
self.response.links = {}
|
||||||
self.response.checked = False
|
self.response.checked = False
|
||||||
self.acmev1_nonce_response = mock.MagicMock(ok=False,
|
self.acmev1_nonce_response = mock.MagicMock(
|
||||||
status_code=http_client.METHOD_NOT_ALLOWED)
|
ok=False, status_code=http_client.METHOD_NOT_ALLOWED)
|
||||||
self.acmev1_nonce_response.headers = {}
|
self.acmev1_nonce_response.headers = {}
|
||||||
self.obj = mock.MagicMock()
|
self.obj = mock.MagicMock()
|
||||||
self.wrapped_obj = mock.MagicMock()
|
self.wrapped_obj = mock.MagicMock()
|
||||||
@@ -5,21 +5,18 @@ import threading
|
|||||||
import time
|
import time
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import six
|
|
||||||
from six.moves import socketserver #type: ignore # pylint: disable=import-error
|
|
||||||
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
import OpenSSL
|
import OpenSSL
|
||||||
|
import six
|
||||||
|
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||||
|
|
||||||
from acme import errors
|
from acme import errors
|
||||||
from acme import test_util
|
import test_util
|
||||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
|
||||||
|
|
||||||
|
|
||||||
class SSLSocketAndProbeSNITest(unittest.TestCase):
|
class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||||
"""Tests for acme.crypto_util.SSLSocket/probe_sni."""
|
"""Tests for acme.crypto_util.SSLSocket/probe_sni."""
|
||||||
|
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.cert = test_util.load_comparable_cert('rsa2048_cert.pem')
|
self.cert = test_util.load_comparable_cert('rsa2048_cert.pem')
|
||||||
key = test_util.load_pyopenssl_private_key('rsa2048_key.pem')
|
key = test_util.load_pyopenssl_private_key('rsa2048_key.pem')
|
||||||
@@ -30,17 +27,16 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
|||||||
|
|
||||||
class _TestServer(socketserver.TCPServer):
|
class _TestServer(socketserver.TCPServer):
|
||||||
|
|
||||||
# pylint: disable=too-few-public-methods
|
|
||||||
# six.moves.* | pylint: disable=attribute-defined-outside-init,no-init
|
# six.moves.* | pylint: disable=attribute-defined-outside-init,no-init
|
||||||
|
|
||||||
def server_bind(self): # pylint: disable=missing-docstring
|
def server_bind(self): # pylint: disable=missing-docstring
|
||||||
self.socket = SSLSocket(socket.socket(), certs=certs)
|
self.socket = SSLSocket(socket.socket(),
|
||||||
|
certs)
|
||||||
socketserver.TCPServer.server_bind(self)
|
socketserver.TCPServer.server_bind(self)
|
||||||
|
|
||||||
self.server = _TestServer(('', 0), socketserver.BaseRequestHandler)
|
self.server = _TestServer(('', 0), socketserver.BaseRequestHandler)
|
||||||
self.port = self.server.socket.getsockname()[1]
|
self.port = self.server.socket.getsockname()[1]
|
||||||
self.server_thread = threading.Thread(
|
self.server_thread = threading.Thread(
|
||||||
# pylint: disable=no-member
|
|
||||||
target=self.server.handle_request)
|
target=self.server.handle_request)
|
||||||
|
|
||||||
def tearDown(self):
|
def tearDown(self):
|
||||||
@@ -67,7 +63,7 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
|||||||
|
|
||||||
def test_probe_connection_error(self):
|
def test_probe_connection_error(self):
|
||||||
# pylint has a hard time with six
|
# pylint has a hard time with six
|
||||||
self.server.server_close() # pylint: disable=no-member
|
self.server.server_close()
|
||||||
original_timeout = socket.getdefaulttimeout()
|
original_timeout = socket.getdefaulttimeout()
|
||||||
try:
|
try:
|
||||||
socket.setdefaulttimeout(1)
|
socket.setdefaulttimeout(1)
|
||||||
@@ -76,6 +72,18 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
|||||||
socket.setdefaulttimeout(original_timeout)
|
socket.setdefaulttimeout(original_timeout)
|
||||||
|
|
||||||
|
|
||||||
|
class SSLSocketTest(unittest.TestCase):
|
||||||
|
"""Tests for acme.crypto_util.SSLSocket."""
|
||||||
|
|
||||||
|
def test_ssl_socket_invalid_arguments(self):
|
||||||
|
from acme.crypto_util import SSLSocket
|
||||||
|
with self.assertRaises(ValueError):
|
||||||
|
_ = SSLSocket(None, {'sni': ('key', 'cert')},
|
||||||
|
cert_selection=lambda _: None)
|
||||||
|
with self.assertRaises(ValueError):
|
||||||
|
_ = SSLSocket(None)
|
||||||
|
|
||||||
|
|
||||||
class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
|
class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
|
||||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_all_names."""
|
"""Test for acme.crypto_util._pyopenssl_cert_or_req_all_names."""
|
||||||
|
|
||||||
@@ -1,7 +1,10 @@
|
|||||||
"""Tests for acme.errors."""
|
"""Tests for acme.errors."""
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import mock
|
try:
|
||||||
|
import mock
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
from unittest import mock # type: ignore
|
||||||
|
|
||||||
|
|
||||||
class BadNonceTest(unittest.TestCase):
|
class BadNonceTest(unittest.TestCase):
|
||||||
@@ -35,7 +38,7 @@ class PollErrorTest(unittest.TestCase):
|
|||||||
def setUp(self):
|
def setUp(self):
|
||||||
from acme.errors import PollError
|
from acme.errors import PollError
|
||||||
self.timeout = PollError(
|
self.timeout = PollError(
|
||||||
exhausted=set([mock.sentinel.AR]),
|
exhausted={mock.sentinel.AR},
|
||||||
updated={})
|
updated={})
|
||||||
self.invalid = PollError(exhausted=set(), updated={
|
self.invalid = PollError(exhausted=set(), updated={
|
||||||
mock.sentinel.AR: mock.sentinel.AR2})
|
mock.sentinel.AR: mock.sentinel.AR2})
|
||||||
@@ -2,6 +2,7 @@
|
|||||||
import importlib
|
import importlib
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
|
|
||||||
class JoseTest(unittest.TestCase):
|
class JoseTest(unittest.TestCase):
|
||||||
"""Tests for acme.jose shim."""
|
"""Tests for acme.jose shim."""
|
||||||
|
|
||||||
@@ -20,11 +21,10 @@ class JoseTest(unittest.TestCase):
|
|||||||
|
|
||||||
# We use the imports below with eval, but pylint doesn't
|
# We use the imports below with eval, but pylint doesn't
|
||||||
# understand that.
|
# understand that.
|
||||||
# pylint: disable=eval-used,unused-variable
|
import acme # pylint: disable=unused-import
|
||||||
import acme
|
import josepy # pylint: disable=unused-import
|
||||||
import josepy
|
acme_jose_mod = eval(acme_jose_path) # pylint: disable=eval-used
|
||||||
acme_jose_mod = eval(acme_jose_path)
|
josepy_mod = eval(josepy_path) # pylint: disable=eval-used
|
||||||
josepy_mod = eval(josepy_path)
|
|
||||||
self.assertIs(acme_jose_mod, josepy_mod)
|
self.assertIs(acme_jose_mod, josepy_mod)
|
||||||
self.assertIs(getattr(acme_jose_mod, attribute), getattr(josepy_mod, attribute))
|
self.assertIs(getattr(acme_jose_mod, attribute), getattr(josepy_mod, attribute))
|
||||||
|
|
||||||
@@ -3,8 +3,7 @@ import unittest
|
|||||||
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
|
|
||||||
from acme import test_util
|
import test_util
|
||||||
|
|
||||||
|
|
||||||
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
|
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
|
||||||
|
|
||||||
@@ -2,7 +2,10 @@
|
|||||||
import sys
|
import sys
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import mock
|
try:
|
||||||
|
import mock
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
from unittest import mock # type: ignore
|
||||||
|
|
||||||
|
|
||||||
class MagicTypingTest(unittest.TestCase):
|
class MagicTypingTest(unittest.TestCase):
|
||||||
@@ -18,7 +21,7 @@ class MagicTypingTest(unittest.TestCase):
|
|||||||
sys.modules['typing'] = typing_class_mock
|
sys.modules['typing'] = typing_class_mock
|
||||||
if 'acme.magic_typing' in sys.modules:
|
if 'acme.magic_typing' in sys.modules:
|
||||||
del sys.modules['acme.magic_typing'] # pragma: no cover
|
del sys.modules['acme.magic_typing'] # pragma: no cover
|
||||||
from acme.magic_typing import Text # pylint: disable=no-name-in-module
|
from acme.magic_typing import Text
|
||||||
self.assertEqual(Text, text_mock)
|
self.assertEqual(Text, text_mock)
|
||||||
del sys.modules['acme.magic_typing']
|
del sys.modules['acme.magic_typing']
|
||||||
sys.modules['typing'] = temp_typing
|
sys.modules['typing'] = temp_typing
|
||||||
@@ -31,7 +34,7 @@ class MagicTypingTest(unittest.TestCase):
|
|||||||
sys.modules['typing'] = None
|
sys.modules['typing'] = None
|
||||||
if 'acme.magic_typing' in sys.modules:
|
if 'acme.magic_typing' in sys.modules:
|
||||||
del sys.modules['acme.magic_typing'] # pragma: no cover
|
del sys.modules['acme.magic_typing'] # pragma: no cover
|
||||||
from acme.magic_typing import Text # pylint: disable=no-name-in-module
|
from acme.magic_typing import Text
|
||||||
self.assertTrue(Text is None)
|
self.assertTrue(Text is None)
|
||||||
del sys.modules['acme.magic_typing']
|
del sys.modules['acme.magic_typing']
|
||||||
sys.modules['typing'] = temp_typing
|
sys.modules['typing'] = temp_typing
|
||||||
@@ -2,12 +2,13 @@
|
|||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
import mock
|
try:
|
||||||
|
import mock
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
from unittest import mock # type: ignore
|
||||||
|
|
||||||
from acme import challenges
|
from acme import challenges
|
||||||
from acme import test_util
|
import test_util
|
||||||
from acme.magic_typing import Dict # pylint: disable=unused-import, no-name-in-module
|
|
||||||
|
|
||||||
|
|
||||||
CERT = test_util.load_comparable_cert('cert.der')
|
CERT = test_util.load_comparable_cert('cert.der')
|
||||||
CSR = test_util.load_comparable_csr('csr.der')
|
CSR = test_util.load_comparable_csr('csr.der')
|
||||||
@@ -19,8 +20,7 @@ class ErrorTest(unittest.TestCase):
|
|||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
from acme.messages import Error, ERROR_PREFIX
|
from acme.messages import Error, ERROR_PREFIX
|
||||||
self.error = Error(
|
self.error = Error.with_code('malformed', detail='foo', title='title')
|
||||||
detail='foo', typ=ERROR_PREFIX + 'malformed', title='title')
|
|
||||||
self.jobj = {
|
self.jobj = {
|
||||||
'detail': 'foo',
|
'detail': 'foo',
|
||||||
'title': 'some title',
|
'title': 'some title',
|
||||||
@@ -28,7 +28,6 @@ class ErrorTest(unittest.TestCase):
|
|||||||
}
|
}
|
||||||
self.error_custom = Error(typ='custom', detail='bar')
|
self.error_custom = Error(typ='custom', detail='bar')
|
||||||
self.empty_error = Error()
|
self.empty_error = Error()
|
||||||
self.jobj_custom = {'type': 'custom', 'detail': 'bar'}
|
|
||||||
|
|
||||||
def test_default_typ(self):
|
def test_default_typ(self):
|
||||||
from acme.messages import Error
|
from acme.messages import Error
|
||||||
@@ -43,8 +42,7 @@ class ErrorTest(unittest.TestCase):
|
|||||||
hash(Error.from_json(self.error.to_json()))
|
hash(Error.from_json(self.error.to_json()))
|
||||||
|
|
||||||
def test_description(self):
|
def test_description(self):
|
||||||
self.assertEqual(
|
self.assertEqual('The request message was malformed', self.error.description)
|
||||||
'The request message was malformed', self.error.description)
|
|
||||||
self.assertTrue(self.error_custom.description is None)
|
self.assertTrue(self.error_custom.description is None)
|
||||||
|
|
||||||
def test_code(self):
|
def test_code(self):
|
||||||
@@ -54,17 +52,17 @@ class ErrorTest(unittest.TestCase):
|
|||||||
self.assertEqual(None, Error().code)
|
self.assertEqual(None, Error().code)
|
||||||
|
|
||||||
def test_is_acme_error(self):
|
def test_is_acme_error(self):
|
||||||
from acme.messages import is_acme_error
|
from acme.messages import is_acme_error, Error
|
||||||
self.assertTrue(is_acme_error(self.error))
|
self.assertTrue(is_acme_error(self.error))
|
||||||
self.assertFalse(is_acme_error(self.error_custom))
|
self.assertFalse(is_acme_error(self.error_custom))
|
||||||
|
self.assertFalse(is_acme_error(Error()))
|
||||||
self.assertFalse(is_acme_error(self.empty_error))
|
self.assertFalse(is_acme_error(self.empty_error))
|
||||||
self.assertFalse(is_acme_error("must pet all the {dogs|rabbits}"))
|
self.assertFalse(is_acme_error("must pet all the {dogs|rabbits}"))
|
||||||
|
|
||||||
def test_unicode_error(self):
|
def test_unicode_error(self):
|
||||||
from acme.messages import Error, ERROR_PREFIX, is_acme_error
|
from acme.messages import Error, is_acme_error
|
||||||
arabic_error = Error(
|
arabic_error = Error.with_code(
|
||||||
detail=u'\u0639\u062f\u0627\u0644\u0629', typ=ERROR_PREFIX + 'malformed',
|
'malformed', detail=u'\u0639\u062f\u0627\u0644\u0629', title='title')
|
||||||
title='title')
|
|
||||||
self.assertTrue(is_acme_error(arabic_error))
|
self.assertTrue(is_acme_error(arabic_error))
|
||||||
|
|
||||||
def test_with_code(self):
|
def test_with_code(self):
|
||||||
@@ -305,8 +303,7 @@ class ChallengeBodyTest(unittest.TestCase):
|
|||||||
from acme.messages import Error
|
from acme.messages import Error
|
||||||
from acme.messages import STATUS_INVALID
|
from acme.messages import STATUS_INVALID
|
||||||
self.status = STATUS_INVALID
|
self.status = STATUS_INVALID
|
||||||
error = Error(typ='urn:ietf:params:acme:error:serverInternal',
|
error = Error.with_code('serverInternal', detail='Unable to communicate with DNS server')
|
||||||
detail='Unable to communicate with DNS server')
|
|
||||||
self.challb = ChallengeBody(
|
self.challb = ChallengeBody(
|
||||||
uri='http://challb', chall=self.chall, status=self.status,
|
uri='http://challb', chall=self.chall, status=self.status,
|
||||||
error=error)
|
error=error)
|
||||||
@@ -458,6 +455,7 @@ class OrderResourceTest(unittest.TestCase):
|
|||||||
'authorizations': None,
|
'authorizations': None,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
class NewOrderTest(unittest.TestCase):
|
class NewOrderTest(unittest.TestCase):
|
||||||
"""Tests for acme.messages.NewOrder."""
|
"""Tests for acme.messages.NewOrder."""
|
||||||
|
|
||||||
@@ -472,5 +470,18 @@ class NewOrderTest(unittest.TestCase):
|
|||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
class JWSPayloadRFC8555Compliant(unittest.TestCase):
|
||||||
|
"""Test for RFC8555 compliance of JWS generated from resources/challenges"""
|
||||||
|
def test_message_payload(self):
|
||||||
|
from acme.messages import NewAuthorization
|
||||||
|
|
||||||
|
new_order = NewAuthorization()
|
||||||
|
new_order.le_acme_version = 2
|
||||||
|
|
||||||
|
jobj = new_order.json_dumps(indent=2).encode()
|
||||||
|
# RFC8555 states that JWS bodies must not have a resource field.
|
||||||
|
self.assertEqual(jobj, b'{}')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main() # pragma: no cover
|
unittest.main() # pragma: no cover
|
||||||
@@ -1,26 +1,22 @@
|
|||||||
"""Tests for acme.standalone."""
|
"""Tests for acme.standalone."""
|
||||||
import multiprocessing
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import socket
|
import socket
|
||||||
import threading
|
import threading
|
||||||
import tempfile
|
|
||||||
import unittest
|
import unittest
|
||||||
import time
|
|
||||||
from contextlib import closing
|
|
||||||
|
|
||||||
from six.moves import http_client # pylint: disable=import-error
|
|
||||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
|
||||||
|
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
import mock
|
try:
|
||||||
|
import mock
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
from unittest import mock # type: ignore
|
||||||
import requests
|
import requests
|
||||||
|
from six.moves import http_client # pylint: disable=import-error
|
||||||
|
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||||
|
|
||||||
from acme import challenges
|
from acme import challenges
|
||||||
from acme import crypto_util
|
from acme import crypto_util
|
||||||
from acme import errors
|
from acme import errors
|
||||||
from acme import test_util
|
|
||||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
import test_util
|
||||||
|
|
||||||
|
|
||||||
class TLSServerTest(unittest.TestCase):
|
class TLSServerTest(unittest.TestCase):
|
||||||
@@ -41,32 +37,6 @@ class TLSServerTest(unittest.TestCase):
|
|||||||
server.server_close()
|
server.server_close()
|
||||||
|
|
||||||
|
|
||||||
class TLSSNI01ServerTest(unittest.TestCase):
|
|
||||||
"""Test for acme.standalone.TLSSNI01Server."""
|
|
||||||
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
self.certs = {b'localhost': (
|
|
||||||
test_util.load_pyopenssl_private_key('rsa2048_key.pem'),
|
|
||||||
test_util.load_cert('rsa2048_cert.pem'),
|
|
||||||
)}
|
|
||||||
from acme.standalone import TLSSNI01Server
|
|
||||||
self.server = TLSSNI01Server(('localhost', 0), certs=self.certs)
|
|
||||||
self.thread = threading.Thread(target=self.server.serve_forever)
|
|
||||||
self.thread.start()
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
self.server.shutdown()
|
|
||||||
self.thread.join()
|
|
||||||
|
|
||||||
def test_it(self):
|
|
||||||
host, port = self.server.socket.getsockname()[:2]
|
|
||||||
cert = crypto_util.probe_sni(
|
|
||||||
b'localhost', host=host, port=port, timeout=1)
|
|
||||||
self.assertEqual(jose.ComparableX509(cert),
|
|
||||||
jose.ComparableX509(self.certs[b'localhost'][1]))
|
|
||||||
|
|
||||||
|
|
||||||
class HTTP01ServerTest(unittest.TestCase):
|
class HTTP01ServerTest(unittest.TestCase):
|
||||||
"""Tests for acme.standalone.HTTP01Server."""
|
"""Tests for acme.standalone.HTTP01Server."""
|
||||||
|
|
||||||
@@ -118,6 +88,81 @@ class HTTP01ServerTest(unittest.TestCase):
|
|||||||
def test_http01_not_found(self):
|
def test_http01_not_found(self):
|
||||||
self.assertFalse(self._test_http01(add=False))
|
self.assertFalse(self._test_http01(add=False))
|
||||||
|
|
||||||
|
def test_timely_shutdown(self):
|
||||||
|
from acme.standalone import HTTP01Server
|
||||||
|
server = HTTP01Server(('', 0), resources=set(), timeout=0.05)
|
||||||
|
server_thread = threading.Thread(target=server.serve_forever)
|
||||||
|
server_thread.start()
|
||||||
|
|
||||||
|
client = socket.socket()
|
||||||
|
client.connect(('localhost', server.socket.getsockname()[1]))
|
||||||
|
|
||||||
|
stop_thread = threading.Thread(target=server.shutdown)
|
||||||
|
stop_thread.start()
|
||||||
|
server_thread.join(5.)
|
||||||
|
|
||||||
|
is_hung = server_thread.is_alive()
|
||||||
|
try:
|
||||||
|
client.shutdown(socket.SHUT_RDWR)
|
||||||
|
except: # pragma: no cover, pylint: disable=bare-except
|
||||||
|
# may raise error because socket could already be closed
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.assertFalse(is_hung, msg='Server shutdown should not be hung')
|
||||||
|
|
||||||
|
|
||||||
|
@unittest.skipIf(not challenges.TLSALPN01.is_supported(), "pyOpenSSL too old")
|
||||||
|
class TLSALPN01ServerTest(unittest.TestCase):
|
||||||
|
"""Test for acme.standalone.TLSALPN01Server."""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.certs = {b'localhost': (
|
||||||
|
test_util.load_pyopenssl_private_key('rsa2048_key.pem'),
|
||||||
|
test_util.load_cert('rsa2048_cert.pem'),
|
||||||
|
)}
|
||||||
|
# Use different certificate for challenge.
|
||||||
|
self.challenge_certs = {b'localhost': (
|
||||||
|
test_util.load_pyopenssl_private_key('rsa1024_key.pem'),
|
||||||
|
test_util.load_cert('rsa1024_cert.pem'),
|
||||||
|
)}
|
||||||
|
from acme.standalone import TLSALPN01Server
|
||||||
|
self.server = TLSALPN01Server(("localhost", 0), certs=self.certs,
|
||||||
|
challenge_certs=self.challenge_certs)
|
||||||
|
# pylint: disable=no-member
|
||||||
|
self.thread = threading.Thread(target=self.server.serve_forever)
|
||||||
|
self.thread.start()
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
self.server.shutdown() # pylint: disable=no-member
|
||||||
|
self.thread.join()
|
||||||
|
|
||||||
|
# TODO: This is not implemented yet, see comments in standalone.py
|
||||||
|
# def test_certs(self):
|
||||||
|
# host, port = self.server.socket.getsockname()[:2]
|
||||||
|
# cert = crypto_util.probe_sni(
|
||||||
|
# b'localhost', host=host, port=port, timeout=1)
|
||||||
|
# # Expect normal cert when connecting without ALPN.
|
||||||
|
# self.assertEqual(jose.ComparableX509(cert),
|
||||||
|
# jose.ComparableX509(self.certs[b'localhost'][1]))
|
||||||
|
|
||||||
|
def test_challenge_certs(self):
|
||||||
|
host, port = self.server.socket.getsockname()[:2]
|
||||||
|
cert = crypto_util.probe_sni(
|
||||||
|
b'localhost', host=host, port=port, timeout=1,
|
||||||
|
alpn_protocols=[b"acme-tls/1"])
|
||||||
|
# Expect challenge cert when connecting with ALPN.
|
||||||
|
self.assertEqual(
|
||||||
|
jose.ComparableX509(cert),
|
||||||
|
jose.ComparableX509(self.challenge_certs[b'localhost'][1])
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_bad_alpn(self):
|
||||||
|
host, port = self.server.socket.getsockname()[:2]
|
||||||
|
with self.assertRaises(errors.Error):
|
||||||
|
crypto_util.probe_sni(
|
||||||
|
b'localhost', host=host, port=port, timeout=1,
|
||||||
|
alpn_protocols=[b"bad-alpn"])
|
||||||
|
|
||||||
|
|
||||||
class BaseDualNetworkedServersTest(unittest.TestCase):
|
class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||||
"""Test for acme.standalone.BaseDualNetworkedServers."""
|
"""Test for acme.standalone.BaseDualNetworkedServers."""
|
||||||
@@ -170,37 +215,9 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
|||||||
prev_port = port
|
prev_port = port
|
||||||
|
|
||||||
|
|
||||||
class TLSSNI01DualNetworkedServersTest(unittest.TestCase):
|
|
||||||
"""Test for acme.standalone.TLSSNI01DualNetworkedServers."""
|
|
||||||
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
self.certs = {b'localhost': (
|
|
||||||
test_util.load_pyopenssl_private_key('rsa2048_key.pem'),
|
|
||||||
test_util.load_cert('rsa2048_cert.pem'),
|
|
||||||
)}
|
|
||||||
from acme.standalone import TLSSNI01DualNetworkedServers
|
|
||||||
self.servers = TLSSNI01DualNetworkedServers(('localhost', 0), certs=self.certs)
|
|
||||||
self.servers.serve_forever()
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
self.servers.shutdown_and_server_close()
|
|
||||||
|
|
||||||
def test_connect(self):
|
|
||||||
socknames = self.servers.getsocknames()
|
|
||||||
# connect to all addresses
|
|
||||||
for sockname in socknames:
|
|
||||||
host, port = sockname[:2]
|
|
||||||
cert = crypto_util.probe_sni(
|
|
||||||
b'localhost', host=host, port=port, timeout=1)
|
|
||||||
self.assertEqual(jose.ComparableX509(cert),
|
|
||||||
jose.ComparableX509(self.certs[b'localhost'][1]))
|
|
||||||
|
|
||||||
|
|
||||||
class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||||
"""Tests for acme.standalone.HTTP01DualNetworkedServers."""
|
"""Tests for acme.standalone.HTTP01DualNetworkedServers."""
|
||||||
|
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.account_key = jose.JWK.load(
|
self.account_key = jose.JWK.load(
|
||||||
test_util.load_vector('rsa1024_key.pem'))
|
test_util.load_vector('rsa1024_key.pem'))
|
||||||
@@ -247,60 +264,5 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
|||||||
self.assertFalse(self._test_http01(add=False))
|
self.assertFalse(self._test_http01(add=False))
|
||||||
|
|
||||||
|
|
||||||
class TestSimpleTLSSNI01Server(unittest.TestCase):
|
|
||||||
"""Tests for acme.standalone.simple_tls_sni_01_server."""
|
|
||||||
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
# mirror ../examples/standalone
|
|
||||||
self.test_cwd = tempfile.mkdtemp()
|
|
||||||
localhost_dir = os.path.join(self.test_cwd, 'localhost')
|
|
||||||
os.makedirs(localhost_dir)
|
|
||||||
shutil.copy(test_util.vector_path('rsa2048_cert.pem'),
|
|
||||||
os.path.join(localhost_dir, 'cert.pem'))
|
|
||||||
shutil.copy(test_util.vector_path('rsa2048_key.pem'),
|
|
||||||
os.path.join(localhost_dir, 'key.pem'))
|
|
||||||
|
|
||||||
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
|
|
||||||
sock.bind(('', 0))
|
|
||||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
|
||||||
self.port = sock.getsockname()[1]
|
|
||||||
|
|
||||||
from acme.standalone import simple_tls_sni_01_server
|
|
||||||
self.process = multiprocessing.Process(target=simple_tls_sni_01_server,
|
|
||||||
args=(['path', '-p', str(self.port)],))
|
|
||||||
self.old_cwd = os.getcwd()
|
|
||||||
os.chdir(self.test_cwd)
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
os.chdir(self.old_cwd)
|
|
||||||
if self.process.is_alive():
|
|
||||||
self.process.terminate()
|
|
||||||
self.process.join(timeout=5)
|
|
||||||
# Check that we didn't timeout waiting for the process to
|
|
||||||
# terminate.
|
|
||||||
self.assertNotEqual(self.process.exitcode, None)
|
|
||||||
shutil.rmtree(self.test_cwd)
|
|
||||||
|
|
||||||
@mock.patch('acme.standalone.TLSSNI01Server.handle_request')
|
|
||||||
def test_mock(self, handle):
|
|
||||||
from acme.standalone import simple_tls_sni_01_server
|
|
||||||
simple_tls_sni_01_server(cli_args=['path', '-p', str(self.port)], forever=False)
|
|
||||||
self.assertEqual(handle.call_count, 1)
|
|
||||||
|
|
||||||
def test_live(self):
|
|
||||||
self.process.start()
|
|
||||||
cert = None
|
|
||||||
for _ in range(50):
|
|
||||||
time.sleep(0.1)
|
|
||||||
try:
|
|
||||||
cert = crypto_util.probe_sni(b'localhost', b'127.0.0.1', self.port)
|
|
||||||
break
|
|
||||||
except errors.Error: # pragma: no cover
|
|
||||||
pass
|
|
||||||
self.assertEqual(jose.ComparableX509(cert),
|
|
||||||
test_util.load_comparable_cert('rsa2048_cert.pem'))
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
unittest.main() # pragma: no cover
|
unittest.main() # pragma: no cover
|
||||||
@@ -4,19 +4,12 @@
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
import os
|
import os
|
||||||
import unittest
|
|
||||||
import pkg_resources
|
|
||||||
|
|
||||||
from cryptography.hazmat.backends import default_backend
|
from cryptography.hazmat.backends import default_backend
|
||||||
from cryptography.hazmat.primitives import serialization
|
from cryptography.hazmat.primitives import serialization
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
from OpenSSL import crypto
|
from OpenSSL import crypto
|
||||||
|
import pkg_resources
|
||||||
|
|
||||||
def vector_path(*names):
|
|
||||||
"""Path to a test vector."""
|
|
||||||
return pkg_resources.resource_filename(
|
|
||||||
__name__, os.path.join('testdata', *names))
|
|
||||||
|
|
||||||
|
|
||||||
def load_vector(*names):
|
def load_vector(*names):
|
||||||
@@ -32,8 +25,7 @@ def _guess_loader(filename, loader_pem, loader_der):
|
|||||||
return loader_pem
|
return loader_pem
|
||||||
elif ext.lower() == '.der':
|
elif ext.lower() == '.der':
|
||||||
return loader_der
|
return loader_der
|
||||||
else: # pragma: no cover
|
raise ValueError("Loader could not be recognized based on extension") # pragma: no cover
|
||||||
raise ValueError("Loader could not be recognized based on extension")
|
|
||||||
|
|
||||||
|
|
||||||
def load_cert(*names):
|
def load_cert(*names):
|
||||||
@@ -73,23 +65,3 @@ def load_pyopenssl_private_key(*names):
|
|||||||
loader = _guess_loader(
|
loader = _guess_loader(
|
||||||
names[-1], crypto.FILETYPE_PEM, crypto.FILETYPE_ASN1)
|
names[-1], crypto.FILETYPE_PEM, crypto.FILETYPE_ASN1)
|
||||||
return crypto.load_privatekey(loader, load_vector(*names))
|
return crypto.load_privatekey(loader, load_vector(*names))
|
||||||
|
|
||||||
|
|
||||||
def skip_unless(condition, reason): # pragma: no cover
|
|
||||||
"""Skip tests unless a condition holds.
|
|
||||||
|
|
||||||
This implements the basic functionality of unittest.skipUnless
|
|
||||||
which is only available on Python 2.7+.
|
|
||||||
|
|
||||||
:param bool condition: If ``False``, the test will be skipped
|
|
||||||
:param str reason: the reason for skipping the test
|
|
||||||
|
|
||||||
:rtype: callable
|
|
||||||
:returns: decorator that hides tests unless condition is ``True``
|
|
||||||
|
|
||||||
"""
|
|
||||||
if hasattr(unittest, "skipUnless"):
|
|
||||||
return unittest.skipUnless(condition, reason)
|
|
||||||
elif condition:
|
|
||||||
return lambda cls: cls
|
|
||||||
return lambda cls: None
|
|
||||||
@@ -10,6 +10,8 @@ and for the CSR:
|
|||||||
|
|
||||||
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -outform DER > csr.der
|
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -outform DER > csr.der
|
||||||
|
|
||||||
and for the certificate:
|
and for the certificates:
|
||||||
|
|
||||||
openssl req -key rsa2047_key.pem -new -subj '/CN=example.com' -x509 -outform DER > cert.der
|
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 -outform DER > cert.der
|
||||||
|
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 > rsa2048_cert.pem
|
||||||
|
openssl req -key rsa1024_key.pem -new -subj '/CN=example.com' -x509 > rsa1024_cert.pem
|
||||||
13
acme/tests/testdata/rsa1024_cert.pem
vendored
Normal file
13
acme/tests/testdata/rsa1024_cert.pem
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
-----BEGIN CERTIFICATE-----
|
||||||
|
MIIB/TCCAWagAwIBAgIJAOyRIBs3QT8QMA0GCSqGSIb3DQEBCwUAMBYxFDASBgNV
|
||||||
|
BAMMC2V4YW1wbGUuY29tMB4XDTE4MDQyMzEwMzE0NFoXDTE4MDUyMzEwMzE0NFow
|
||||||
|
FjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJ
|
||||||
|
AoGBAJqJ87R8aVwByONxgQA9hwgvQd/QqI1r1UInXhEF2VnEtZGtUWLi100IpIqr
|
||||||
|
Mq4qusDwNZ3g8cUPtSkvJGs89djoajMDIJP7lQUEKUYnYrI0q755Tr/DgLWSk7iW
|
||||||
|
l5ezym0VzWUD0/xXUz8yRbNMTjTac80rS5SZk2ja2wWkYlRJAgMBAAGjUzBRMB0G
|
||||||
|
A1UdDgQWBBSsaX0IVZ4XXwdeffVAbG7gnxSYjTAfBgNVHSMEGDAWgBSsaX0IVZ4X
|
||||||
|
XwdeffVAbG7gnxSYjTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4GB
|
||||||
|
ADe7SVmvGH2nkwVfONk8TauRUDkePN1CJZKFb2zW1uO9ANJ2v5Arm/OQp0BG/xnI
|
||||||
|
Djw/aLTNVESF89oe15dkrUErtcaF413MC1Ld5lTCaJLHLGqDKY69e02YwRuxW7jY
|
||||||
|
qarpt7k7aR5FbcfO5r4V/FK/Gvp4Dmoky8uap7SJIW6x
|
||||||
|
-----END CERTIFICATE-----
|
||||||
47
appveyor.yml
47
appveyor.yml
@@ -1,47 +0,0 @@
|
|||||||
image: Visual Studio 2015
|
|
||||||
|
|
||||||
environment:
|
|
||||||
matrix:
|
|
||||||
- TOXENV: py35
|
|
||||||
- TOXENV: py37-cover
|
|
||||||
- TOXENV: integration-certbot
|
|
||||||
|
|
||||||
branches:
|
|
||||||
only:
|
|
||||||
# apache-parser-v2 is a temporary branch for doing work related to
|
|
||||||
# rewriting the parser in the Apache plugin.
|
|
||||||
- apache-parser-v2
|
|
||||||
- master
|
|
||||||
- /^\d+\.\d+\.x$/ # Version branches like X.X.X
|
|
||||||
- /^test-.*$/
|
|
||||||
|
|
||||||
init:
|
|
||||||
# Since master can receive only commits from PR that have already been tested, following
|
|
||||||
# condition avoid to launch all jobs except the coverage one for commits pushed to master.
|
|
||||||
- ps: |
|
|
||||||
if (-Not $Env:APPVEYOR_PULL_REQUEST_NUMBER -And $Env:APPVEYOR_REPO_BRANCH -Eq 'master' `
|
|
||||||
-And -Not ($Env:TOXENV -Like '*-cover'))
|
|
||||||
{ $Env:APPVEYOR_SKIP_FINALIZE_ON_EXIT = 'true'; Exit-AppVeyorBuild }
|
|
||||||
|
|
||||||
install:
|
|
||||||
# Use Python 3.7 by default
|
|
||||||
- SET PATH=C:\\Python37;C:\\Python37\\Scripts;%PATH%
|
|
||||||
# Using 4 processes is proven to be the most efficient integration tests config for AppVeyor
|
|
||||||
- IF %TOXENV%==integration-certbot SET PYTEST_ADDOPTS=--numprocesses=4
|
|
||||||
# Check env
|
|
||||||
- python --version
|
|
||||||
# Upgrade pip to avoid warnings
|
|
||||||
- python -m pip install --upgrade pip
|
|
||||||
# Ready to install tox and coverage
|
|
||||||
# tools/pip_install.py is used to pin packages to a known working version.
|
|
||||||
- python tools\\pip_install.py tox codecov
|
|
||||||
|
|
||||||
build: off
|
|
||||||
|
|
||||||
test_script:
|
|
||||||
- set TOX_TESTENV_PASSENV=APPVEYOR
|
|
||||||
# Test env is set by TOXENV env variable
|
|
||||||
- tox
|
|
||||||
|
|
||||||
on_success:
|
|
||||||
- if exist .coverage codecov -F windows
|
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
include LICENSE.txt
|
include LICENSE.txt
|
||||||
include README.rst
|
include README.rst
|
||||||
recursive-include docs *
|
recursive-include tests *
|
||||||
recursive-include certbot_apache/tests/testdata *
|
recursive-include certbot_apache/_internal/augeas_lens *.aug
|
||||||
include certbot_apache/centos-options-ssl-apache.conf
|
recursive-include certbot_apache/_internal/tls_configs *.conf
|
||||||
include certbot_apache/options-ssl-apache.conf
|
global-exclude __pycache__
|
||||||
recursive-include certbot_apache/augeas_lens *.aug
|
global-exclude *.py[cod]
|
||||||
|
|||||||
1
certbot-apache/certbot_apache/_internal/__init__.py
Normal file
1
certbot-apache/certbot_apache/_internal/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""Certbot Apache plugin."""
|
||||||
256
certbot-apache/certbot_apache/_internal/apache_util.py
Normal file
256
certbot-apache/certbot_apache/_internal/apache_util.py
Normal file
@@ -0,0 +1,256 @@
|
|||||||
|
""" Utility functions for certbot-apache plugin """
|
||||||
|
import binascii
|
||||||
|
import fnmatch
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
import pkg_resources
|
||||||
|
|
||||||
|
from certbot import errors
|
||||||
|
from certbot import util
|
||||||
|
|
||||||
|
from certbot.compat import os
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def get_mod_deps(mod_name):
|
||||||
|
"""Get known module dependencies.
|
||||||
|
|
||||||
|
.. note:: This does not need to be accurate in order for the client to
|
||||||
|
run. This simply keeps things clean if the user decides to revert
|
||||||
|
changes.
|
||||||
|
.. warning:: If all deps are not included, it may cause incorrect parsing
|
||||||
|
behavior, due to enable_mod's shortcut for updating the parser's
|
||||||
|
currently defined modules (`.ApacheParser.add_mod`)
|
||||||
|
This would only present a major problem in extremely atypical
|
||||||
|
configs that use ifmod for the missing deps.
|
||||||
|
|
||||||
|
"""
|
||||||
|
deps = {
|
||||||
|
"ssl": ["setenvif", "mime"]
|
||||||
|
}
|
||||||
|
return deps.get(mod_name, [])
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_path(vhost_path):
|
||||||
|
"""Get file path from augeas_vhost_path.
|
||||||
|
|
||||||
|
Takes in Augeas path and returns the file name
|
||||||
|
|
||||||
|
:param str vhost_path: Augeas virtual host path
|
||||||
|
|
||||||
|
:returns: filename of vhost
|
||||||
|
:rtype: str
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not vhost_path or not vhost_path.startswith("/files/"):
|
||||||
|
return None
|
||||||
|
|
||||||
|
return _split_aug_path(vhost_path)[0]
|
||||||
|
|
||||||
|
|
||||||
|
def get_internal_aug_path(vhost_path):
|
||||||
|
"""Get the Augeas path for a vhost with the file path removed.
|
||||||
|
|
||||||
|
:param str vhost_path: Augeas virtual host path
|
||||||
|
|
||||||
|
:returns: Augeas path to vhost relative to the containing file
|
||||||
|
:rtype: str
|
||||||
|
|
||||||
|
"""
|
||||||
|
return _split_aug_path(vhost_path)[1]
|
||||||
|
|
||||||
|
|
||||||
|
def _split_aug_path(vhost_path):
|
||||||
|
"""Splits an Augeas path into a file path and an internal path.
|
||||||
|
|
||||||
|
After removing "/files", this function splits vhost_path into the
|
||||||
|
file path and the remaining Augeas path.
|
||||||
|
|
||||||
|
:param str vhost_path: Augeas virtual host path
|
||||||
|
|
||||||
|
:returns: file path and internal Augeas path
|
||||||
|
:rtype: `tuple` of `str`
|
||||||
|
|
||||||
|
"""
|
||||||
|
# Strip off /files
|
||||||
|
file_path = vhost_path[6:]
|
||||||
|
internal_path = []
|
||||||
|
|
||||||
|
# Remove components from the end of file_path until it becomes valid
|
||||||
|
while not os.path.exists(file_path):
|
||||||
|
file_path, _, internal_path_part = file_path.rpartition("/")
|
||||||
|
internal_path.append(internal_path_part)
|
||||||
|
|
||||||
|
return file_path, "/".join(reversed(internal_path))
|
||||||
|
|
||||||
|
|
||||||
|
def parse_define_file(filepath, varname):
|
||||||
|
""" Parses Defines from a variable in configuration file
|
||||||
|
|
||||||
|
:param str filepath: Path of file to parse
|
||||||
|
:param str varname: Name of the variable
|
||||||
|
|
||||||
|
:returns: Dict of Define:Value pairs
|
||||||
|
:rtype: `dict`
|
||||||
|
|
||||||
|
"""
|
||||||
|
return_vars = {}
|
||||||
|
# Get list of words in the variable
|
||||||
|
a_opts = util.get_var_from_file(varname, filepath).split()
|
||||||
|
for i, v in enumerate(a_opts):
|
||||||
|
# Handle Define statements and make sure it has an argument
|
||||||
|
if v == "-D" and len(a_opts) >= i+2:
|
||||||
|
var_parts = a_opts[i+1].partition("=")
|
||||||
|
return_vars[var_parts[0]] = var_parts[2]
|
||||||
|
elif len(v) > 2 and v.startswith("-D"):
|
||||||
|
# Found var with no whitespace separator
|
||||||
|
var_parts = v[2:].partition("=")
|
||||||
|
return_vars[var_parts[0]] = var_parts[2]
|
||||||
|
return return_vars
|
||||||
|
|
||||||
|
|
||||||
|
def unique_id():
    """ Returns an unique id to be used as a VirtualHost identifier"""
    # 16 random bytes -> 32-character lowercase hex string
    random_bytes = os.urandom(16)
    return binascii.hexlify(random_bytes).decode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def included_in_paths(filepath, paths):
    """
    Returns true if the filepath is included in the list of paths
    that may contain full paths or wildcard paths that need to be
    expanded.

    :param str filepath: Filepath to check
    :params list paths: List of paths to check against

    :returns: True if included
    :rtype: bool
    """
    for candidate in paths:
        if fnmatch.fnmatch(filepath, candidate):
            return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def parse_defines(apachectl):
    """
    Gets Defines from httpd process and returns a dictionary of
    the defined variables.

    :param str apachectl: Path to apachectl executable

    :returns: dictionary of defined variables
    :rtype: dict
    """
    cmd = [apachectl, "-t", "-D", "DUMP_RUN_CFG"]
    found = parse_from_subprocess(cmd, r"Define: ([^ \n]*)")

    # DUMP_RUN_CFG itself should always show up in its own dump; if it is
    # missing, the output could not be parsed at all.
    if "DUMP_RUN_CFG" not in found:
        return {}
    found.remove("DUMP_RUN_CFG")

    defines = {}
    for entry in found:
        if entry.count("=") > 1:
            logger.error("Unexpected number of equal signs in "
                         "runtime config dump.")
            raise errors.PluginError(
                "Error parsing Apache runtime variables")
        name, _, value = entry.partition("=")
        defines[name] = value
    return defines
|
||||||
|
|
||||||
|
|
||||||
|
def parse_includes(apachectl):
    """
    Gets Include directives from httpd process and returns a list of
    their values.

    :param str apachectl: Path to apachectl executable

    :returns: list of found Include directive values
    :rtype: list of str
    """
    # Each dump line looks like "(nnn) /path/to/file"; capture the path part
    cmd = [apachectl, "-t", "-D", "DUMP_INCLUDES"]
    return parse_from_subprocess(cmd, r"\(.*\) (.*)")
|
||||||
|
|
||||||
|
|
||||||
|
def parse_modules(apachectl):
    """
    Get loaded modules from httpd process, and return the list
    of loaded module names.

    :param str apachectl: Path to apachectl executable

    :returns: list of found LoadModule module names
    :rtype: list of str
    """
    # Module dump lines end with "_module"; capture the bare name
    cmd = [apachectl, "-t", "-D", "DUMP_MODULES"]
    return parse_from_subprocess(cmd, r"(.*)_module")
|
||||||
|
|
||||||
|
|
||||||
|
def parse_from_subprocess(command, regexp):
    """Get values from stdout of subprocess command

    :param list command: Command to run
    :param str regexp: Regexp for parsing

    :returns: list parsed from command output
    :rtype: list

    """
    return re.findall(regexp, _get_runtime_cfg(command))
|
||||||
|
|
||||||
|
|
||||||
|
def _get_runtime_cfg(command):
    """
    Get runtime configuration info.

    :param command: Command to run

    :returns: stdout from command

    :raises errors.MisconfigurationError: if the command cannot be run or
        exits with a non-zero status

    """
    try:
        process = subprocess.Popen(command,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   universal_newlines=True)
        out, err = process.communicate()
    except (OSError, ValueError):
        logger.error(
            "Error running command %s for runtime parameters!%s",
            command, os.linesep)
        raise errors.MisconfigurationError(
            "Error accessing loaded Apache parameters: {0}".format(
                command))

    # A non-zero exit means Apache could not even validate its own config
    if process.returncode != 0:
        logger.warning("Error in checking parameter list: %s", err)
        raise errors.MisconfigurationError(
            "Apache is unable to check whether or not the module is "
            "loaded because Apache is misconfigured.")

    return out
|
||||||
|
|
||||||
|
def find_ssl_apache_conf(prefix):
    """
    Find a TLS Apache config file in the dedicated storage.

    :param str prefix: prefix of the TLS Apache config file to find

    :return: the path the TLS Apache config file
    :rtype: str
    """
    config_name = "{0}-options-ssl-apache.conf".format(prefix)
    return pkg_resources.resource_filename(
        "certbot_apache",
        os.path.join("_internal", "tls_configs", config_name))
|
||||||
172
certbot-apache/certbot_apache/_internal/apacheparser.py
Normal file
172
certbot-apache/certbot_apache/_internal/apacheparser.py
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
""" apacheconfig implementation of the ParserNode interfaces """
|
||||||
|
|
||||||
|
from certbot_apache._internal import assertions
|
||||||
|
from certbot_apache._internal import interfaces
|
||||||
|
from certbot_apache._internal import parsernode_util as util
|
||||||
|
|
||||||
|
|
||||||
|
class ApacheParserNode(interfaces.ParserNode):
    """ apacheconfig implementation of ParserNode interface.

    Expects metadata `ac_ast` to be passed in, where `ac_ast` is the AST provided
    by parsing the equivalent configuration text using the apacheconfig library.
    """

    def __init__(self, **kwargs):
        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(ApacheParserNode, self).__init__(**kwargs)
        self.ancestor = ancestor
        self.dirty = dirty
        self.filepath = filepath
        self.metadata = metadata
        # The apacheconfig AST node backing this ParserNode
        self._raw = self.metadata["ac_ast"]

    def save(self, msg):  # pragma: no cover
        pass

    def find_ancestors(self, name):  # pylint: disable=unused-variable
        """Find ancestor BlockNodes with a given name"""
        # Unimplemented stub: returns an assertion-pass placeholder node
        placeholder = ApacheBlockNode(name=assertions.PASS,
                                      parameters=assertions.PASS,
                                      ancestor=self,
                                      filepath=assertions.PASS,
                                      metadata=self.metadata)
        return [placeholder]
|
||||||
|
|
||||||
|
|
||||||
|
class ApacheCommentNode(ApacheParserNode):
    """ apacheconfig implementation of CommentNode interface """

    def __init__(self, **kwargs):
        comment, kwargs = util.commentnode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(ApacheCommentNode, self).__init__(**kwargs)
        self.comment = comment

    def __eq__(self, other):  # pragma: no cover
        if not isinstance(other, self.__class__):
            return False
        return ((self.comment, self.dirty, self.ancestor,
                 self.metadata, self.filepath) ==
                (other.comment, other.dirty, other.ancestor,
                 other.metadata, other.filepath))
|
||||||
|
|
||||||
|
|
||||||
|
class ApacheDirectiveNode(ApacheParserNode):
    """ apacheconfig implementation of DirectiveNode interface """

    def __init__(self, **kwargs):
        name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
        super(ApacheDirectiveNode, self).__init__(**kwargs)
        self.name = name
        self.parameters = parameters
        self.enabled = enabled
        self.include = None

    def __eq__(self, other):  # pragma: no cover
        if not isinstance(other, self.__class__):
            return False
        return ((self.name, self.filepath, self.parameters, self.enabled,
                 self.dirty, self.ancestor, self.metadata) ==
                (other.name, other.filepath, other.parameters, other.enabled,
                 other.dirty, other.ancestor, other.metadata))

    def set_parameters(self, _parameters):  # pragma: no cover
        """Sets the parameters for DirectiveNode"""
        return
|
||||||
|
|
||||||
|
|
||||||
|
class ApacheBlockNode(ApacheDirectiveNode):
    """ apacheconfig implementation of BlockNode interface """

    def __init__(self, **kwargs):
        super(ApacheBlockNode, self).__init__(**kwargs)
        # Children are kept in an immutable tuple; adds build a new tuple
        self.children = ()

    def __eq__(self, other):  # pragma: no cover
        if not isinstance(other, self.__class__):
            return False
        return ((self.name, self.filepath, self.parameters, self.children,
                 self.enabled, self.dirty, self.ancestor, self.metadata) ==
                (other.name, other.filepath, other.parameters, other.children,
                 other.enabled, other.dirty, other.ancestor, other.metadata))

    # pylint: disable=unused-argument
    def add_child_block(self, name, parameters=None, position=None):  # pragma: no cover
        """Adds a new BlockNode to the sequence of children"""
        # Unimplemented stub: appends and returns an assertion-pass node
        block = ApacheBlockNode(name=assertions.PASS,
                                parameters=assertions.PASS,
                                ancestor=self,
                                filepath=assertions.PASS,
                                metadata=self.metadata)
        self.children += (block,)
        return block

    # pylint: disable=unused-argument
    def add_child_directive(self, name, parameters=None, position=None):  # pragma: no cover
        """Adds a new DirectiveNode to the sequence of children"""
        directive = ApacheDirectiveNode(name=assertions.PASS,
                                        parameters=assertions.PASS,
                                        ancestor=self,
                                        filepath=assertions.PASS,
                                        metadata=self.metadata)
        self.children += (directive,)
        return directive

    # pylint: disable=unused-argument
    def add_child_comment(self, comment="", position=None):  # pragma: no cover
        """Adds a new CommentNode to the sequence of children"""
        comment_node = ApacheCommentNode(comment=assertions.PASS,
                                         ancestor=self,
                                         filepath=assertions.PASS,
                                         metadata=self.metadata)
        self.children += (comment_node,)
        return comment_node

    def find_blocks(self, name, exclude=True):  # pylint: disable=unused-argument
        """Recursive search of BlockNodes from the sequence of children"""
        return [ApacheBlockNode(name=assertions.PASS,
                                parameters=assertions.PASS,
                                ancestor=self,
                                filepath=assertions.PASS,
                                metadata=self.metadata)]

    def find_directives(self, name, exclude=True):  # pylint: disable=unused-argument
        """Recursive search of DirectiveNodes from the sequence of children"""
        return [ApacheDirectiveNode(name=assertions.PASS,
                                    parameters=assertions.PASS,
                                    ancestor=self,
                                    filepath=assertions.PASS,
                                    metadata=self.metadata)]

    # pylint: disable=unused-argument
    def find_comments(self, comment, exact=False):  # pragma: no cover
        """Recursive search of CommentNodes from the sequence of children"""
        return [ApacheCommentNode(comment=assertions.PASS,
                                  ancestor=self,
                                  filepath=assertions.PASS,
                                  metadata=self.metadata)]

    def delete_child(self, child):  # pragma: no cover
        """Deletes a ParserNode from the sequence of children"""
        return

    def unsaved_files(self):  # pragma: no cover
        """Returns a list of unsaved filepaths"""
        return [assertions.PASS]

    def parsed_paths(self):  # pragma: no cover
        """Returns a list of parsed configuration file paths"""
        return [assertions.PASS]
|
||||||
|
|
||||||
|
|
||||||
|
interfaces.CommentNode.register(ApacheCommentNode)
|
||||||
|
interfaces.DirectiveNode.register(ApacheDirectiveNode)
|
||||||
|
interfaces.BlockNode.register(ApacheBlockNode)
|
||||||
142
certbot-apache/certbot_apache/_internal/assertions.py
Normal file
142
certbot-apache/certbot_apache/_internal/assertions.py
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
"""Dual parser node assertions"""
|
||||||
|
import fnmatch
|
||||||
|
|
||||||
|
from certbot_apache._internal import interfaces
|
||||||
|
|
||||||
|
|
||||||
|
PASS = "CERTBOT_PASS_ASSERT"
|
||||||
|
|
||||||
|
|
||||||
|
def assertEqual(first, second):
    """ Equality assertion """

    if isinstance(first, interfaces.CommentNode):
        assertEqualComment(first, second)
    elif isinstance(first, interfaces.DirectiveNode):
        assertEqualDirective(first, second)

    # The contents of a BlockNode were already checked by
    # assertEqualDirective above, so only the interface is asserted here.
    if isinstance(first, interfaces.BlockNode):
        assert isinstance(second, interfaces.BlockNode)

    # filepath belongs to the base ParserNode interface; when either side
    # carries the pass value the implementation is incomplete, and neither
    # the boolean dirty flag nor the filepath can be trusted for comparison.
    if isPass(first.filepath) or isPass(second.filepath):
        return
    assert first.dirty == second.dirty
    # We might want to disable this later if testing with two separate
    # (but identical) directory structures.
    assert first.filepath == second.filepath
|
||||||
|
|
||||||
|
def assertEqualComment(first, second):  # pragma: no cover
    """ Equality assertion for CommentNode """

    for node in (first, second):
        assert isinstance(node, interfaces.CommentNode)

    if not isPass(first.comment) and not isPass(second.comment):  # type: ignore
        assert first.comment == second.comment  # type: ignore
|
||||||
|
|
||||||
|
def _assertEqualDirectiveComponents(first, second):  # pragma: no cover
    """ Handles assertion for instance variables for DirectiveNode and BlockNode"""

    # Enabled value cannot be asserted, because Augeas implementation
    # is unable to figure that out.
    # assert first.enabled == second.enabled
    for attr in ("name", "parameters"):
        lhs = getattr(first, attr)
        rhs = getattr(second, attr)
        if not isPass(lhs) and not isPass(rhs):
            assert lhs == rhs
|
||||||
|
|
||||||
|
def assertEqualDirective(first, second):
    """ Equality assertion for DirectiveNode """

    for node in (first, second):
        assert isinstance(node, interfaces.DirectiveNode)
    _assertEqualDirectiveComponents(first, second)
|
||||||
|
|
||||||
|
def isPass(value):  # pragma: no cover
    """Checks if the value is set to PASS"""
    # Booleans can never carry the pass marker, so always skip asserting them
    return isinstance(value, bool) or PASS in value
|
||||||
|
|
||||||
|
def isPassDirective(block):
    """ Checks if BlockNode or DirectiveNode should pass the assertion """

    # Any of name, parameters or filepath carrying the pass value means the
    # node comes from an unimplemented code path and must not be asserted.
    return any(isPass(attr) for attr in
               (block.name, block.parameters, block.filepath))
|
||||||
|
|
||||||
|
def isPassComment(comment):
    """ Checks if CommentNode should pass the assertion """

    # Either the comment content or the filepath carrying the pass value
    # marks the node as coming from an unimplemented code path.
    return isPass(comment.comment) or isPass(comment.filepath)
|
||||||
|
|
||||||
|
def isPassNodeList(nodelist):  # pragma: no cover
    """ Checks if a ParserNode in the nodelist should pass the assertion,
    this function is used for results of find_* methods. Unimplemented find_*
    methods should return a sequence containing a single ParserNode instance
    with assertion pass string."""

    # Only the first node needs inspecting: unimplemented find_* methods
    # return exactly one placeholder node.
    node = nodelist[0] if nodelist else None

    if not node:  # pragma: no cover
        return False

    if isinstance(node, interfaces.DirectiveNode):
        return isPassDirective(node)
    return isPassComment(node)
|
||||||
|
|
||||||
|
def assertEqualSimple(first, second):
    """ Simple assertion """
    # Skip the check entirely when either side is a pass marker
    if isPass(first) or isPass(second):
        return
    assert first == second
|
||||||
|
|
||||||
|
def isEqualVirtualHost(first, second):
    """
    Checks that two VirtualHost objects are similar. There are some built
    in differences with the implementations: VirtualHost created by ParserNode
    implementation doesn't have "path" defined, as it was used for Augeas path
    and that cannot obviously be used in the future. Similarly the legacy
    version lacks "node" variable, that has a reference to the BlockNode for the
    VirtualHost.
    """
    compared_attrs = ("name", "aliases", "filep", "addrs",
                      "ssl", "enabled", "modmacro", "ancestor")
    return all(getattr(first, attr) == getattr(second, attr)
               for attr in compared_attrs)
|
||||||
|
|
||||||
|
def assertEqualPathsList(first, second):  # pragma: no cover
    """
    Checks that the two lists of file paths match. This assertion allows for wildcard
    paths.
    """
    # Skip the comparison if either list contains a pass marker
    if any(isPass(path) for path in first) or any(isPass(path) for path in second):
        return
    # Every path on each side must be matched by some wildcard (or exact
    # path) on the other side.
    for fpath in first:
        assert any(fnmatch.fnmatch(fpath, spath) for spath in second)
    for spath in second:
        assert any(fnmatch.fnmatch(fpath, spath) for fpath in first)
|
||||||
538
certbot-apache/certbot_apache/_internal/augeasparser.py
Normal file
538
certbot-apache/certbot_apache/_internal/augeasparser.py
Normal file
@@ -0,0 +1,538 @@
|
|||||||
|
"""
|
||||||
|
Augeas implementation of the ParserNode interfaces.
|
||||||
|
|
||||||
|
Augeas works internally by using XPATH notation. The following is a short example
|
||||||
|
of how this all works internally, to better understand what's going on under the
|
||||||
|
hood.
|
||||||
|
|
||||||
|
A configuration file /etc/apache2/apache2.conf with the following content:
|
||||||
|
|
||||||
|
# First comment line
|
||||||
|
# Second comment line
|
||||||
|
WhateverDirective whatevervalue
|
||||||
|
<ABlock>
|
||||||
|
DirectiveInABlock dirvalue
|
||||||
|
</ABlock>
|
||||||
|
SomeDirective somedirectivevalue
|
||||||
|
<ABlock>
|
||||||
|
AnotherDirectiveInABlock dirvalue
|
||||||
|
</ABlock>
|
||||||
|
# Yet another comment
|
||||||
|
|
||||||
|
|
||||||
|
Translates over to Augeas path notation (of immediate children), when calling
|
||||||
|
for example: aug.match("/files/etc/apache2/apache2.conf/*")
|
||||||
|
|
||||||
|
[
|
||||||
|
"/files/etc/apache2/apache2.conf/#comment[1]",
|
||||||
|
"/files/etc/apache2/apache2.conf/#comment[2]",
|
||||||
|
"/files/etc/apache2/apache2.conf/directive[1]",
|
||||||
|
"/files/etc/apache2/apache2.conf/ABlock[1]",
|
||||||
|
"/files/etc/apache2/apache2.conf/directive[2]",
|
||||||
|
"/files/etc/apache2/apache2.conf/ABlock[2]",
|
||||||
|
"/files/etc/apache2/apache2.conf/#comment[3]"
|
||||||
|
]
|
||||||
|
|
||||||
|
Regardless of directives name, its key in the Augeas tree is always "directive",
|
||||||
|
with index where needed of course. Comments work similarly, while blocks
|
||||||
|
have their own key in the Augeas XPATH notation.
|
||||||
|
|
||||||
|
It's important to note that all of the unique keys have their own indices.
|
||||||
|
|
||||||
|
Augeas paths are case sensitive, while Apache configuration is case insensitive.
|
||||||
|
It looks like this:
|
||||||
|
|
||||||
|
<block>
|
||||||
|
directive value
|
||||||
|
</block>
|
||||||
|
<Block>
|
||||||
|
Directive Value
|
||||||
|
</Block>
|
||||||
|
<block>
|
||||||
|
directive value
|
||||||
|
</block>
|
||||||
|
<bLoCk>
|
||||||
|
DiReCtiVe VaLuE
|
||||||
|
</bLoCk>
|
||||||
|
|
||||||
|
Translates over to:
|
||||||
|
|
||||||
|
[
|
||||||
|
"/files/etc/apache2/apache2.conf/block[1]",
|
||||||
|
"/files/etc/apache2/apache2.conf/Block[1]",
|
||||||
|
"/files/etc/apache2/apache2.conf/block[2]",
|
||||||
|
"/files/etc/apache2/apache2.conf/bLoCk[1]",
|
||||||
|
]
|
||||||
|
"""
|
||||||
|
from acme.magic_typing import Set
|
||||||
|
from certbot import errors
|
||||||
|
from certbot.compat import os
|
||||||
|
|
||||||
|
from certbot_apache._internal import apache_util
|
||||||
|
from certbot_apache._internal import assertions
|
||||||
|
from certbot_apache._internal import interfaces
|
||||||
|
from certbot_apache._internal import parser
|
||||||
|
from certbot_apache._internal import parsernode_util as util
|
||||||
|
|
||||||
|
|
||||||
|
class AugeasParserNode(interfaces.ParserNode):
    """ Augeas implementation of ParserNode interface """

    def __init__(self, **kwargs):
        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(AugeasParserNode, self).__init__(**kwargs)
        self.ancestor = ancestor
        self.dirty = dirty
        self.filepath = filepath
        self.metadata = metadata
        self.parser = self.metadata.get("augeasparser")
        # Every Augeas-backed node must know its own path in the tree, and
        # Augeas paths never carry a trailing slash.
        if "augeaspath" not in self.metadata:
            raise errors.PluginError("Augeas path is required")
        if self.metadata["augeaspath"].endswith("/"):
            raise errors.PluginError(
                "Augeas path: {} has a trailing slash".format(
                    self.metadata["augeaspath"]))

    def save(self, msg):
        self.parser.save(msg)

    def find_ancestors(self, name):
        """
        Searches for ancestor BlockNodes with a given name.

        :param str name: Name of the BlockNode parent to search for

        :returns: List of matching ancestor nodes.
        :rtype: list of AugeasBlockNode
        """
        matches = []
        current = self.metadata["augeaspath"]
        while True:
            # Step one level up the Augeas tree
            current = current.rpartition("/")[0]
            # Stop when the root of the tree is reached
            if not current or current == "/files":
                break
            candidate = self._create_blocknode(current)
            # Apache configuration is case insensitive
            if candidate.name.lower() == name.lower():
                matches.append(candidate)

        return matches

    def _create_blocknode(self, path):
        """
        Helper function to create a BlockNode from an Augeas path. This is used
        by AugeasParserNode.find_ancestors and AugeasBlockNode.find_blocks
        """
        metadata = {"augeasparser": self.parser, "augeaspath": path}

        # A node is enabled when its file was included from the root config
        # or the initial parser state
        enabled = self.parser.parsed_in_original(
            apache_util.get_file_path(path)
        )

        return AugeasBlockNode(name=self._aug_get_name(path),
                               enabled=enabled,
                               ancestor=assertions.PASS,
                               filepath=apache_util.get_file_path(path),
                               metadata=metadata)

    def _aug_get_name(self, path):
        """
        Helper function to get name of a configuration block or variable from path.
        """
        # Drop a single trailing slash if present
        if path.endswith("/"):  # pragma: no cover
            path = path[:-1]

        # The last path segment is the name; strip the "[N]" index suffix,
        # which is Augeas bookkeeping rather than Apache syntax.
        return path.rsplit("/", 1)[-1].split("[", 1)[0]
|
||||||
|
|
||||||
|
|
||||||
|
class AugeasCommentNode(AugeasParserNode):
    """ Augeas implementation of CommentNode interface """

    def __init__(self, **kwargs):
        comment, kwargs = util.commentnode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(AugeasCommentNode, self).__init__(**kwargs)
        self.comment = comment

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return ((self.comment, self.filepath, self.dirty,
                 self.ancestor, self.metadata) ==
                (other.comment, other.filepath, other.dirty,
                 other.ancestor, other.metadata))
|
||||||
|
|
||||||
|
|
||||||
|
class AugeasDirectiveNode(AugeasParserNode):
    """ Augeas implementation of DirectiveNode interface """

    def __init__(self, **kwargs):
        name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
        super(AugeasDirectiveNode, self).__init__(**kwargs)
        self.name = name
        self.enabled = enabled
        # Parameters are stored in the Augeas tree rather than on the
        # instance, so write them through when provided.
        if parameters:
            self.set_parameters(parameters)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return ((self.name, self.filepath, self.parameters, self.enabled,
                 self.dirty, self.ancestor, self.metadata) ==
                (other.name, other.filepath, other.parameters, other.enabled,
                 other.dirty, other.ancestor, other.metadata))

    def set_parameters(self, parameters):
        """
        Sets parameters of a DirectiveNode or BlockNode object.

        :param list parameters: List of all parameters for the node to set.
        """
        base = self.metadata["augeaspath"]

        # Remove the old parameters one by one; Augeas renumbers the
        # remaining args after each removal, so arg[1] is deleted repeatedly.
        for _ in self._aug_get_params(base):
            self.parser.aug.remove("{}/arg[1]".format(base))

        # Write the new parameters (Augeas indices are 1-based)
        for index, value in enumerate(parameters, start=1):
            self.parser.aug.set("{}/arg[{}]".format(base, index), value)

    @property
    def parameters(self):
        """
        Fetches the parameters from Augeas tree, ensuring that the sequence always
        represents the current state

        :returns: Tuple of parameters for this DirectiveNode
        :rtype: tuple
        """
        return tuple(self._aug_get_params(self.metadata["augeaspath"]))

    def _aug_get_params(self, path):
        """Helper function to get parameters for DirectiveNodes and BlockNodes"""
        return [self.parser.get_arg(arg_path)
                for arg_path in self.parser.aug.match(path + "/arg")]
|
||||||
|
|
||||||
|
|
||||||
|
class AugeasBlockNode(AugeasDirectiveNode):
|
||||||
|
""" Augeas implementation of BlockNode interface """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
super(AugeasBlockNode, self).__init__(**kwargs)
|
||||||
|
self.children = ()
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if isinstance(other, self.__class__):
|
||||||
|
return (self.name == other.name and
|
||||||
|
self.filepath == other.filepath and
|
||||||
|
self.parameters == other.parameters and
|
||||||
|
self.children == other.children and
|
||||||
|
self.enabled == other.enabled and
|
||||||
|
self.dirty == other.dirty and
|
||||||
|
self.ancestor == other.ancestor and
|
||||||
|
self.metadata == other.metadata)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# pylint: disable=unused-argument
|
||||||
|
def add_child_block(self, name, parameters=None, position=None): # pragma: no cover
|
||||||
|
"""Adds a new BlockNode to the sequence of children"""
|
||||||
|
|
||||||
|
insertpath, realpath, before = self._aug_resolve_child_position(
|
||||||
|
name,
|
||||||
|
position
|
||||||
|
)
|
||||||
|
new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}
|
||||||
|
|
||||||
|
# Create the new block
|
||||||
|
self.parser.aug.insert(insertpath, name, before)
|
||||||
|
# Check if the file was included from the root config or initial state
|
||||||
|
enabled = self.parser.parsed_in_original(
|
||||||
|
apache_util.get_file_path(realpath)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Parameters will be set at the initialization of the new object
|
||||||
|
new_block = AugeasBlockNode(name=name,
|
||||||
|
parameters=parameters,
|
||||||
|
enabled=enabled,
|
||||||
|
ancestor=assertions.PASS,
|
||||||
|
filepath=apache_util.get_file_path(realpath),
|
||||||
|
metadata=new_metadata)
|
||||||
|
return new_block
|
||||||
|
|
||||||
|
# pylint: disable=unused-argument
|
||||||
|
def add_child_directive(self, name, parameters=None, position=None): # pragma: no cover
|
||||||
|
"""Adds a new DirectiveNode to the sequence of children"""
|
||||||
|
|
||||||
|
if not parameters:
|
||||||
|
raise errors.PluginError("Directive requires parameters and none were set.")
|
||||||
|
|
||||||
|
insertpath, realpath, before = self._aug_resolve_child_position(
|
||||||
|
"directive",
|
||||||
|
position
|
||||||
|
)
|
||||||
|
new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}
|
||||||
|
|
||||||
|
# Create the new directive
|
||||||
|
self.parser.aug.insert(insertpath, "directive", before)
|
||||||
|
# Set the directive key
|
||||||
|
self.parser.aug.set(realpath, name)
|
||||||
|
# Check if the file was included from the root config or initial state
|
||||||
|
enabled = self.parser.parsed_in_original(
|
||||||
|
apache_util.get_file_path(realpath)
|
||||||
|
)
|
||||||
|
|
||||||
|
new_dir = AugeasDirectiveNode(name=name,
|
||||||
|
parameters=parameters,
|
||||||
|
enabled=enabled,
|
||||||
|
ancestor=assertions.PASS,
|
||||||
|
filepath=apache_util.get_file_path(realpath),
|
||||||
|
metadata=new_metadata)
|
||||||
|
return new_dir
|
||||||
|
|
||||||
|
def add_child_comment(self, comment="", position=None):
|
||||||
|
"""Adds a new CommentNode to the sequence of children"""
|
||||||
|
|
||||||
|
insertpath, realpath, before = self._aug_resolve_child_position(
|
||||||
|
"#comment",
|
||||||
|
position
|
||||||
|
)
|
||||||
|
new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}
|
||||||
|
|
||||||
|
# Create the new comment
|
||||||
|
self.parser.aug.insert(insertpath, "#comment", before)
|
||||||
|
# Set the comment content
|
||||||
|
self.parser.aug.set(realpath, comment)
|
||||||
|
|
||||||
|
new_comment = AugeasCommentNode(comment=comment,
|
||||||
|
ancestor=assertions.PASS,
|
||||||
|
filepath=apache_util.get_file_path(realpath),
|
||||||
|
metadata=new_metadata)
|
||||||
|
return new_comment
|
||||||
|
|
||||||
|
def find_blocks(self, name, exclude=True):
|
||||||
|
"""Recursive search of BlockNodes from the sequence of children"""
|
||||||
|
|
||||||
|
nodes = []
|
||||||
|
paths = self._aug_find_blocks(name)
|
||||||
|
if exclude:
|
||||||
|
paths = self.parser.exclude_dirs(paths)
|
||||||
|
for path in paths:
|
||||||
|
nodes.append(self._create_blocknode(path))
|
||||||
|
|
||||||
|
return nodes
|
||||||
|
|
||||||
|
def find_directives(self, name, exclude=True):
|
||||||
|
"""Recursive search of DirectiveNodes from the sequence of children"""
|
||||||
|
|
||||||
|
nodes = []
|
||||||
|
ownpath = self.metadata.get("augeaspath")
|
||||||
|
|
||||||
|
directives = self.parser.find_dir(name, start=ownpath, exclude=exclude)
|
||||||
|
already_parsed = set() # type: Set[str]
|
||||||
|
for directive in directives:
|
||||||
|
# Remove the /arg part from the Augeas path
|
||||||
|
directive = directive.partition("/arg")[0]
|
||||||
|
# find_dir returns an object for each _parameter_ of a directive
|
||||||
|
# so we need to filter out duplicates.
|
||||||
|
if directive not in already_parsed:
|
||||||
|
nodes.append(self._create_directivenode(directive))
|
||||||
|
already_parsed.add(directive)
|
||||||
|
|
||||||
|
return nodes
|
||||||
|
|
||||||
|
def find_comments(self, comment):
|
||||||
|
"""
|
||||||
|
Recursive search of DirectiveNodes from the sequence of children.
|
||||||
|
|
||||||
|
:param str comment: Comment content to search for.
|
||||||
|
"""
|
||||||
|
|
||||||
|
nodes = []
|
||||||
|
ownpath = self.metadata.get("augeaspath")
|
||||||
|
|
||||||
|
comments = self.parser.find_comments(comment, start=ownpath)
|
||||||
|
for com in comments:
|
||||||
|
nodes.append(self._create_commentnode(com))
|
||||||
|
|
||||||
|
return nodes
|
||||||
|
|
||||||
|
def delete_child(self, child):
|
||||||
|
"""
|
||||||
|
Deletes a ParserNode from the sequence of children, and raises an
|
||||||
|
exception if it's unable to do so.
|
||||||
|
:param AugeasParserNode: child: A node to delete.
|
||||||
|
"""
|
||||||
|
if not self.parser.aug.remove(child.metadata["augeaspath"]):
|
||||||
|
|
||||||
|
raise errors.PluginError(
|
||||||
|
("Could not delete child node, the Augeas path: {} doesn't " +
|
||||||
|
"seem to exist.").format(child.metadata["augeaspath"])
|
||||||
|
)
|
||||||
|
|
||||||
|
def unsaved_files(self):
|
||||||
|
"""Returns a list of unsaved filepaths"""
|
||||||
|
return self.parser.unsaved_files()
|
||||||
|
|
||||||
|
def parsed_paths(self):
|
||||||
|
"""
|
||||||
|
Returns a list of file paths that have currently been parsed into the parser
|
||||||
|
tree. The returned list may include paths with wildcard characters, for
|
||||||
|
example: ['/etc/apache2/conf.d/*.load']
|
||||||
|
|
||||||
|
This is typically called on the root node of the ParserNode tree.
|
||||||
|
|
||||||
|
:returns: list of file paths of files that have been parsed
|
||||||
|
"""
|
||||||
|
|
||||||
|
res_paths = []
|
||||||
|
|
||||||
|
paths = self.parser.existing_paths
|
||||||
|
for directory in paths:
|
||||||
|
for filename in paths[directory]:
|
||||||
|
res_paths.append(os.path.join(directory, filename))
|
||||||
|
|
||||||
|
return res_paths
|
||||||
|
|
||||||
|
def _create_commentnode(self, path):
|
||||||
|
"""Helper function to create a CommentNode from Augeas path"""
|
||||||
|
|
||||||
|
comment = self.parser.aug.get(path)
|
||||||
|
metadata = {"augeasparser": self.parser, "augeaspath": path}
|
||||||
|
|
||||||
|
# Because of the dynamic nature of AugeasParser and the fact that we're
|
||||||
|
# not populating the complete node tree, the ancestor has a dummy value
|
||||||
|
return AugeasCommentNode(comment=comment,
|
||||||
|
ancestor=assertions.PASS,
|
||||||
|
filepath=apache_util.get_file_path(path),
|
||||||
|
metadata=metadata)
|
||||||
|
|
||||||
|
def _create_directivenode(self, path):
|
||||||
|
"""Helper function to create a DirectiveNode from Augeas path"""
|
||||||
|
|
||||||
|
name = self.parser.get_arg(path)
|
||||||
|
metadata = {"augeasparser": self.parser, "augeaspath": path}
|
||||||
|
|
||||||
|
# Check if the file was included from the root config or initial state
|
||||||
|
enabled = self.parser.parsed_in_original(
|
||||||
|
apache_util.get_file_path(path)
|
||||||
|
)
|
||||||
|
return AugeasDirectiveNode(name=name,
|
||||||
|
ancestor=assertions.PASS,
|
||||||
|
enabled=enabled,
|
||||||
|
filepath=apache_util.get_file_path(path),
|
||||||
|
metadata=metadata)
|
||||||
|
|
||||||
|
def _aug_find_blocks(self, name):
|
||||||
|
"""Helper function to perform a search to Augeas DOM tree to search
|
||||||
|
configuration blocks with a given name"""
|
||||||
|
|
||||||
|
# The code here is modified from configurator.get_virtual_hosts()
|
||||||
|
blk_paths = set()
|
||||||
|
for vhost_path in list(self.parser.parser_paths):
|
||||||
|
paths = self.parser.aug.match(
|
||||||
|
("/files%s//*[label()=~regexp('%s')]" %
|
||||||
|
(vhost_path, parser.case_i(name))))
|
||||||
|
blk_paths.update([path for path in paths if
|
||||||
|
name.lower() in os.path.basename(path).lower()])
|
||||||
|
return blk_paths
|
||||||
|
|
||||||
|
def _aug_resolve_child_position(self, name, position):
|
||||||
|
"""
|
||||||
|
Helper function that iterates through the immediate children and figures
|
||||||
|
out the insertion path for a new AugeasParserNode.
|
||||||
|
|
||||||
|
Augeas also generalizes indices for directives and comments, simply by
|
||||||
|
using "directive" or "comment" respectively as their names.
|
||||||
|
|
||||||
|
This function iterates over the existing children of the AugeasBlockNode,
|
||||||
|
returning their insertion path, resulting Augeas path and if the new node
|
||||||
|
should be inserted before or after the returned insertion path.
|
||||||
|
|
||||||
|
Note: while Apache is case insensitive, Augeas is not, and blocks like
|
||||||
|
Nameofablock and NameOfABlock have different indices.
|
||||||
|
|
||||||
|
:param str name: Name of the AugeasBlockNode to insert, "directive" for
|
||||||
|
AugeasDirectiveNode or "comment" for AugeasCommentNode
|
||||||
|
:param int position: The position to insert the child AugeasParserNode to
|
||||||
|
|
||||||
|
:returns: Tuple of insert path, resulting path and a boolean if the new
|
||||||
|
node should be inserted before it.
|
||||||
|
:rtype: tuple of str, str, bool
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Default to appending
|
||||||
|
before = False
|
||||||
|
|
||||||
|
all_children = self.parser.aug.match("{}/*".format(
|
||||||
|
self.metadata["augeaspath"])
|
||||||
|
)
|
||||||
|
|
||||||
|
# Calculate resulting_path
|
||||||
|
# Augeas indices start at 1. We use counter to calculate the index to
|
||||||
|
# be used in resulting_path.
|
||||||
|
counter = 1
|
||||||
|
for i, child in enumerate(all_children):
|
||||||
|
if position is not None and i >= position:
|
||||||
|
# We're not going to insert the new node to an index after this
|
||||||
|
break
|
||||||
|
childname = self._aug_get_name(child)
|
||||||
|
if name == childname:
|
||||||
|
counter += 1
|
||||||
|
|
||||||
|
resulting_path = "{}/{}[{}]".format(
|
||||||
|
self.metadata["augeaspath"],
|
||||||
|
name,
|
||||||
|
counter
|
||||||
|
)
|
||||||
|
|
||||||
|
# Form the correct insert_path
|
||||||
|
# Inserting the only child and appending as the last child work
|
||||||
|
# similarly in Augeas.
|
||||||
|
append = not all_children or position is None or position >= len(all_children)
|
||||||
|
if append:
|
||||||
|
insert_path = "{}/*[last()]".format(
|
||||||
|
self.metadata["augeaspath"]
|
||||||
|
)
|
||||||
|
elif position == 0:
|
||||||
|
# Insert as the first child, before the current first one.
|
||||||
|
insert_path = all_children[0]
|
||||||
|
before = True
|
||||||
|
else:
|
||||||
|
insert_path = "{}/*[{}]".format(
|
||||||
|
self.metadata["augeaspath"],
|
||||||
|
position
|
||||||
|
)
|
||||||
|
|
||||||
|
return (insert_path, resulting_path, before)
|
||||||
|
|
||||||
|
|
||||||
|
interfaces.CommentNode.register(AugeasCommentNode)
|
||||||
|
interfaces.DirectiveNode.register(AugeasDirectiveNode)
|
||||||
|
interfaces.BlockNode.register(AugeasBlockNode)
|
||||||
@@ -1,5 +1,7 @@
|
|||||||
"""Apache Configurator."""
|
"""Apache Configurator."""
|
||||||
# pylint: disable=too-many-lines
|
# pylint: disable=too-many-lines
|
||||||
|
from collections import defaultdict
|
||||||
|
from distutils.version import LooseVersion
|
||||||
import copy
|
import copy
|
||||||
import fnmatch
|
import fnmatch
|
||||||
import logging
|
import logging
|
||||||
@@ -7,34 +9,38 @@ import re
|
|||||||
import socket
|
import socket
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from collections import defaultdict
|
|
||||||
|
|
||||||
import pkg_resources
|
|
||||||
import six
|
import six
|
||||||
|
|
||||||
import zope.component
|
import zope.component
|
||||||
import zope.interface
|
import zope.interface
|
||||||
|
try:
|
||||||
|
import apacheconfig
|
||||||
|
HAS_APACHECONFIG = True
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
HAS_APACHECONFIG = False
|
||||||
|
|
||||||
from acme import challenges
|
from acme import challenges
|
||||||
from acme.magic_typing import DefaultDict, Dict, List, Set, Union # pylint: disable=unused-import, no-name-in-module
|
from acme.magic_typing import DefaultDict
|
||||||
|
from acme.magic_typing import Dict
|
||||||
|
from acme.magic_typing import List
|
||||||
|
from acme.magic_typing import Set
|
||||||
|
from acme.magic_typing import Union
|
||||||
from certbot import errors
|
from certbot import errors
|
||||||
from certbot import interfaces
|
from certbot import interfaces
|
||||||
from certbot import util
|
from certbot import util
|
||||||
|
|
||||||
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge # pylint: disable=unused-import
|
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge # pylint: disable=unused-import
|
||||||
from certbot.compat import filesystem
|
from certbot.compat import filesystem
|
||||||
from certbot.compat import os
|
from certbot.compat import os
|
||||||
from certbot.plugins import common
|
from certbot.plugins import common
|
||||||
from certbot.plugins.util import path_surgery
|
|
||||||
from certbot.plugins.enhancements import AutoHSTSEnhancement
|
from certbot.plugins.enhancements import AutoHSTSEnhancement
|
||||||
|
from certbot.plugins.util import path_surgery
|
||||||
from certbot_apache import apache_util
|
from certbot_apache._internal import apache_util
|
||||||
from certbot_apache import constants
|
from certbot_apache._internal import assertions
|
||||||
from certbot_apache import display_ops
|
from certbot_apache._internal import constants
|
||||||
from certbot_apache import http_01
|
from certbot_apache._internal import display_ops
|
||||||
from certbot_apache import obj
|
from certbot_apache._internal import dualparser
|
||||||
from certbot_apache import parser
|
from certbot_apache._internal import http_01
|
||||||
|
from certbot_apache._internal import obj
|
||||||
|
from certbot_apache._internal import parser
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -71,18 +77,17 @@ logger = logging.getLogger(__name__)
|
|||||||
@zope.interface.implementer(interfaces.IAuthenticator, interfaces.IInstaller)
|
@zope.interface.implementer(interfaces.IAuthenticator, interfaces.IInstaller)
|
||||||
@zope.interface.provider(interfaces.IPluginFactory)
|
@zope.interface.provider(interfaces.IPluginFactory)
|
||||||
class ApacheConfigurator(common.Installer):
|
class ApacheConfigurator(common.Installer):
|
||||||
# pylint: disable=too-many-instance-attributes,too-many-public-methods
|
|
||||||
"""Apache configurator.
|
"""Apache configurator.
|
||||||
|
|
||||||
:ivar config: Configuration.
|
:ivar config: Configuration.
|
||||||
:type config: :class:`~certbot.interfaces.IConfig`
|
:type config: :class:`~certbot.interfaces.IConfig`
|
||||||
|
|
||||||
:ivar parser: Handles low level parsing
|
:ivar parser: Handles low level parsing
|
||||||
:type parser: :class:`~certbot_apache.parser`
|
:type parser: :class:`~certbot_apache._internal.parser`
|
||||||
|
|
||||||
:ivar tup version: version of Apache
|
:ivar tup version: version of Apache
|
||||||
:ivar list vhosts: All vhosts found in the configuration
|
:ivar list vhosts: All vhosts found in the configuration
|
||||||
(:class:`list` of :class:`~certbot_apache.obj.VirtualHost`)
|
(:class:`list` of :class:`~certbot_apache._internal.obj.VirtualHost`)
|
||||||
|
|
||||||
:ivar dict assoc: Mapping between domains and vhosts
|
:ivar dict assoc: Mapping between domains and vhosts
|
||||||
|
|
||||||
@@ -110,14 +115,29 @@ class ApacheConfigurator(common.Installer):
|
|||||||
handle_modules=False,
|
handle_modules=False,
|
||||||
handle_sites=False,
|
handle_sites=False,
|
||||||
challenge_location="/etc/apache2",
|
challenge_location="/etc/apache2",
|
||||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
|
||||||
"certbot_apache", "options-ssl-apache.conf")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def option(self, key):
|
def option(self, key):
|
||||||
"""Get a value from options"""
|
"""Get a value from options"""
|
||||||
return self.options.get(key)
|
return self.options.get(key)
|
||||||
|
|
||||||
|
def pick_apache_config(self, warn_on_no_mod_ssl=True):
|
||||||
|
"""
|
||||||
|
Pick the appropriate TLS Apache configuration file for current version of Apache and OS.
|
||||||
|
|
||||||
|
:param bool warn_on_no_mod_ssl: True if we should warn if mod_ssl is not found.
|
||||||
|
|
||||||
|
:return: the path to the TLS Apache configuration file to use
|
||||||
|
:rtype: str
|
||||||
|
"""
|
||||||
|
# Disabling TLS session tickets is supported by Apache 2.4.11+ and OpenSSL 1.0.2l+.
|
||||||
|
# So for old versions of Apache we pick a configuration without this option.
|
||||||
|
openssl_version = self.openssl_version(warn_on_no_mod_ssl)
|
||||||
|
if self.version < (2, 4, 11) or not openssl_version or\
|
||||||
|
LooseVersion(openssl_version) < LooseVersion('1.0.2l'):
|
||||||
|
return apache_util.find_ssl_apache_conf("old")
|
||||||
|
return apache_util.find_ssl_apache_conf("current")
|
||||||
|
|
||||||
def _prepare_options(self):
|
def _prepare_options(self):
|
||||||
"""
|
"""
|
||||||
Set the values possibly changed by command line parameters to
|
Set the values possibly changed by command line parameters to
|
||||||
@@ -174,8 +194,6 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"(Only Ubuntu/Debian currently)")
|
"(Only Ubuntu/Debian currently)")
|
||||||
add("ctl", default=DEFAULTS["ctl"],
|
add("ctl", default=DEFAULTS["ctl"],
|
||||||
help="Full path to Apache control script")
|
help="Full path to Apache control script")
|
||||||
util.add_deprecated_argument(
|
|
||||||
add, argument_name="init-script", nargs=1)
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
"""Initialize an Apache Configurator.
|
"""Initialize an Apache Configurator.
|
||||||
@@ -185,26 +203,34 @@ class ApacheConfigurator(common.Installer):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
version = kwargs.pop("version", None)
|
version = kwargs.pop("version", None)
|
||||||
|
use_parsernode = kwargs.pop("use_parsernode", False)
|
||||||
|
openssl_version = kwargs.pop("openssl_version", None)
|
||||||
super(ApacheConfigurator, self).__init__(*args, **kwargs)
|
super(ApacheConfigurator, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
# Add name_server association dict
|
# Add name_server association dict
|
||||||
self.assoc = dict() # type: Dict[str, obj.VirtualHost]
|
self.assoc = {} # type: Dict[str, obj.VirtualHost]
|
||||||
# Outstanding challenges
|
# Outstanding challenges
|
||||||
self._chall_out = set() # type: Set[KeyAuthorizationAnnotatedChallenge]
|
self._chall_out = set() # type: Set[KeyAuthorizationAnnotatedChallenge]
|
||||||
# List of vhosts configured per wildcard domain on this run.
|
# List of vhosts configured per wildcard domain on this run.
|
||||||
# used by deploy_cert() and enhance()
|
# used by deploy_cert() and enhance()
|
||||||
self._wildcard_vhosts = dict() # type: Dict[str, List[obj.VirtualHost]]
|
self._wildcard_vhosts = {} # type: Dict[str, List[obj.VirtualHost]]
|
||||||
# Maps enhancements to vhosts we've enabled the enhancement for
|
# Maps enhancements to vhosts we've enabled the enhancement for
|
||||||
self._enhanced_vhosts = defaultdict(set) # type: DefaultDict[str, Set[obj.VirtualHost]]
|
self._enhanced_vhosts = defaultdict(set) # type: DefaultDict[str, Set[obj.VirtualHost]]
|
||||||
# Temporary state for AutoHSTS enhancement
|
# Temporary state for AutoHSTS enhancement
|
||||||
self._autohsts = {} # type: Dict[str, Dict[str, Union[int, float]]]
|
self._autohsts = {} # type: Dict[str, Dict[str, Union[int, float]]]
|
||||||
# Reverter save notes
|
# Reverter save notes
|
||||||
self.save_notes = ""
|
self.save_notes = ""
|
||||||
|
# Should we use ParserNode implementation instead of the old behavior
|
||||||
|
self.USE_PARSERNODE = use_parsernode
|
||||||
|
# Saves the list of file paths that were parsed initially, and
|
||||||
|
# not added to parser tree by self.conf("vhost-root") for example.
|
||||||
|
self.parsed_paths = [] # type: List[str]
|
||||||
# These will be set in the prepare function
|
# These will be set in the prepare function
|
||||||
self._prepared = False
|
self._prepared = False
|
||||||
self.parser = None
|
self.parser = None
|
||||||
|
self.parser_root = None
|
||||||
self.version = version
|
self.version = version
|
||||||
|
self._openssl_version = openssl_version
|
||||||
self.vhosts = None
|
self.vhosts = None
|
||||||
self.options = copy.deepcopy(self.OS_DEFAULTS)
|
self.options = copy.deepcopy(self.OS_DEFAULTS)
|
||||||
self._enhance_func = {"redirect": self._enable_redirect,
|
self._enhance_func = {"redirect": self._enable_redirect,
|
||||||
@@ -221,6 +247,52 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"""Full absolute path to digest of updated SSL configuration file."""
|
"""Full absolute path to digest of updated SSL configuration file."""
|
||||||
return os.path.join(self.config.config_dir, constants.UPDATED_MOD_SSL_CONF_DIGEST)
|
return os.path.join(self.config.config_dir, constants.UPDATED_MOD_SSL_CONF_DIGEST)
|
||||||
|
|
||||||
|
def _open_module_file(self, ssl_module_location):
|
||||||
|
"""Extract the open lines of openssl_version for testing purposes"""
|
||||||
|
try:
|
||||||
|
with open(ssl_module_location, mode="rb") as f:
|
||||||
|
contents = f.read()
|
||||||
|
except IOError as error:
|
||||||
|
logger.debug(str(error), exc_info=True)
|
||||||
|
return None
|
||||||
|
return contents
|
||||||
|
|
||||||
|
def openssl_version(self, warn_on_no_mod_ssl=True):
|
||||||
|
"""Lazily retrieve openssl version
|
||||||
|
|
||||||
|
:param bool warn_on_no_mod_ssl: `True` if we should warn if mod_ssl is not found. Set to
|
||||||
|
`False` when we know we'll try to enable mod_ssl later. This is currently debian/ubuntu,
|
||||||
|
when called from `prepare`.
|
||||||
|
|
||||||
|
:return: the OpenSSL version as a string, or None.
|
||||||
|
:rtype: str or None
|
||||||
|
"""
|
||||||
|
if self._openssl_version:
|
||||||
|
return self._openssl_version
|
||||||
|
# Step 1. Check for LoadModule directive
|
||||||
|
try:
|
||||||
|
ssl_module_location = self.parser.modules['ssl_module']
|
||||||
|
except KeyError:
|
||||||
|
if warn_on_no_mod_ssl:
|
||||||
|
logger.warning("Could not find ssl_module; not disabling session tickets.")
|
||||||
|
return None
|
||||||
|
if not ssl_module_location:
|
||||||
|
logger.warning("Could not find ssl_module; not disabling session tickets.")
|
||||||
|
return None
|
||||||
|
ssl_module_location = self.parser.standard_path_from_server_root(ssl_module_location)
|
||||||
|
# Step 2. Grep in the .so for openssl version
|
||||||
|
contents = self._open_module_file(ssl_module_location)
|
||||||
|
if not contents:
|
||||||
|
logger.warning("Unable to read ssl_module file; not disabling session tickets.")
|
||||||
|
return None
|
||||||
|
# looks like: OpenSSL 1.0.2s 28 May 2019
|
||||||
|
matches = re.findall(br"OpenSSL ([0-9]\.[^ ]+) ", contents)
|
||||||
|
if not matches:
|
||||||
|
logger.warning("Could not find OpenSSL version; not disabling session tickets.")
|
||||||
|
return None
|
||||||
|
self._openssl_version = matches[0].decode('UTF-8')
|
||||||
|
return self._openssl_version
|
||||||
|
|
||||||
def prepare(self):
|
def prepare(self):
|
||||||
"""Prepare the authenticator/installer.
|
"""Prepare the authenticator/installer.
|
||||||
|
|
||||||
@@ -253,14 +325,26 @@ class ApacheConfigurator(common.Installer):
|
|||||||
# Perform the actual Augeas initialization to be able to react
|
# Perform the actual Augeas initialization to be able to react
|
||||||
self.parser = self.get_parser()
|
self.parser = self.get_parser()
|
||||||
|
|
||||||
|
# Set up ParserNode root
|
||||||
|
pn_meta = {"augeasparser": self.parser,
|
||||||
|
"augeaspath": self.parser.get_root_augpath(),
|
||||||
|
"ac_ast": None}
|
||||||
|
if self.USE_PARSERNODE:
|
||||||
|
self.parser_root = self.get_parsernode_root(pn_meta)
|
||||||
|
self.parsed_paths = self.parser_root.parsed_paths()
|
||||||
|
|
||||||
# Check for errors in parsing files with Augeas
|
# Check for errors in parsing files with Augeas
|
||||||
self.parser.check_parsing_errors("httpd.aug")
|
self.parser.check_parsing_errors("httpd.aug")
|
||||||
|
|
||||||
# Get all of the available vhosts
|
# Get all of the available vhosts
|
||||||
self.vhosts = self.get_virtual_hosts()
|
self.vhosts = self.get_virtual_hosts()
|
||||||
|
|
||||||
|
# We may try to enable mod_ssl later. If so, we shouldn't warn if we can't find it now.
|
||||||
|
# This is currently only true for debian/ubuntu.
|
||||||
|
warn_on_no_mod_ssl = not self.option("handle_modules")
|
||||||
self.install_ssl_options_conf(self.mod_ssl_conf,
|
self.install_ssl_options_conf(self.mod_ssl_conf,
|
||||||
self.updated_mod_ssl_conf_digest)
|
self.updated_mod_ssl_conf_digest,
|
||||||
|
warn_on_no_mod_ssl)
|
||||||
|
|
||||||
# Prevent two Apache plugins from modifying a config at once
|
# Prevent two Apache plugins from modifying a config at once
|
||||||
try:
|
try:
|
||||||
@@ -348,6 +432,28 @@ class ApacheConfigurator(common.Installer):
|
|||||||
self.option("server_root"), self.conf("vhost-root"),
|
self.option("server_root"), self.conf("vhost-root"),
|
||||||
self.version, configurator=self)
|
self.version, configurator=self)
|
||||||
|
|
||||||
|
def get_parsernode_root(self, metadata):
|
||||||
|
"""Initializes the ParserNode parser root instance."""
|
||||||
|
|
||||||
|
if HAS_APACHECONFIG:
|
||||||
|
apache_vars = {}
|
||||||
|
apache_vars["defines"] = apache_util.parse_defines(self.option("ctl"))
|
||||||
|
apache_vars["includes"] = apache_util.parse_includes(self.option("ctl"))
|
||||||
|
apache_vars["modules"] = apache_util.parse_modules(self.option("ctl"))
|
||||||
|
metadata["apache_vars"] = apache_vars
|
||||||
|
|
||||||
|
with open(self.parser.loc["root"]) as f:
|
||||||
|
with apacheconfig.make_loader(writable=True,
|
||||||
|
**apacheconfig.flavors.NATIVE_APACHE) as loader:
|
||||||
|
metadata["ac_ast"] = loader.loads(f.read())
|
||||||
|
|
||||||
|
return dualparser.DualBlockNode(
|
||||||
|
name=assertions.PASS,
|
||||||
|
ancestor=None,
|
||||||
|
filepath=self.parser.loc["root"],
|
||||||
|
metadata=metadata
|
||||||
|
)
|
||||||
|
|
||||||
def _wildcard_domain(self, domain):
|
def _wildcard_domain(self, domain):
|
||||||
"""
|
"""
|
||||||
Checks if domain is a wildcard domain
|
Checks if domain is a wildcard domain
|
||||||
@@ -394,7 +500,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
counterpart, should one get created
|
counterpart, should one get created
|
||||||
|
|
||||||
:returns: List of VirtualHosts or None
|
:returns: List of VirtualHosts or None
|
||||||
:rtype: `list` of :class:`~certbot_apache.obj.VirtualHost`
|
:rtype: `list` of :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if self._wildcard_domain(domain):
|
if self._wildcard_domain(domain):
|
||||||
@@ -442,7 +548,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
|
|
||||||
# Go through the vhosts, making sure that we cover all the names
|
# Go through the vhosts, making sure that we cover all the names
|
||||||
# present, but preferring the SSL vhosts
|
# present, but preferring the SSL vhosts
|
||||||
filtered_vhosts = dict()
|
filtered_vhosts = {}
|
||||||
for vhost in vhosts:
|
for vhost in vhosts:
|
||||||
for name in vhost.get_names():
|
for name in vhost.get_names():
|
||||||
if vhost.ssl:
|
if vhost.ssl:
|
||||||
@@ -453,7 +559,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
filtered_vhosts[name] = vhost
|
filtered_vhosts[name] = vhost
|
||||||
|
|
||||||
# Only unique VHost objects
|
# Only unique VHost objects
|
||||||
dialog_input = set([vhost for vhost in filtered_vhosts.values()])
|
dialog_input = set(filtered_vhosts.values())
|
||||||
|
|
||||||
# Ask the user which of names to enable, expect list of names back
|
# Ask the user which of names to enable, expect list of names back
|
||||||
dialog_output = display_ops.select_vhost_multiple(list(dialog_input))
|
dialog_output = display_ops.select_vhost_multiple(list(dialog_input))
|
||||||
@@ -468,7 +574,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
|
|
||||||
# Make sure we create SSL vhosts for the ones that are HTTP only
|
# Make sure we create SSL vhosts for the ones that are HTTP only
|
||||||
# if requested.
|
# if requested.
|
||||||
return_vhosts = list()
|
return_vhosts = []
|
||||||
for vhost in dialog_output:
|
for vhost in dialog_output:
|
||||||
if not vhost.ssl:
|
if not vhost.ssl:
|
||||||
return_vhosts.append(self.make_vhost_ssl(vhost))
|
return_vhosts.append(self.make_vhost_ssl(vhost))
|
||||||
@@ -569,7 +675,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
counterpart, should one get created
|
counterpart, should one get created
|
||||||
|
|
||||||
:returns: vhost associated with name
|
:returns: vhost associated with name
|
||||||
:rtype: :class:`~certbot_apache.obj.VirtualHost`
|
:rtype: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:raises .errors.PluginError: If no vhost is available or chosen
|
:raises .errors.PluginError: If no vhost is available or chosen
|
||||||
|
|
||||||
@@ -604,9 +710,9 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"in the Apache config.",
|
"in the Apache config.",
|
||||||
target_name)
|
target_name)
|
||||||
raise errors.PluginError("No vhost selected")
|
raise errors.PluginError("No vhost selected")
|
||||||
elif temp:
|
if temp:
|
||||||
return vhost
|
return vhost
|
||||||
elif not vhost.ssl:
|
if not vhost.ssl:
|
||||||
addrs = self._get_proposed_addrs(vhost, "443")
|
addrs = self._get_proposed_addrs(vhost, "443")
|
||||||
# TODO: Conflicts is too conservative
|
# TODO: Conflicts is too conservative
|
||||||
if not any(vhost.enabled and vhost.conflicts(addrs) for
|
if not any(vhost.enabled and vhost.conflicts(addrs) for
|
||||||
@@ -672,7 +778,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
|
|
||||||
:param str target_name: domain handled by the desired vhost
|
:param str target_name: domain handled by the desired vhost
|
||||||
:param vhosts: vhosts to consider
|
:param vhosts: vhosts to consider
|
||||||
:type vhosts: `collections.Iterable` of :class:`~certbot_apache.obj.VirtualHost`
|
:type vhosts: `collections.Iterable` of :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
:param bool filter_defaults: whether a vhost with a _default_
|
:param bool filter_defaults: whether a vhost with a _default_
|
||||||
addr is acceptable
|
addr is acceptable
|
||||||
|
|
||||||
@@ -764,7 +870,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
|
|
||||||
return util.get_filtered_names(all_names)
|
return util.get_filtered_names(all_names)
|
||||||
|
|
||||||
def get_name_from_ip(self, addr): # pylint: disable=no-self-use
|
def get_name_from_ip(self, addr):
|
||||||
"""Returns a reverse dns name if available.
|
"""Returns a reverse dns name if available.
|
||||||
|
|
||||||
:param addr: IP Address
|
:param addr: IP Address
|
||||||
@@ -814,7 +920,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"""Helper function for get_virtual_hosts().
|
"""Helper function for get_virtual_hosts().
|
||||||
|
|
||||||
:param host: In progress vhost whose names will be added
|
:param host: In progress vhost whose names will be added
|
||||||
:type host: :class:`~certbot_apache.obj.VirtualHost`
|
:type host: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -833,7 +939,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
:param str path: Augeas path to virtual host
|
:param str path: Augeas path to virtual host
|
||||||
|
|
||||||
:returns: newly created vhost
|
:returns: newly created vhost
|
||||||
:rtype: :class:`~certbot_apache.obj.VirtualHost`
|
:rtype: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
addrs = set()
|
addrs = set()
|
||||||
@@ -872,9 +978,32 @@ class ApacheConfigurator(common.Installer):
|
|||||||
return vhost
|
return vhost
|
||||||
|
|
||||||
def get_virtual_hosts(self):
|
def get_virtual_hosts(self):
|
||||||
|
"""
|
||||||
|
Temporary wrapper for legacy and ParserNode version for
|
||||||
|
get_virtual_hosts. This should be replaced with the ParserNode
|
||||||
|
implementation when ready.
|
||||||
|
"""
|
||||||
|
|
||||||
|
v1_vhosts = self.get_virtual_hosts_v1()
|
||||||
|
if self.USE_PARSERNODE and HAS_APACHECONFIG:
|
||||||
|
v2_vhosts = self.get_virtual_hosts_v2()
|
||||||
|
|
||||||
|
for v1_vh in v1_vhosts:
|
||||||
|
found = False
|
||||||
|
for v2_vh in v2_vhosts:
|
||||||
|
if assertions.isEqualVirtualHost(v1_vh, v2_vh):
|
||||||
|
found = True
|
||||||
|
break
|
||||||
|
if not found:
|
||||||
|
raise AssertionError("Equivalent for {} was not found".format(v1_vh.path))
|
||||||
|
|
||||||
|
return v2_vhosts
|
||||||
|
return v1_vhosts
|
||||||
|
|
||||||
|
def get_virtual_hosts_v1(self):
|
||||||
"""Returns list of virtual hosts found in the Apache configuration.
|
"""Returns list of virtual hosts found in the Apache configuration.
|
||||||
|
|
||||||
:returns: List of :class:`~certbot_apache.obj.VirtualHost`
|
:returns: List of :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
objects found in configuration
|
objects found in configuration
|
||||||
:rtype: list
|
:rtype: list
|
||||||
|
|
||||||
@@ -924,6 +1053,80 @@ class ApacheConfigurator(common.Installer):
|
|||||||
vhs.append(new_vhost)
|
vhs.append(new_vhost)
|
||||||
return vhs
|
return vhs
|
||||||
|
|
||||||
|
def get_virtual_hosts_v2(self):
|
||||||
|
"""Returns list of virtual hosts found in the Apache configuration using
|
||||||
|
ParserNode interface.
|
||||||
|
:returns: List of :class:`~certbot_apache.obj.VirtualHost`
|
||||||
|
objects found in configuration
|
||||||
|
:rtype: list
|
||||||
|
"""
|
||||||
|
|
||||||
|
vhs = []
|
||||||
|
vhosts = self.parser_root.find_blocks("VirtualHost", exclude=False)
|
||||||
|
for vhblock in vhosts:
|
||||||
|
vhs.append(self._create_vhost_v2(vhblock))
|
||||||
|
return vhs
|
||||||
|
|
||||||
|
def _create_vhost_v2(self, node):
|
||||||
|
"""Used by get_virtual_hosts_v2 to create vhost objects using ParserNode
|
||||||
|
interfaces.
|
||||||
|
:param interfaces.BlockNode node: The BlockNode object of VirtualHost block
|
||||||
|
:returns: newly created vhost
|
||||||
|
:rtype: :class:`~certbot_apache.obj.VirtualHost`
|
||||||
|
"""
|
||||||
|
addrs = set()
|
||||||
|
for param in node.parameters:
|
||||||
|
addrs.add(obj.Addr.fromstring(param))
|
||||||
|
|
||||||
|
is_ssl = False
|
||||||
|
# Exclusion to match the behavior in get_virtual_hosts_v2
|
||||||
|
sslengine = node.find_directives("SSLEngine", exclude=False)
|
||||||
|
if sslengine:
|
||||||
|
for directive in sslengine:
|
||||||
|
if directive.parameters[0].lower() == "on":
|
||||||
|
is_ssl = True
|
||||||
|
break
|
||||||
|
|
||||||
|
# "SSLEngine on" might be set outside of <VirtualHost>
|
||||||
|
# Treat vhosts with port 443 as ssl vhosts
|
||||||
|
for addr in addrs:
|
||||||
|
if addr.get_port() == "443":
|
||||||
|
is_ssl = True
|
||||||
|
|
||||||
|
enabled = apache_util.included_in_paths(node.filepath, self.parsed_paths)
|
||||||
|
|
||||||
|
macro = False
|
||||||
|
# Check if the VirtualHost is contained in a mod_macro block
|
||||||
|
if node.find_ancestors("Macro"):
|
||||||
|
macro = True
|
||||||
|
vhost = obj.VirtualHost(
|
||||||
|
node.filepath, None, addrs, is_ssl, enabled, modmacro=macro, node=node
|
||||||
|
)
|
||||||
|
self._populate_vhost_names_v2(vhost)
|
||||||
|
return vhost
|
||||||
|
|
||||||
|
def _populate_vhost_names_v2(self, vhost):
|
||||||
|
"""Helper function that populates the VirtualHost names.
|
||||||
|
:param host: In progress vhost whose names will be added
|
||||||
|
:type host: :class:`~certbot_apache.obj.VirtualHost`
|
||||||
|
"""
|
||||||
|
|
||||||
|
servername_match = vhost.node.find_directives("ServerName",
|
||||||
|
exclude=False)
|
||||||
|
serveralias_match = vhost.node.find_directives("ServerAlias",
|
||||||
|
exclude=False)
|
||||||
|
|
||||||
|
servername = None
|
||||||
|
if servername_match:
|
||||||
|
servername = servername_match[-1].parameters[-1]
|
||||||
|
|
||||||
|
if not vhost.modmacro:
|
||||||
|
for alias in serveralias_match:
|
||||||
|
for serveralias in alias.parameters:
|
||||||
|
vhost.aliases.add(serveralias)
|
||||||
|
vhost.name = servername
|
||||||
|
|
||||||
|
|
||||||
def is_name_vhost(self, target_addr):
|
def is_name_vhost(self, target_addr):
|
||||||
"""Returns if vhost is a name based vhost
|
"""Returns if vhost is a name based vhost
|
||||||
|
|
||||||
@@ -931,7 +1134,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
now NameVirtualHosts. If version is earlier than 2.4, check if addr
|
now NameVirtualHosts. If version is earlier than 2.4, check if addr
|
||||||
has a NameVirtualHost directive in the Apache config
|
has a NameVirtualHost directive in the Apache config
|
||||||
|
|
||||||
:param certbot_apache.obj.Addr target_addr: vhost address
|
:param certbot_apache._internal.obj.Addr target_addr: vhost address
|
||||||
|
|
||||||
:returns: Success
|
:returns: Success
|
||||||
:rtype: bool
|
:rtype: bool
|
||||||
@@ -949,19 +1152,18 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"""Adds NameVirtualHost directive for given address.
|
"""Adds NameVirtualHost directive for given address.
|
||||||
|
|
||||||
:param addr: Address that will be added as NameVirtualHost directive
|
:param addr: Address that will be added as NameVirtualHost directive
|
||||||
:type addr: :class:`~certbot_apache.obj.Addr`
|
:type addr: :class:`~certbot_apache._internal.obj.Addr`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
loc = parser.get_aug_path(self.parser.loc["name"])
|
loc = parser.get_aug_path(self.parser.loc["name"])
|
||||||
if addr.get_port() == "443":
|
if addr.get_port() == "443":
|
||||||
path = self.parser.add_dir_to_ifmodssl(
|
self.parser.add_dir_to_ifmodssl(
|
||||||
loc, "NameVirtualHost", [str(addr)])
|
loc, "NameVirtualHost", [str(addr)])
|
||||||
else:
|
else:
|
||||||
path = self.parser.add_dir(loc, "NameVirtualHost", [str(addr)])
|
self.parser.add_dir(loc, "NameVirtualHost", [str(addr)])
|
||||||
|
|
||||||
msg = ("Setting %s to be NameBasedVirtualHost\n"
|
msg = "Setting {0} to be NameBasedVirtualHost\n".format(addr)
|
||||||
"\tDirective added to %s\n" % (addr, path))
|
|
||||||
logger.debug(msg)
|
logger.debug(msg)
|
||||||
self.save_notes += msg
|
self.save_notes += msg
|
||||||
|
|
||||||
@@ -1117,8 +1319,16 @@ class ApacheConfigurator(common.Installer):
|
|||||||
self.enable_mod("socache_shmcb", temp=temp)
|
self.enable_mod("socache_shmcb", temp=temp)
|
||||||
if "ssl_module" not in self.parser.modules:
|
if "ssl_module" not in self.parser.modules:
|
||||||
self.enable_mod("ssl", temp=temp)
|
self.enable_mod("ssl", temp=temp)
|
||||||
|
# Make sure we're not throwing away any unwritten changes to the config
|
||||||
|
self.parser.ensure_augeas_state()
|
||||||
|
self.parser.aug.load()
|
||||||
|
self.parser.reset_modules() # Reset to load the new ssl_module path
|
||||||
|
# Call again because now we can gate on openssl version
|
||||||
|
self.install_ssl_options_conf(self.mod_ssl_conf,
|
||||||
|
self.updated_mod_ssl_conf_digest,
|
||||||
|
warn_on_no_mod_ssl=True)
|
||||||
|
|
||||||
def make_vhost_ssl(self, nonssl_vhost): # pylint: disable=too-many-locals
|
def make_vhost_ssl(self, nonssl_vhost):
|
||||||
"""Makes an ssl_vhost version of a nonssl_vhost.
|
"""Makes an ssl_vhost version of a nonssl_vhost.
|
||||||
|
|
||||||
Duplicates vhost and adds default ssl options
|
Duplicates vhost and adds default ssl options
|
||||||
@@ -1128,10 +1338,10 @@ class ApacheConfigurator(common.Installer):
|
|||||||
.. note:: This function saves the configuration
|
.. note:: This function saves the configuration
|
||||||
|
|
||||||
:param nonssl_vhost: Valid VH that doesn't have SSLEngine on
|
:param nonssl_vhost: Valid VH that doesn't have SSLEngine on
|
||||||
:type nonssl_vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type nonssl_vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:returns: SSL vhost
|
:returns: SSL vhost
|
||||||
:rtype: :class:`~certbot_apache.obj.VirtualHost`
|
:rtype: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:raises .errors.PluginError: If more than one virtual host is in
|
:raises .errors.PluginError: If more than one virtual host is in
|
||||||
the file or if plugin is unable to write/read vhost files.
|
the file or if plugin is unable to write/read vhost files.
|
||||||
@@ -1369,12 +1579,9 @@ class ApacheConfigurator(common.Installer):
|
|||||||
result.append(comment)
|
result.append(comment)
|
||||||
sift = True
|
sift = True
|
||||||
|
|
||||||
result.append('\n'.join(
|
result.append('\n'.join('# ' + l for l in chunk))
|
||||||
['# ' + l for l in chunk]))
|
|
||||||
continue
|
|
||||||
else:
|
else:
|
||||||
result.append('\n'.join(chunk))
|
result.append('\n'.join(chunk))
|
||||||
continue
|
|
||||||
return result, sift
|
return result, sift
|
||||||
|
|
||||||
def _get_vhost_block(self, vhost):
|
def _get_vhost_block(self, vhost):
|
||||||
@@ -1502,7 +1709,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
https://httpd.apache.org/docs/2.2/mod/core.html#namevirtualhost
|
https://httpd.apache.org/docs/2.2/mod/core.html#namevirtualhost
|
||||||
|
|
||||||
:param vhost: New virtual host that was recently created.
|
:param vhost: New virtual host that was recently created.
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
need_to_save = False
|
need_to_save = False
|
||||||
@@ -1512,7 +1719,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
for addr in vhost.addrs:
|
for addr in vhost.addrs:
|
||||||
# In Apache 2.2, when a NameVirtualHost directive is not
|
# In Apache 2.2, when a NameVirtualHost directive is not
|
||||||
# set, "*" and "_default_" will conflict when sharing a port
|
# set, "*" and "_default_" will conflict when sharing a port
|
||||||
addrs = set((addr,))
|
addrs = {addr,}
|
||||||
if addr.get_addr() in ("*", "_default_"):
|
if addr.get_addr() in ("*", "_default_"):
|
||||||
addrs.update(obj.Addr((a, addr.get_port(),))
|
addrs.update(obj.Addr((a, addr.get_port(),))
|
||||||
for a in ("*", "_default_"))
|
for a in ("*", "_default_"))
|
||||||
@@ -1537,7 +1744,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
:param str id_str: Id string for matching
|
:param str id_str: Id string for matching
|
||||||
|
|
||||||
:returns: The matched VirtualHost or None
|
:returns: The matched VirtualHost or None
|
||||||
:rtype: :class:`~certbot_apache.obj.VirtualHost` or None
|
:rtype: :class:`~certbot_apache._internal.obj.VirtualHost` or None
|
||||||
|
|
||||||
:raises .errors.PluginError: If no VirtualHost is found
|
:raises .errors.PluginError: If no VirtualHost is found
|
||||||
"""
|
"""
|
||||||
@@ -1554,7 +1761,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
used for keeping track of VirtualHost directive over time.
|
used for keeping track of VirtualHost directive over time.
|
||||||
|
|
||||||
:param vhost: Virtual host to add the id
|
:param vhost: Virtual host to add the id
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:returns: The unique ID or None
|
:returns: The unique ID or None
|
||||||
:rtype: str or None
|
:rtype: str or None
|
||||||
@@ -1576,7 +1783,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
If ID already exists, returns that instead.
|
If ID already exists, returns that instead.
|
||||||
|
|
||||||
:param vhost: Virtual host to add or find the id
|
:param vhost: Virtual host to add or find the id
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:returns: The unique ID for vhost
|
:returns: The unique ID for vhost
|
||||||
:rtype: str or None
|
:rtype: str or None
|
||||||
@@ -1605,7 +1812,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
######################################################################
|
######################################################################
|
||||||
# Enhancements
|
# Enhancements
|
||||||
######################################################################
|
######################################################################
|
||||||
def supported_enhancements(self): # pylint: disable=no-self-use
|
def supported_enhancements(self):
|
||||||
"""Returns currently supported enhancements."""
|
"""Returns currently supported enhancements."""
|
||||||
return ["redirect", "ensure-http-header", "staple-ocsp"]
|
return ["redirect", "ensure-http-header", "staple-ocsp"]
|
||||||
|
|
||||||
@@ -1614,9 +1821,9 @@ class ApacheConfigurator(common.Installer):
|
|||||||
|
|
||||||
:param str domain: domain to enhance
|
:param str domain: domain to enhance
|
||||||
:param str enhancement: enhancement type defined in
|
:param str enhancement: enhancement type defined in
|
||||||
:const:`~certbot.constants.ENHANCEMENTS`
|
:const:`~certbot.plugins.enhancements.ENHANCEMENTS`
|
||||||
:param options: options for the enhancement
|
:param options: options for the enhancement
|
||||||
See :const:`~certbot.constants.ENHANCEMENTS`
|
See :const:`~certbot.plugins.enhancements.ENHANCEMENTS`
|
||||||
documentation for appropriate parameter.
|
documentation for appropriate parameter.
|
||||||
|
|
||||||
:raises .errors.PluginError: If Enhancement is not supported, or if
|
:raises .errors.PluginError: If Enhancement is not supported, or if
|
||||||
@@ -1654,7 +1861,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"""Increase the AutoHSTS max-age value
|
"""Increase the AutoHSTS max-age value
|
||||||
|
|
||||||
:param vhost: Virtual host object to modify
|
:param vhost: Virtual host object to modify
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:param str id_str: The unique ID string of VirtualHost
|
:param str id_str: The unique ID string of VirtualHost
|
||||||
|
|
||||||
@@ -1703,7 +1910,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
try:
|
try:
|
||||||
self._autohsts = self.storage.fetch("autohsts")
|
self._autohsts = self.storage.fetch("autohsts")
|
||||||
except KeyError:
|
except KeyError:
|
||||||
self._autohsts = dict()
|
self._autohsts = {}
|
||||||
|
|
||||||
def _autohsts_save_state(self):
|
def _autohsts_save_state(self):
|
||||||
"""
|
"""
|
||||||
@@ -1738,13 +1945,13 @@ class ApacheConfigurator(common.Installer):
|
|||||||
.. note:: This function saves the configuration
|
.. note:: This function saves the configuration
|
||||||
|
|
||||||
:param ssl_vhost: Destination of traffic, an ssl enabled vhost
|
:param ssl_vhost: Destination of traffic, an ssl enabled vhost
|
||||||
:type ssl_vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type ssl_vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:param unused_options: Not currently used
|
:param unused_options: Not currently used
|
||||||
:type unused_options: Not Available
|
:type unused_options: Not Available
|
||||||
|
|
||||||
:returns: Success, general_vhost (HTTP vhost)
|
:returns: Success, general_vhost (HTTP vhost)
|
||||||
:rtype: (bool, :class:`~certbot_apache.obj.VirtualHost`)
|
:rtype: (bool, :class:`~certbot_apache._internal.obj.VirtualHost`)
|
||||||
|
|
||||||
"""
|
"""
|
||||||
min_apache_ver = (2, 3, 3)
|
min_apache_ver = (2, 3, 3)
|
||||||
@@ -1794,14 +2001,14 @@ class ApacheConfigurator(common.Installer):
|
|||||||
.. note:: This function saves the configuration
|
.. note:: This function saves the configuration
|
||||||
|
|
||||||
:param ssl_vhost: Destination of traffic, an ssl enabled vhost
|
:param ssl_vhost: Destination of traffic, an ssl enabled vhost
|
||||||
:type ssl_vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type ssl_vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:param header_substring: string that uniquely identifies a header.
|
:param header_substring: string that uniquely identifies a header.
|
||||||
e.g: Strict-Transport-Security, Upgrade-Insecure-Requests.
|
e.g: Strict-Transport-Security, Upgrade-Insecure-Requests.
|
||||||
:type str
|
:type str
|
||||||
|
|
||||||
:returns: Success, general_vhost (HTTP vhost)
|
:returns: Success, general_vhost (HTTP vhost)
|
||||||
:rtype: (bool, :class:`~certbot_apache.obj.VirtualHost`)
|
:rtype: (bool, :class:`~certbot_apache._internal.obj.VirtualHost`)
|
||||||
|
|
||||||
:raises .errors.PluginError: If no viable HTTP host can be created or
|
:raises .errors.PluginError: If no viable HTTP host can be created or
|
||||||
set with header header_substring.
|
set with header header_substring.
|
||||||
@@ -1825,11 +2032,11 @@ class ApacheConfigurator(common.Installer):
|
|||||||
ssl_vhost.filep)
|
ssl_vhost.filep)
|
||||||
|
|
||||||
def _verify_no_matching_http_header(self, ssl_vhost, header_substring):
|
def _verify_no_matching_http_header(self, ssl_vhost, header_substring):
|
||||||
"""Checks to see if an there is an existing Header directive that
|
"""Checks to see if there is an existing Header directive that
|
||||||
contains the string header_substring.
|
contains the string header_substring.
|
||||||
|
|
||||||
:param ssl_vhost: vhost to check
|
:param ssl_vhost: vhost to check
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:param header_substring: string that uniquely identifies a header.
|
:param header_substring: string that uniquely identifies a header.
|
||||||
e.g: Strict-Transport-Security, Upgrade-Insecure-Requests.
|
e.g: Strict-Transport-Security, Upgrade-Insecure-Requests.
|
||||||
@@ -1866,7 +2073,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
.. note:: This function saves the configuration
|
.. note:: This function saves the configuration
|
||||||
|
|
||||||
:param ssl_vhost: Destination of traffic, an ssl enabled vhost
|
:param ssl_vhost: Destination of traffic, an ssl enabled vhost
|
||||||
:type ssl_vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type ssl_vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:param unused_options: Not currently used
|
:param unused_options: Not currently used
|
||||||
:type unused_options: Not Available
|
:type unused_options: Not Available
|
||||||
@@ -1951,7 +2158,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
delete certbot's old rewrite rules and set the new one instead.
|
delete certbot's old rewrite rules and set the new one instead.
|
||||||
|
|
||||||
:param vhost: vhost to check
|
:param vhost: vhost to check
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:raises errors.PluginEnhancementAlreadyPresent: When the exact
|
:raises errors.PluginEnhancementAlreadyPresent: When the exact
|
||||||
certbot redirection WriteRule exists in virtual host.
|
certbot redirection WriteRule exists in virtual host.
|
||||||
@@ -1993,7 +2200,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"""Checks if there exists a RewriteRule directive in vhost
|
"""Checks if there exists a RewriteRule directive in vhost
|
||||||
|
|
||||||
:param vhost: vhost to check
|
:param vhost: vhost to check
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:returns: True if a RewriteRule directive exists.
|
:returns: True if a RewriteRule directive exists.
|
||||||
:rtype: bool
|
:rtype: bool
|
||||||
@@ -2007,7 +2214,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"""Checks if a RewriteEngine directive is on
|
"""Checks if a RewriteEngine directive is on
|
||||||
|
|
||||||
:param vhost: vhost to check
|
:param vhost: vhost to check
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
rewrite_engine_path_list = self.parser.find_dir("RewriteEngine", "on",
|
rewrite_engine_path_list = self.parser.find_dir("RewriteEngine", "on",
|
||||||
@@ -2024,10 +2231,10 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"""Creates an http_vhost specifically to redirect for the ssl_vhost.
|
"""Creates an http_vhost specifically to redirect for the ssl_vhost.
|
||||||
|
|
||||||
:param ssl_vhost: ssl vhost
|
:param ssl_vhost: ssl vhost
|
||||||
:type ssl_vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type ssl_vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:returns: tuple of the form
|
:returns: tuple of the form
|
||||||
(`success`, :class:`~certbot_apache.obj.VirtualHost`)
|
(`success`, :class:`~certbot_apache._internal.obj.VirtualHost`)
|
||||||
:rtype: tuple
|
:rtype: tuple
|
||||||
|
|
||||||
"""
|
"""
|
||||||
@@ -2153,7 +2360,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
of this method where available.
|
of this method where available.
|
||||||
|
|
||||||
:param vhost: vhost to enable
|
:param vhost: vhost to enable
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:raises .errors.NotSupportedError: If filesystem layout is not
|
:raises .errors.NotSupportedError: If filesystem layout is not
|
||||||
supported.
|
supported.
|
||||||
@@ -2171,7 +2378,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
vhost.enabled = True
|
vhost.enabled = True
|
||||||
return
|
return
|
||||||
|
|
||||||
def enable_mod(self, mod_name, temp=False): # pylint: disable=unused-argument
|
def enable_mod(self, mod_name, temp=False):
|
||||||
"""Enables module in Apache.
|
"""Enables module in Apache.
|
||||||
|
|
||||||
Both enables and reloads Apache so module is active.
|
Both enables and reloads Apache so module is active.
|
||||||
@@ -2229,7 +2436,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
error = str(err)
|
error = str(err)
|
||||||
raise errors.MisconfigurationError(error)
|
raise errors.MisconfigurationError(error)
|
||||||
|
|
||||||
def config_test(self): # pylint: disable=no-self-use
|
def config_test(self):
|
||||||
"""Check the configuration of Apache for errors.
|
"""Check the configuration of Apache for errors.
|
||||||
|
|
||||||
:raises .errors.MisconfigurationError: If config_test fails
|
:raises .errors.MisconfigurationError: If config_test fails
|
||||||
@@ -2264,7 +2471,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
if len(matches) != 1:
|
if len(matches) != 1:
|
||||||
raise errors.PluginError("Unable to find Apache version")
|
raise errors.PluginError("Unable to find Apache version")
|
||||||
|
|
||||||
return tuple([int(i) for i in matches[0].split(".")])
|
return tuple(int(i) for i in matches[0].split("."))
|
||||||
|
|
||||||
def more_info(self):
|
def more_info(self):
|
||||||
"""Human-readable string to help understand the module"""
|
"""Human-readable string to help understand the module"""
|
||||||
@@ -2279,7 +2486,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
###########################################################################
|
###########################################################################
|
||||||
# Challenges Section
|
# Challenges Section
|
||||||
###########################################################################
|
###########################################################################
|
||||||
def get_chall_pref(self, unused_domain): # pylint: disable=no-self-use
|
def get_chall_pref(self, unused_domain):
|
||||||
"""Return list of challenge preferences."""
|
"""Return list of challenge preferences."""
|
||||||
return [challenges.HTTP01]
|
return [challenges.HTTP01]
|
||||||
|
|
||||||
@@ -2333,24 +2540,29 @@ class ApacheConfigurator(common.Installer):
|
|||||||
self.restart()
|
self.restart()
|
||||||
self.parser.reset_modules()
|
self.parser.reset_modules()
|
||||||
|
|
||||||
def install_ssl_options_conf(self, options_ssl, options_ssl_digest):
|
def install_ssl_options_conf(self, options_ssl, options_ssl_digest, warn_on_no_mod_ssl=True):
|
||||||
"""Copy Certbot's SSL options file into the system's config dir if required."""
|
"""Copy Certbot's SSL options file into the system's config dir if required.
|
||||||
|
|
||||||
|
:param bool warn_on_no_mod_ssl: True if we should warn if mod_ssl is not found.
|
||||||
|
"""
|
||||||
|
|
||||||
# XXX if we ever try to enforce a local privilege boundary (eg, running
|
# XXX if we ever try to enforce a local privilege boundary (eg, running
|
||||||
# certbot for unprivileged users via setuid), this function will need
|
# certbot for unprivileged users via setuid), this function will need
|
||||||
# to be modified.
|
# to be modified.
|
||||||
return common.install_version_controlled_file(options_ssl, options_ssl_digest,
|
apache_config_path = self.pick_apache_config(warn_on_no_mod_ssl)
|
||||||
self.option("MOD_SSL_CONF_SRC"), constants.ALL_SSL_OPTIONS_HASHES)
|
|
||||||
|
return common.install_version_controlled_file(
|
||||||
|
options_ssl, options_ssl_digest, apache_config_path, constants.ALL_SSL_OPTIONS_HASHES)
|
||||||
|
|
||||||
def enable_autohsts(self, _unused_lineage, domains):
|
def enable_autohsts(self, _unused_lineage, domains):
|
||||||
"""
|
"""
|
||||||
Enable the AutoHSTS enhancement for defined domains
|
Enable the AutoHSTS enhancement for defined domains
|
||||||
|
|
||||||
:param _unused_lineage: Certificate lineage object, unused
|
:param _unused_lineage: Certificate lineage object, unused
|
||||||
:type _unused_lineage: certbot.storage.RenewableCert
|
:type _unused_lineage: certbot._internal.storage.RenewableCert
|
||||||
|
|
||||||
:param domains: List of domains in certificate to enhance
|
:param domains: List of domains in certificate to enhance
|
||||||
:type domains: str
|
:type domains: `list` of `str`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self._autohsts_fetch_state()
|
self._autohsts_fetch_state()
|
||||||
@@ -2390,7 +2602,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
"""Do the initial AutoHSTS deployment to a vhost
|
"""Do the initial AutoHSTS deployment to a vhost
|
||||||
|
|
||||||
:param ssl_vhost: The VirtualHost object to deploy the AutoHSTS
|
:param ssl_vhost: The VirtualHost object to deploy the AutoHSTS
|
||||||
:type ssl_vhost: :class:`~certbot_apache.obj.VirtualHost` or None
|
:type ssl_vhost: :class:`~certbot_apache._internal.obj.VirtualHost` or None
|
||||||
|
|
||||||
:raises errors.PluginEnhancementAlreadyPresent: When already enhanced
|
:raises errors.PluginEnhancementAlreadyPresent: When already enhanced
|
||||||
|
|
||||||
@@ -2472,7 +2684,7 @@ class ApacheConfigurator(common.Installer):
|
|||||||
and changes the HSTS max-age to a high value.
|
and changes the HSTS max-age to a high value.
|
||||||
|
|
||||||
:param lineage: Certificate lineage object
|
:param lineage: Certificate lineage object
|
||||||
:type lineage: certbot.storage.RenewableCert
|
:type lineage: certbot._internal.storage.RenewableCert
|
||||||
"""
|
"""
|
||||||
self._autohsts_fetch_state()
|
self._autohsts_fetch_state()
|
||||||
if not self._autohsts:
|
if not self._autohsts:
|
||||||
@@ -2517,4 +2729,4 @@ class ApacheConfigurator(common.Installer):
|
|||||||
self._autohsts_save_state()
|
self._autohsts_save_state()
|
||||||
|
|
||||||
|
|
||||||
AutoHSTSEnhancement.register(ApacheConfigurator) # pylint: disable=no-member
|
AutoHSTSEnhancement.register(ApacheConfigurator)
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
"""Apache plugin constants."""
|
"""Apache plugin constants."""
|
||||||
import pkg_resources
|
import pkg_resources
|
||||||
|
|
||||||
|
from certbot.compat import os
|
||||||
|
|
||||||
MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
|
MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
|
||||||
"""Name of the mod_ssl config file as saved in `IConfig.config_dir`."""
|
"""Name of the mod_ssl config file as saved in `IConfig.config_dir`."""
|
||||||
@@ -23,11 +24,14 @@ ALL_SSL_OPTIONS_HASHES = [
|
|||||||
'0fcdc81280cd179a07ec4d29d3595068b9326b455c488de4b09f585d5dafc137',
|
'0fcdc81280cd179a07ec4d29d3595068b9326b455c488de4b09f585d5dafc137',
|
||||||
'86cc09ad5415cd6d5f09a947fe2501a9344328b1e8a8b458107ea903e80baa6c',
|
'86cc09ad5415cd6d5f09a947fe2501a9344328b1e8a8b458107ea903e80baa6c',
|
||||||
'06675349e457eae856120cdebb564efe546f0b87399f2264baeb41e442c724c7',
|
'06675349e457eae856120cdebb564efe546f0b87399f2264baeb41e442c724c7',
|
||||||
|
'5cc003edd93fb9cd03d40c7686495f8f058f485f75b5e764b789245a386e6daf',
|
||||||
|
'007cd497a56a3bb8b6a2c1aeb4997789e7e38992f74e44cc5d13a625a738ac73',
|
||||||
|
'34783b9e2210f5c4a23bced2dfd7ec289834716673354ed7c7abf69fe30192a3',
|
||||||
]
|
]
|
||||||
"""SHA256 hashes of the contents of previous versions of all versions of MOD_SSL_CONF_SRC"""
|
"""SHA256 hashes of the contents of previous versions of all versions of MOD_SSL_CONF_SRC"""
|
||||||
|
|
||||||
AUGEAS_LENS_DIR = pkg_resources.resource_filename(
|
AUGEAS_LENS_DIR = pkg_resources.resource_filename(
|
||||||
"certbot_apache", "augeas_lens")
|
"certbot_apache", os.path.join("_internal", "augeas_lens"))
|
||||||
"""Path to the Augeas lens directory"""
|
"""Path to the Augeas lens directory"""
|
||||||
|
|
||||||
REWRITE_HTTPS_ARGS = [
|
REWRITE_HTTPS_ARGS = [
|
||||||
@@ -3,10 +3,10 @@ import logging
|
|||||||
|
|
||||||
import zope.component
|
import zope.component
|
||||||
|
|
||||||
import certbot.display.util as display_util
|
|
||||||
from certbot import errors
|
from certbot import errors
|
||||||
from certbot import interfaces
|
from certbot import interfaces
|
||||||
from certbot.compat import os
|
from certbot.compat import os
|
||||||
|
import certbot.display.util as display_util
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -21,7 +21,7 @@ def select_vhost_multiple(vhosts):
|
|||||||
:rtype: :class:`list`of type `~obj.Vhost`
|
:rtype: :class:`list`of type `~obj.Vhost`
|
||||||
"""
|
"""
|
||||||
if not vhosts:
|
if not vhosts:
|
||||||
return list()
|
return []
|
||||||
tags_list = [vhost.display_repr()+"\n" for vhost in vhosts]
|
tags_list = [vhost.display_repr()+"\n" for vhost in vhosts]
|
||||||
# Remove the extra newline from the last entry
|
# Remove the extra newline from the last entry
|
||||||
if tags_list:
|
if tags_list:
|
||||||
@@ -37,7 +37,7 @@ def select_vhost_multiple(vhosts):
|
|||||||
def _reversemap_vhosts(names, vhosts):
|
def _reversemap_vhosts(names, vhosts):
|
||||||
"""Helper function for select_vhost_multiple for mapping string
|
"""Helper function for select_vhost_multiple for mapping string
|
||||||
representations back to actual vhost objects"""
|
representations back to actual vhost objects"""
|
||||||
return_vhosts = list()
|
return_vhosts = []
|
||||||
|
|
||||||
for selection in names:
|
for selection in names:
|
||||||
for vhost in vhosts:
|
for vhost in vhosts:
|
||||||
@@ -77,7 +77,7 @@ def _vhost_menu(domain, vhosts):
|
|||||||
|
|
||||||
if free_chars < 2:
|
if free_chars < 2:
|
||||||
logger.debug("Display size is too small for "
|
logger.debug("Display size is too small for "
|
||||||
"certbot_apache.display_ops._vhost_menu()")
|
"certbot_apache._internal.display_ops._vhost_menu()")
|
||||||
# This runs the edge off the screen, but it doesn't cause an "error"
|
# This runs the edge off the screen, but it doesn't cause an "error"
|
||||||
filename_size = 1
|
filename_size = 1
|
||||||
disp_name_size = 1
|
disp_name_size = 1
|
||||||
306
certbot-apache/certbot_apache/_internal/dualparser.py
Normal file
306
certbot-apache/certbot_apache/_internal/dualparser.py
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
""" Dual ParserNode implementation """
|
||||||
|
from certbot_apache._internal import assertions
|
||||||
|
from certbot_apache._internal import augeasparser
|
||||||
|
from certbot_apache._internal import apacheparser
|
||||||
|
|
||||||
|
|
||||||
|
class DualNodeBase(object):
|
||||||
|
""" Dual parser interface for in development testing. This is used as the
|
||||||
|
base class for dual parser interface classes. This class handles runtime
|
||||||
|
attribute value assertions."""
|
||||||
|
|
||||||
|
def save(self, msg): # pragma: no cover
|
||||||
|
""" Call save for both parsers """
|
||||||
|
self.primary.save(msg)
|
||||||
|
self.secondary.save(msg)
|
||||||
|
|
||||||
|
def __getattr__(self, aname):
|
||||||
|
""" Attribute value assertion """
|
||||||
|
firstval = getattr(self.primary, aname)
|
||||||
|
secondval = getattr(self.secondary, aname)
|
||||||
|
exclusions = [
|
||||||
|
# Metadata will inherently be different, as ApacheParserNode does
|
||||||
|
# not have Augeas paths and so on.
|
||||||
|
aname == "metadata",
|
||||||
|
callable(firstval)
|
||||||
|
]
|
||||||
|
if not any(exclusions):
|
||||||
|
assertions.assertEqualSimple(firstval, secondval)
|
||||||
|
return firstval
|
||||||
|
|
||||||
|
def find_ancestors(self, name):
|
||||||
|
""" Traverses the ancestor tree and returns ancestors matching name """
|
||||||
|
return self._find_helper(DualBlockNode, "find_ancestors", name)
|
||||||
|
|
||||||
|
def _find_helper(self, nodeclass, findfunc, search, **kwargs):
|
||||||
|
"""A helper for find_* functions. The function specific attributes should
|
||||||
|
be passed as keyword arguments.
|
||||||
|
|
||||||
|
:param interfaces.ParserNode nodeclass: The node class for results.
|
||||||
|
:param str findfunc: Name of the find function to call
|
||||||
|
:param str search: The search term
|
||||||
|
"""
|
||||||
|
|
||||||
|
primary_res = getattr(self.primary, findfunc)(search, **kwargs)
|
||||||
|
secondary_res = getattr(self.secondary, findfunc)(search, **kwargs)
|
||||||
|
|
||||||
|
# The order of search results for Augeas implementation cannot be
|
||||||
|
# assured.
|
||||||
|
|
||||||
|
pass_primary = assertions.isPassNodeList(primary_res)
|
||||||
|
pass_secondary = assertions.isPassNodeList(secondary_res)
|
||||||
|
new_nodes = []
|
||||||
|
|
||||||
|
if pass_primary and pass_secondary:
|
||||||
|
# Both unimplemented
|
||||||
|
new_nodes.append(nodeclass(primary=primary_res[0],
|
||||||
|
secondary=secondary_res[0])) # pragma: no cover
|
||||||
|
elif pass_primary:
|
||||||
|
for c in secondary_res:
|
||||||
|
new_nodes.append(nodeclass(primary=primary_res[0],
|
||||||
|
secondary=c))
|
||||||
|
elif pass_secondary:
|
||||||
|
for c in primary_res:
|
||||||
|
new_nodes.append(nodeclass(primary=c,
|
||||||
|
secondary=secondary_res[0]))
|
||||||
|
else:
|
||||||
|
assert len(primary_res) == len(secondary_res)
|
||||||
|
matches = self._create_matching_list(primary_res, secondary_res)
|
||||||
|
for p, s in matches:
|
||||||
|
new_nodes.append(nodeclass(primary=p, secondary=s))
|
||||||
|
|
||||||
|
return new_nodes
|
||||||
|
|
||||||
|
|
||||||
|
class DualCommentNode(DualNodeBase):
|
||||||
|
""" Dual parser implementation of CommentNode interface """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
""" This initialization implementation allows ordinary initialization
|
||||||
|
of CommentNode objects as well as creating a DualCommentNode object
|
||||||
|
using precreated or fetched CommentNode objects if provided as optional
|
||||||
|
arguments primary and secondary.
|
||||||
|
|
||||||
|
Parameters other than the following are from interfaces.CommentNode:
|
||||||
|
|
||||||
|
:param CommentNode primary: Primary pre-created CommentNode, mainly
|
||||||
|
used when creating new DualParser nodes using add_* methods.
|
||||||
|
:param CommentNode secondary: Secondary pre-created CommentNode
|
||||||
|
"""
|
||||||
|
|
||||||
|
kwargs.setdefault("primary", None)
|
||||||
|
kwargs.setdefault("secondary", None)
|
||||||
|
primary = kwargs.pop("primary")
|
||||||
|
secondary = kwargs.pop("secondary")
|
||||||
|
|
||||||
|
if primary or secondary:
|
||||||
|
assert primary and secondary
|
||||||
|
self.primary = primary
|
||||||
|
self.secondary = secondary
|
||||||
|
else:
|
||||||
|
self.primary = augeasparser.AugeasCommentNode(**kwargs)
|
||||||
|
self.secondary = apacheparser.ApacheCommentNode(**kwargs)
|
||||||
|
|
||||||
|
assertions.assertEqual(self.primary, self.secondary)
|
||||||
|
|
||||||
|
|
||||||
|
class DualDirectiveNode(DualNodeBase):
|
||||||
|
""" Dual parser implementation of DirectiveNode interface """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
""" This initialization implementation allows ordinary initialization
|
||||||
|
of DirectiveNode objects as well as creating a DualDirectiveNode object
|
||||||
|
using precreated or fetched DirectiveNode objects if provided as optional
|
||||||
|
arguments primary and secondary.
|
||||||
|
|
||||||
|
Parameters other than the following are from interfaces.DirectiveNode:
|
||||||
|
|
||||||
|
:param DirectiveNode primary: Primary pre-created DirectiveNode, mainly
|
||||||
|
used when creating new DualParser nodes using add_* methods.
|
||||||
|
:param DirectiveNode secondary: Secondary pre-created DirectiveNode
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
kwargs.setdefault("primary", None)
|
||||||
|
kwargs.setdefault("secondary", None)
|
||||||
|
primary = kwargs.pop("primary")
|
||||||
|
secondary = kwargs.pop("secondary")
|
||||||
|
|
||||||
|
if primary or secondary:
|
||||||
|
assert primary and secondary
|
||||||
|
self.primary = primary
|
||||||
|
self.secondary = secondary
|
||||||
|
else:
|
||||||
|
self.primary = augeasparser.AugeasDirectiveNode(**kwargs)
|
||||||
|
self.secondary = apacheparser.ApacheDirectiveNode(**kwargs)
|
||||||
|
|
||||||
|
assertions.assertEqual(self.primary, self.secondary)
|
||||||
|
|
||||||
|
def set_parameters(self, parameters):
|
||||||
|
""" Sets parameters and asserts that both implementation successfully
|
||||||
|
set the parameter sequence """
|
||||||
|
|
||||||
|
self.primary.set_parameters(parameters)
|
||||||
|
self.secondary.set_parameters(parameters)
|
||||||
|
assertions.assertEqual(self.primary, self.secondary)
|
||||||
|
|
||||||
|
|
||||||
|
class DualBlockNode(DualNodeBase):
|
||||||
|
""" Dual parser implementation of BlockNode interface """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
""" This initialization implementation allows ordinary initialization
|
||||||
|
of BlockNode objects as well as creating a DualBlockNode object
|
||||||
|
using precreated or fetched BlockNode objects if provided as optional
|
||||||
|
arguments primary and secondary.
|
||||||
|
|
||||||
|
Parameters other than the following are from interfaces.BlockNode:
|
||||||
|
|
||||||
|
:param BlockNode primary: Primary pre-created BlockNode, mainly
|
||||||
|
used when creating new DualParser nodes using add_* methods.
|
||||||
|
:param BlockNode secondary: Secondary pre-created BlockNode
|
||||||
|
"""
|
||||||
|
|
||||||
|
kwargs.setdefault("primary", None)
|
||||||
|
kwargs.setdefault("secondary", None)
|
||||||
|
primary = kwargs.pop("primary")
|
||||||
|
secondary = kwargs.pop("secondary")
|
||||||
|
|
||||||
|
if primary or secondary:
|
||||||
|
assert primary and secondary
|
||||||
|
self.primary = primary
|
||||||
|
self.secondary = secondary
|
||||||
|
else:
|
||||||
|
self.primary = augeasparser.AugeasBlockNode(**kwargs)
|
||||||
|
self.secondary = apacheparser.ApacheBlockNode(**kwargs)
|
||||||
|
|
||||||
|
assertions.assertEqual(self.primary, self.secondary)
|
||||||
|
|
||||||
|
def add_child_block(self, name, parameters=None, position=None):
|
||||||
|
""" Creates a new child BlockNode, asserts that both implementations
|
||||||
|
did it in a similar way, and returns a newly created DualBlockNode object
|
||||||
|
encapsulating both of the newly created objects """
|
||||||
|
|
||||||
|
primary_new = self.primary.add_child_block(name, parameters, position)
|
||||||
|
secondary_new = self.secondary.add_child_block(name, parameters, position)
|
||||||
|
assertions.assertEqual(primary_new, secondary_new)
|
||||||
|
new_block = DualBlockNode(primary=primary_new, secondary=secondary_new)
|
||||||
|
return new_block
|
||||||
|
|
||||||
|
def add_child_directive(self, name, parameters=None, position=None):
|
||||||
|
""" Creates a new child DirectiveNode, asserts that both implementations
|
||||||
|
did it in a similar way, and returns a newly created DualDirectiveNode
|
||||||
|
object encapsulating both of the newly created objects """
|
||||||
|
|
||||||
|
primary_new = self.primary.add_child_directive(name, parameters, position)
|
||||||
|
secondary_new = self.secondary.add_child_directive(name, parameters, position)
|
||||||
|
assertions.assertEqual(primary_new, secondary_new)
|
||||||
|
new_dir = DualDirectiveNode(primary=primary_new, secondary=secondary_new)
|
||||||
|
return new_dir
|
||||||
|
|
||||||
|
def add_child_comment(self, comment="", position=None):
|
||||||
|
""" Creates a new child CommentNode, asserts that both implementations
|
||||||
|
did it in a similar way, and returns a newly created DualCommentNode
|
||||||
|
object encapsulating both of the newly created objects """
|
||||||
|
|
||||||
|
primary_new = self.primary.add_child_comment(comment, position)
|
||||||
|
secondary_new = self.secondary.add_child_comment(comment, position)
|
||||||
|
assertions.assertEqual(primary_new, secondary_new)
|
||||||
|
new_comment = DualCommentNode(primary=primary_new, secondary=secondary_new)
|
||||||
|
return new_comment
|
||||||
|
|
||||||
|
def _create_matching_list(self, primary_list, secondary_list):
|
||||||
|
""" Matches the list of primary_list to a list of secondary_list and
|
||||||
|
returns a list of tuples. This is used to create results for find_
|
||||||
|
methods.
|
||||||
|
|
||||||
|
This helper function exists, because we cannot ensure that the list of
|
||||||
|
search results returned by primary.find_* and secondary.find_* are ordered
|
||||||
|
in a same way. The function pairs the same search results from both
|
||||||
|
implementations to a list of tuples.
|
||||||
|
"""
|
||||||
|
|
||||||
|
matched = []
|
||||||
|
for p in primary_list:
|
||||||
|
match = None
|
||||||
|
for s in secondary_list:
|
||||||
|
try:
|
||||||
|
assertions.assertEqual(p, s)
|
||||||
|
match = s
|
||||||
|
break
|
||||||
|
except AssertionError:
|
||||||
|
continue
|
||||||
|
if match:
|
||||||
|
matched.append((p, match))
|
||||||
|
else:
|
||||||
|
raise AssertionError("Could not find a matching node.")
|
||||||
|
return matched
|
||||||
|
|
||||||
|
def find_blocks(self, name, exclude=True):
|
||||||
|
"""
|
||||||
|
Performs a search for BlockNodes using both implementations and does simple
|
||||||
|
checks for results. This is built upon the assumption that unimplemented
|
||||||
|
find_* methods return a list with a single assertion passing object.
|
||||||
|
After the assertion, it creates a list of newly created DualBlockNode
|
||||||
|
instances that encapsulate the pairs of returned BlockNode objects.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self._find_helper(DualBlockNode, "find_blocks", name,
|
||||||
|
exclude=exclude)
|
||||||
|
|
||||||
|
def find_directives(self, name, exclude=True):
|
||||||
|
"""
|
||||||
|
Performs a search for DirectiveNodes using both implementations and
|
||||||
|
checks the results. This is built upon the assumption that unimplemented
|
||||||
|
find_* methods return a list with a single assertion passing object.
|
||||||
|
After the assertion, it creates a list of newly created DualDirectiveNode
|
||||||
|
instances that encapsulate the pairs of returned DirectiveNode objects.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self._find_helper(DualDirectiveNode, "find_directives", name,
|
||||||
|
exclude=exclude)
|
||||||
|
|
||||||
|
def find_comments(self, comment):
|
||||||
|
"""
|
||||||
|
Performs a search for CommentNodes using both implementations and
|
||||||
|
checks the results. This is built upon the assumption that unimplemented
|
||||||
|
find_* methods return a list with a single assertion passing object.
|
||||||
|
After the assertion, it creates a list of newly created DualCommentNode
|
||||||
|
instances that encapsulate the pairs of returned CommentNode objects.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self._find_helper(DualCommentNode, "find_comments", comment)
|
||||||
|
|
||||||
|
def delete_child(self, child):
|
||||||
|
"""Deletes a child from the ParserNode implementations. The actual
|
||||||
|
ParserNode implementations are used here directly in order to be able
|
||||||
|
to match a child to the list of children."""
|
||||||
|
|
||||||
|
self.primary.delete_child(child.primary)
|
||||||
|
self.secondary.delete_child(child.secondary)
|
||||||
|
|
||||||
|
def unsaved_files(self):
|
||||||
|
""" Fetches the list of unsaved file paths and asserts that the lists
|
||||||
|
match """
|
||||||
|
primary_files = self.primary.unsaved_files()
|
||||||
|
secondary_files = self.secondary.unsaved_files()
|
||||||
|
assertions.assertEqualSimple(primary_files, secondary_files)
|
||||||
|
|
||||||
|
return primary_files
|
||||||
|
|
||||||
|
def parsed_paths(self):
|
||||||
|
"""
|
||||||
|
Returns a list of file paths that have currently been parsed into the parser
|
||||||
|
tree. The returned list may include paths with wildcard characters, for
|
||||||
|
example: ['/etc/apache2/conf.d/*.load']
|
||||||
|
|
||||||
|
This is typically called on the root node of the ParserNode tree.
|
||||||
|
|
||||||
|
:returns: list of file paths of files that have been parsed
|
||||||
|
"""
|
||||||
|
|
||||||
|
primary_paths = self.primary.parsed_paths()
|
||||||
|
secondary_paths = self.secondary.parsed_paths()
|
||||||
|
assertions.assertEqualPathsList(primary_paths, secondary_paths)
|
||||||
|
return primary_paths
|
||||||
@@ -1,21 +1,19 @@
|
|||||||
""" Entry point for Apache Plugin """
|
""" Entry point for Apache Plugin """
|
||||||
# Pylint does not like disutils.version when running inside a venv.
|
from distutils.version import LooseVersion
|
||||||
# See: https://github.com/PyCQA/pylint/issues/73
|
|
||||||
from distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error
|
|
||||||
|
|
||||||
from certbot import util
|
from certbot import util
|
||||||
|
from certbot_apache._internal import configurator
|
||||||
from certbot_apache import configurator
|
from certbot_apache._internal import override_arch
|
||||||
from certbot_apache import override_arch
|
from certbot_apache._internal import override_centos
|
||||||
from certbot_apache import override_fedora
|
from certbot_apache._internal import override_darwin
|
||||||
from certbot_apache import override_darwin
|
from certbot_apache._internal import override_debian
|
||||||
from certbot_apache import override_debian
|
from certbot_apache._internal import override_fedora
|
||||||
from certbot_apache import override_centos
|
from certbot_apache._internal import override_gentoo
|
||||||
from certbot_apache import override_gentoo
|
from certbot_apache._internal import override_suse
|
||||||
from certbot_apache import override_suse
|
|
||||||
|
|
||||||
OVERRIDE_CLASSES = {
|
OVERRIDE_CLASSES = {
|
||||||
"arch": override_arch.ArchConfigurator,
|
"arch": override_arch.ArchConfigurator,
|
||||||
|
"cloudlinux": override_centos.CentOSConfigurator,
|
||||||
"darwin": override_darwin.DarwinConfigurator,
|
"darwin": override_darwin.DarwinConfigurator,
|
||||||
"debian": override_debian.DebianConfigurator,
|
"debian": override_debian.DebianConfigurator,
|
||||||
"ubuntu": override_debian.DebianConfigurator,
|
"ubuntu": override_debian.DebianConfigurator,
|
||||||
@@ -23,7 +21,9 @@ OVERRIDE_CLASSES = {
|
|||||||
"centos linux": override_centos.CentOSConfigurator,
|
"centos linux": override_centos.CentOSConfigurator,
|
||||||
"fedora_old": override_centos.CentOSConfigurator,
|
"fedora_old": override_centos.CentOSConfigurator,
|
||||||
"fedora": override_fedora.FedoraConfigurator,
|
"fedora": override_fedora.FedoraConfigurator,
|
||||||
|
"linuxmint": override_debian.DebianConfigurator,
|
||||||
"ol": override_centos.CentOSConfigurator,
|
"ol": override_centos.CentOSConfigurator,
|
||||||
|
"oracle": override_centos.CentOSConfigurator,
|
||||||
"redhatenterpriseserver": override_centos.CentOSConfigurator,
|
"redhatenterpriseserver": override_centos.CentOSConfigurator,
|
||||||
"red hat enterprise linux server": override_centos.CentOSConfigurator,
|
"red hat enterprise linux server": override_centos.CentOSConfigurator,
|
||||||
"rhel": override_centos.CentOSConfigurator,
|
"rhel": override_centos.CentOSConfigurator,
|
||||||
@@ -32,6 +32,7 @@ OVERRIDE_CLASSES = {
|
|||||||
"gentoo base system": override_gentoo.GentooConfigurator,
|
"gentoo base system": override_gentoo.GentooConfigurator,
|
||||||
"opensuse": override_suse.OpenSUSEConfigurator,
|
"opensuse": override_suse.OpenSUSEConfigurator,
|
||||||
"suse": override_suse.OpenSUSEConfigurator,
|
"suse": override_suse.OpenSUSEConfigurator,
|
||||||
|
"sles": override_suse.OpenSUSEConfigurator,
|
||||||
"scientific": override_centos.CentOSConfigurator,
|
"scientific": override_centos.CentOSConfigurator,
|
||||||
"scientific linux": override_centos.CentOSConfigurator,
|
"scientific linux": override_centos.CentOSConfigurator,
|
||||||
}
|
}
|
||||||
@@ -1,20 +1,20 @@
|
|||||||
"""A class that performs HTTP-01 challenges for Apache"""
|
"""A class that performs HTTP-01 challenges for Apache"""
|
||||||
import logging
|
import logging
|
||||||
|
import errno
|
||||||
|
|
||||||
from acme.magic_typing import List, Set # pylint: disable=unused-import, no-name-in-module
|
from acme.magic_typing import List
|
||||||
|
from acme.magic_typing import Set
|
||||||
from certbot import errors
|
from certbot import errors
|
||||||
from certbot.compat import os
|
|
||||||
from certbot.compat import filesystem
|
from certbot.compat import filesystem
|
||||||
|
from certbot.compat import os
|
||||||
from certbot.plugins import common
|
from certbot.plugins import common
|
||||||
|
from certbot_apache._internal.obj import VirtualHost # pylint: disable=unused-import
|
||||||
from certbot_apache.obj import VirtualHost # pylint: disable=unused-import
|
from certbot_apache._internal.parser import get_aug_path
|
||||||
from certbot_apache.parser import get_aug_path
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ApacheHttp01(common.TLSSNI01):
|
class ApacheHttp01(common.ChallengePerformer):
|
||||||
"""Class that performs HTTP-01 challenges within the Apache configurator."""
|
"""Class that performs HTTP-01 challenges within the Apache configurator."""
|
||||||
|
|
||||||
CONFIG_TEMPLATE22_PRE = """\
|
CONFIG_TEMPLATE22_PRE = """\
|
||||||
@@ -169,7 +169,15 @@ class ApacheHttp01(common.TLSSNI01):
|
|||||||
|
|
||||||
def _set_up_challenges(self):
|
def _set_up_challenges(self):
|
||||||
if not os.path.isdir(self.challenge_dir):
|
if not os.path.isdir(self.challenge_dir):
|
||||||
filesystem.makedirs(self.challenge_dir, 0o755)
|
old_umask = os.umask(0o022)
|
||||||
|
try:
|
||||||
|
filesystem.makedirs(self.challenge_dir, 0o755)
|
||||||
|
except OSError as exception:
|
||||||
|
if exception.errno not in (errno.EEXIST, errno.EISDIR):
|
||||||
|
raise errors.PluginError(
|
||||||
|
"Couldn't create root for http-01 challenge")
|
||||||
|
finally:
|
||||||
|
os.umask(old_umask)
|
||||||
|
|
||||||
responses = []
|
responses = []
|
||||||
for achall in self.achalls:
|
for achall in self.achalls:
|
||||||
@@ -195,8 +203,8 @@ class ApacheHttp01(common.TLSSNI01):
|
|||||||
|
|
||||||
if vhost not in self.moded_vhosts:
|
if vhost not in self.moded_vhosts:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"Adding a temporary challenge validation Include for name: %s " +
|
"Adding a temporary challenge validation Include for name: %s in: %s",
|
||||||
"in: %s", vhost.name, vhost.filep)
|
vhost.name, vhost.filep)
|
||||||
self.configurator.parser.add_dir_beginning(
|
self.configurator.parser.add_dir_beginning(
|
||||||
vhost.path, "Include", self.challenge_conf_pre)
|
vhost.path, "Include", self.challenge_conf_pre)
|
||||||
self.configurator.parser.add_dir(
|
self.configurator.parser.add_dir(
|
||||||
515
certbot-apache/certbot_apache/_internal/interfaces.py
Normal file
515
certbot-apache/certbot_apache/_internal/interfaces.py
Normal file
@@ -0,0 +1,515 @@
|
|||||||
|
"""ParserNode interface for interacting with configuration tree.
|
||||||
|
|
||||||
|
General description
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
The ParserNode interfaces are designed to be able to contain all the parsing logic,
|
||||||
|
while allowing their users to interact with the configuration tree in a Pythonic
|
||||||
|
and well structured manner.
|
||||||
|
|
||||||
|
The structure allows easy traversal of the tree of ParserNodes. Each ParserNode
|
||||||
|
stores a reference to its ancestor and immediate children, allowing the user to
|
||||||
|
traverse the tree using built in interface methods as well as accessing the interface
|
||||||
|
properties directly.
|
||||||
|
|
||||||
|
ParserNode interface implementation should stand between the actual underlying
|
||||||
|
parser functionality and the business logic within Configurator code, interfacing
|
||||||
|
with both. The ParserNode tree is a result of configuration parsing action.
|
||||||
|
|
||||||
|
ParserNode tree will be in charge of maintaining the parser state and hence the
|
||||||
|
abstract syntax tree (AST). Interactions between ParserNode tree and underlying
|
||||||
|
parser should involve only parsing the configuration files to this structure, and
|
||||||
|
writing it back to the filesystem - while preserving the format including whitespaces.
|
||||||
|
|
||||||
|
For some implementations (Apache for example) it's important to keep track of and
|
||||||
|
to use state information while parsing conditional blocks and directives. This
|
||||||
|
allows the implementation to set a flag to parts of the parsed configuration
|
||||||
|
structure as not being in effect in a case of unmatched conditional block. It's
|
||||||
|
important to store these blocks in the tree as well in order to not to conduct
|
||||||
|
destructive actions (failing to write back parts of the configuration) while writing
|
||||||
|
the AST back to the filesystem.
|
||||||
|
|
||||||
|
The ParserNode tree is in charge of maintaining the its own structure while every
|
||||||
|
child node fetched with find - methods or by iterating its list of children can be
|
||||||
|
changed in place. When making changes the affected nodes should be flagged as "dirty"
|
||||||
|
in order for the parser implementation to figure out the parts of the configuration
|
||||||
|
that need to be written back to disk during the save() operation.
|
||||||
|
|
||||||
|
|
||||||
|
Metadata
|
||||||
|
--------
|
||||||
|
|
||||||
|
The metadata holds all the implementation specific attributes of the ParserNodes -
|
||||||
|
things like the positional information related to the AST, file paths, whitespacing,
|
||||||
|
and any other information relevant to the underlying parser engine.
|
||||||
|
|
||||||
|
Access to the metadata should be handled by implementation specific methods, allowing
|
||||||
|
the Configurator functionality to access the underlying information where needed.
|
||||||
|
|
||||||
|
For some implementations the node can be initialized using the information carried
|
||||||
|
in metadata alone. This is useful especially when populating the ParserNode tree
|
||||||
|
while parsing the configuration.
|
||||||
|
|
||||||
|
|
||||||
|
Apache implementation
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
The Apache implementation of ParserNode interface requires some implementation
|
||||||
|
specific functionalities that are not described by the interface itself.
|
||||||
|
|
||||||
|
Initialization
|
||||||
|
|
||||||
|
When the user of a ParserNode class is creating these objects, they must specify
|
||||||
|
the parameters as described in the documentation for the __init__ methods below.
|
||||||
|
When these objects are created internally, however, some parameters may not be
|
||||||
|
needed because (possibly more detailed) information is included in the metadata
|
||||||
|
parameter. In this case, implementations can deviate from the required parameters
|
||||||
|
from __init__, however, they should still behave the same when metadata is not
|
||||||
|
provided.
|
||||||
|
|
||||||
|
For consistency internally, if an argument is provided directly in the ParserNode
|
||||||
|
initialization parameters as well as within metadata it's recommended to establish
|
||||||
|
clear behavior around this scenario within the implementation.
|
||||||
|
|
||||||
|
Conditional blocks
|
||||||
|
|
||||||
|
Apache configuration can have conditional blocks, for example: <IfModule ...>,
|
||||||
|
resulting the directives and subblocks within it being either enabled or disabled.
|
||||||
|
While find_* interface methods allow including the disabled parts of the configuration
|
||||||
|
tree in searches a special care needs to be taken while parsing the structure in
|
||||||
|
order to reflect the active state of configuration.
|
||||||
|
|
||||||
|
Whitespaces
|
||||||
|
|
||||||
|
Each ParserNode object is responsible of storing its prepending whitespace characters
|
||||||
|
in order to be able to write the AST back to filesystem like it was, preserving the
|
||||||
|
format, this applies for parameters of BlockNode and DirectiveNode as well.
|
||||||
|
When parameters of ParserNode are changed, the pre-existing whitespaces in the
|
||||||
|
parameter sequence are discarded, as the general reason for storing them is to
|
||||||
|
maintain the ability to write the configuration back to filesystem exactly like
|
||||||
|
it was. This loses its meaning when we have to change the directives or blocks
|
||||||
|
parameters for other reasons.
|
||||||
|
|
||||||
|
Searches and matching
|
||||||
|
|
||||||
|
Apache configuration is largely case insensitive, so the Apache implementation of
|
||||||
|
ParserNode interface needs to provide the user means to match block and directive
|
||||||
|
names and parameters in case insensitive manner. This does not apply to everything
|
||||||
|
however, for example the parameters of a conditional statement may be case sensitive.
|
||||||
|
For this reason the internal representation of data should not ignore the case.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class ParserNode(object):
|
||||||
|
"""
|
||||||
|
ParserNode is the basic building block of the tree of such nodes,
|
||||||
|
representing the structure of the configuration. It is largely meant to keep
|
||||||
|
the structure information intact and idiomatically accessible.
|
||||||
|
|
||||||
|
The root node as well as the child nodes of it should be instances of ParserNode.
|
||||||
|
Nodes keep track of their differences to on-disk representation of configuration
|
||||||
|
by marking modified ParserNodes as dirty to enable partial write-to-disk for
|
||||||
|
different files in the configuration structure.
|
||||||
|
|
||||||
|
While for the most parts the usage and the child types are obvious, "include"-
|
||||||
|
and similar directives are an exception to this rule. This is because of the
|
||||||
|
nature of include directives - which unroll the contents of another file or
|
||||||
|
configuration block to their place. While we could unroll the included nodes
|
||||||
|
to the parent tree, it remains important to keep the context of include nodes
|
||||||
|
separate in order to write back the original configuration as it was.
|
||||||
|
|
||||||
|
For parsers that require the implementation to keep track of the whitespacing,
|
||||||
|
it's responsibility of each ParserNode object itself to store its prepending
|
||||||
|
whitespaces in order to be able to reconstruct the complete configuration file
|
||||||
|
as it was when originally read from the disk.
|
||||||
|
|
||||||
|
ParserNode objects should have the following attributes:
|
||||||
|
|
||||||
|
# Reference to ancestor node, or None if the node is the root node of the
|
||||||
|
# configuration tree.
|
||||||
|
ancestor: Optional[ParserNode]
|
||||||
|
|
||||||
|
# True if this node has been modified since last save.
|
||||||
|
dirty: bool
|
||||||
|
|
||||||
|
# Filepath of the file where the configuration element for this ParserNode
|
||||||
|
# object resides. For root node, the value for filepath is the httpd root
|
||||||
|
# configuration file. Filepath can be None if a configuration directive is
|
||||||
|
# defined in for example the httpd command line.
|
||||||
|
filepath: Optional[str]
|
||||||
|
|
||||||
|
# Metadata dictionary holds all the implementation specific key-value pairs
|
||||||
|
# for the ParserNode instance.
|
||||||
|
metadata: Dict[str, Any]
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Initializes the ParserNode instance, and sets the ParserNode specific
|
||||||
|
instance variables. This is not meant to be used directly, but through
|
||||||
|
specific classes implementing ParserNode interface.
|
||||||
|
|
||||||
|
:param ancestor: BlockNode ancestor for this CommentNode. Required.
|
||||||
|
:type ancestor: BlockNode or None
|
||||||
|
|
||||||
|
:param filepath: Filesystem path for the file where this CommentNode
|
||||||
|
does or should exist in the filesystem. Required.
|
||||||
|
:type filepath: str or None
|
||||||
|
|
||||||
|
:param dirty: Boolean flag for denoting if this CommentNode has been
|
||||||
|
created or changed after the last save. Default: False.
|
||||||
|
:type dirty: bool
|
||||||
|
|
||||||
|
:param metadata: Dictionary of metadata values for this ParserNode object.
|
||||||
|
Metadata information should be used only internally in the implementation.
|
||||||
|
Default: {}
|
||||||
|
:type metadata: dict
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def save(self, msg):
|
||||||
|
"""
|
||||||
|
Save traverses the children, and attempts to write the AST to disk for
|
||||||
|
all the objects that are marked dirty. The actual operation of course
|
||||||
|
depends on the underlying implementation. save() shouldn't be called
|
||||||
|
from the Configurator outside of its designated save() method in order
|
||||||
|
to ensure that the Reverter checkpoints are created properly.
|
||||||
|
|
||||||
|
Note: this approach of keeping internal structure of the configuration
|
||||||
|
within the ParserNode tree does not represent the file inclusion structure
|
||||||
|
of actual configuration files that reside in the filesystem. To handle
|
||||||
|
file writes properly, the file specific temporary trees should be extracted
|
||||||
|
from the full ParserNode tree where necessary when writing to disk.
|
||||||
|
|
||||||
|
:param str msg: Message describing the reason for the save.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def find_ancestors(self, name):
|
||||||
|
"""
|
||||||
|
Traverses the ancestor tree up, searching for BlockNodes with a specific
|
||||||
|
name.
|
||||||
|
|
||||||
|
:param str name: Name of the ancestor BlockNode to search for
|
||||||
|
|
||||||
|
:returns: A list of ancestor BlockNodes that match the name
|
||||||
|
:rtype: list of BlockNode
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
# Linter rule exclusion done because of https://github.com/PyCQA/pylint/issues/179
|
||||||
|
@six.add_metaclass(abc.ABCMeta) # pylint: disable=abstract-method
|
||||||
|
class CommentNode(ParserNode):
|
||||||
|
"""
|
||||||
|
CommentNode class is used for representation of comments within the parsed
|
||||||
|
configuration structure. Because of the nature of comments, it is not able
|
||||||
|
to have child nodes and hence it is always treated as a leaf node.
|
||||||
|
|
||||||
|
CommentNode stores its contents in class variable 'comment' and does not
|
||||||
|
have a specific name.
|
||||||
|
|
||||||
|
CommentNode objects should have the following attributes in addition to
|
||||||
|
the ones described in ParserNode:
|
||||||
|
|
||||||
|
# Contains the contents of the comment without the directive notation
|
||||||
|
# (typically # or /* ... */).
|
||||||
|
comment: str
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Initializes the CommentNode instance and sets its instance variables.
|
||||||
|
|
||||||
|
:param comment: Contents of the comment. Required.
|
||||||
|
:type comment: str
|
||||||
|
|
||||||
|
:param ancestor: BlockNode ancestor for this CommentNode. Required.
|
||||||
|
:type ancestor: BlockNode or None
|
||||||
|
|
||||||
|
:param filepath: Filesystem path for the file where this CommentNode
|
||||||
|
does or should exist in the filesystem. Required.
|
||||||
|
:type filepath: str or None
|
||||||
|
|
||||||
|
:param dirty: Boolean flag for denoting if this CommentNode has been
|
||||||
|
created or changed after the last save. Default: False.
|
||||||
|
:type dirty: bool
|
||||||
|
"""
|
||||||
|
super(CommentNode, self).__init__(ancestor=kwargs['ancestor'],
|
||||||
|
dirty=kwargs.get('dirty', False),
|
||||||
|
filepath=kwargs['filepath'],
|
||||||
|
metadata=kwargs.get('metadata', {})) # pragma: no cover
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class DirectiveNode(ParserNode):
|
||||||
|
"""
|
||||||
|
DirectiveNode class represents a configuration directive within the configuration.
|
||||||
|
It can have zero or more parameters attached to it. Because of the nature of
|
||||||
|
single directives, it is not able to have child nodes and hence it is always
|
||||||
|
treated as a leaf node.
|
||||||
|
|
||||||
|
If a this directive was defined on the httpd command line, the ancestor instance
|
||||||
|
variable for this DirectiveNode should be None, and it should be inserted to the
|
||||||
|
beginning of root BlockNode children sequence.
|
||||||
|
|
||||||
|
DirectiveNode objects should have the following attributes in addition to
|
||||||
|
the ones described in ParserNode:
|
||||||
|
|
||||||
|
# True if this DirectiveNode is enabled and False if it is inside of an
|
||||||
|
# inactive conditional block.
|
||||||
|
enabled: bool
|
||||||
|
|
||||||
|
# Name, or key of the configuration directive. If BlockNode subclass of
|
||||||
|
# DirectiveNode is the root configuration node, the name should be None.
|
||||||
|
name: Optional[str]
|
||||||
|
|
||||||
|
# Tuple of parameters of this ParserNode object, excluding whitespaces.
|
||||||
|
parameters: Tuple[str, ...]
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Initializes the DirectiveNode instance and sets its instance variables.
|
||||||
|
|
||||||
|
:param name: Name or key of the DirectiveNode object. Required.
|
||||||
|
:type name: str or None
|
||||||
|
|
||||||
|
:param tuple parameters: Tuple of str parameters for this DirectiveNode.
|
||||||
|
Default: ().
|
||||||
|
:type parameters: tuple
|
||||||
|
|
||||||
|
:param ancestor: BlockNode ancestor for this DirectiveNode, or None for
|
||||||
|
root configuration node. Required.
|
||||||
|
:type ancestor: BlockNode or None
|
||||||
|
|
||||||
|
:param filepath: Filesystem path for the file where this DirectiveNode
|
||||||
|
does or should exist in the filesystem, or None for directives introduced
|
||||||
|
in the httpd command line. Required.
|
||||||
|
:type filepath: str or None
|
||||||
|
|
||||||
|
:param dirty: Boolean flag for denoting if this DirectiveNode has been
|
||||||
|
created or changed after the last save. Default: False.
|
||||||
|
:type dirty: bool
|
||||||
|
|
||||||
|
:param enabled: True if this DirectiveNode object is parsed in the active
|
||||||
|
configuration of the httpd. False if the DirectiveNode exists within a
|
||||||
|
unmatched conditional configuration block. Default: True.
|
||||||
|
:type enabled: bool
|
||||||
|
|
||||||
|
"""
|
||||||
|
super(DirectiveNode, self).__init__(ancestor=kwargs['ancestor'],
|
||||||
|
dirty=kwargs.get('dirty', False),
|
||||||
|
filepath=kwargs['filepath'],
|
||||||
|
metadata=kwargs.get('metadata', {})) # pragma: no cover
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def set_parameters(self, parameters):
|
||||||
|
"""
|
||||||
|
Sets the sequence of parameters for this ParserNode object without
|
||||||
|
whitespaces. While the whitespaces for parameters are discarded when using
|
||||||
|
this method, the whitespacing preceeding the ParserNode itself should be
|
||||||
|
kept intact.
|
||||||
|
|
||||||
|
:param list parameters: sequence of parameters
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class BlockNode(DirectiveNode):
|
||||||
|
"""
|
||||||
|
BlockNode class represents a block of nested configuration directives, comments
|
||||||
|
and other blocks as its children. A BlockNode can have zero or more parameters
|
||||||
|
attached to it.
|
||||||
|
|
||||||
|
Configuration blocks typically consist of one or more child nodes of all possible
|
||||||
|
types. Because of this, the BlockNode class has various discovery and structure
|
||||||
|
management methods.
|
||||||
|
|
||||||
|
Lists of parameters used as an optional argument for some of the methods should
|
||||||
|
be lists of strings that are applicable parameters for each specific BlockNode
|
||||||
|
or DirectiveNode type. As an example, for a following configuration example:
|
||||||
|
|
||||||
|
<VirtualHost *:80>
|
||||||
|
...
|
||||||
|
</VirtualHost>
|
||||||
|
|
||||||
|
The node type would be BlockNode, name would be 'VirtualHost' and its parameters
|
||||||
|
would be: ['*:80'].
|
||||||
|
|
||||||
|
While for the following example:
|
||||||
|
|
||||||
|
LoadModule alias_module /usr/lib/apache2/modules/mod_alias.so
|
||||||
|
|
||||||
|
The node type would be DirectiveNode, name would be 'LoadModule' and its
|
||||||
|
parameters would be: ['alias_module', '/usr/lib/apache2/modules/mod_alias.so']
|
||||||
|
|
||||||
|
The applicable parameters are dependent on the underlying configuration language
|
||||||
|
and its grammar.
|
||||||
|
|
||||||
|
BlockNode objects should have the following attributes in addition to
|
||||||
|
the ones described in DirectiveNode:
|
||||||
|
|
||||||
|
# Tuple of direct children of this BlockNode object. The order of children
|
||||||
|
# in this tuple retain the order of elements in the parsed configuration
|
||||||
|
# block.
|
||||||
|
children: Tuple[ParserNode, ...]
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def add_child_block(self, name, parameters=None, position=None):
|
||||||
|
"""
|
||||||
|
Adds a new BlockNode child node with provided values and marks the callee
|
||||||
|
BlockNode dirty. This is used to add new children to the AST. The preceeding
|
||||||
|
whitespaces should not be added based on the ancestor or siblings for the
|
||||||
|
newly created object. This is to match the current behavior of the legacy
|
||||||
|
parser implementation.
|
||||||
|
|
||||||
|
:param str name: The name of the child node to add
|
||||||
|
:param list parameters: list of parameters for the node
|
||||||
|
:param int position: Position in the list of children to add the new child
|
||||||
|
node to. Defaults to None, which appends the newly created node to the list.
|
||||||
|
If an integer is given, the child is inserted before that index in the
|
||||||
|
list similar to list().insert.
|
||||||
|
|
||||||
|
:returns: BlockNode instance of the created child block
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def add_child_directive(self, name, parameters=None, position=None):
|
||||||
|
"""
|
||||||
|
Adds a new DirectiveNode child node with provided values and marks the
|
||||||
|
callee BlockNode dirty. This is used to add new children to the AST. The
|
||||||
|
preceeding whitespaces should not be added based on the ancestor or siblings
|
||||||
|
for the newly created object. This is to match the current behavior of the
|
||||||
|
legacy parser implementation.
|
||||||
|
|
||||||
|
|
||||||
|
:param str name: The name of the child node to add
|
||||||
|
:param list parameters: list of parameters for the node
|
||||||
|
:param int position: Position in the list of children to add the new child
|
||||||
|
node to. Defaults to None, which appends the newly created node to the list.
|
||||||
|
If an integer is given, the child is inserted before that index in the
|
||||||
|
list similar to list().insert.
|
||||||
|
|
||||||
|
:returns: DirectiveNode instance of the created child directive
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def add_child_comment(self, comment="", position=None):
|
||||||
|
"""
|
||||||
|
Adds a new CommentNode child node with provided value and marks the
|
||||||
|
callee BlockNode dirty. This is used to add new children to the AST. The
|
||||||
|
preceeding whitespaces should not be added based on the ancestor or siblings
|
||||||
|
for the newly created object. This is to match the current behavior of the
|
||||||
|
legacy parser implementation.
|
||||||
|
|
||||||
|
|
||||||
|
:param str comment: Comment contents
|
||||||
|
:param int position: Position in the list of children to add the new child
|
||||||
|
node to. Defaults to None, which appends the newly created node to the list.
|
||||||
|
If an integer is given, the child is inserted before that index in the
|
||||||
|
list similar to list().insert.
|
||||||
|
|
||||||
|
:returns: CommentNode instance of the created child comment
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def find_blocks(self, name, exclude=True):
|
||||||
|
"""
|
||||||
|
Find a configuration block by name. This method walks the child tree of
|
||||||
|
ParserNodes under the instance it was called from. This way it is possible
|
||||||
|
to search for the whole configuration tree, when starting from root node or
|
||||||
|
to do a partial search when starting from a specified branch. The lookup
|
||||||
|
should be case insensitive.
|
||||||
|
|
||||||
|
:param str name: The name of the directive to search for
|
||||||
|
:param bool exclude: If the search results should exclude the contents of
|
||||||
|
ParserNode objects that reside within conditional blocks and because
|
||||||
|
of current state are not enabled.
|
||||||
|
|
||||||
|
:returns: A list of found BlockNode objects.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def find_directives(self, name, exclude=True):
|
||||||
|
"""
|
||||||
|
Find a directive by name. This method walks the child tree of ParserNodes
|
||||||
|
under the instance it was called from. This way it is possible to search
|
||||||
|
for the whole configuration tree, when starting from root node, or to do
|
||||||
|
a partial search when starting from a specified branch. The lookup should
|
||||||
|
be case insensitive.
|
||||||
|
|
||||||
|
:param str name: The name of the directive to search for
|
||||||
|
:param bool exclude: If the search results should exclude the contents of
|
||||||
|
ParserNode objects that reside within conditional blocks and because
|
||||||
|
of current state are not enabled.
|
||||||
|
|
||||||
|
:returns: A list of found DirectiveNode objects.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def find_comments(self, comment):
|
||||||
|
"""
|
||||||
|
Find comments with value containing the search term.
|
||||||
|
|
||||||
|
This method walks the child tree of ParserNodes under the instance it was
|
||||||
|
called from. This way it is possible to search for the whole configuration
|
||||||
|
tree, when starting from root node, or to do a partial search when starting
|
||||||
|
from a specified branch. The lookup should be case sensitive.
|
||||||
|
|
||||||
|
:param str comment: The content of comment to search for
|
||||||
|
|
||||||
|
:returns: A list of found CommentNode objects.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def delete_child(self, child):
|
||||||
|
"""
|
||||||
|
Remove a specified child node from the list of children of the called
|
||||||
|
BlockNode object.
|
||||||
|
|
||||||
|
:param ParserNode child: Child ParserNode object to remove from the list
|
||||||
|
of children of the callee.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def unsaved_files(self):
|
||||||
|
"""
|
||||||
|
Returns a list of file paths that have been changed since the last save
|
||||||
|
(or the initial configuration parse). The intended use for this method
|
||||||
|
is to tell the Reverter which files need to be included in a checkpoint.
|
||||||
|
|
||||||
|
This is typically called for the root of the ParserNode tree.
|
||||||
|
|
||||||
|
:returns: list of file paths of files that have been changed but not yet
|
||||||
|
saved to disk.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def parsed_paths(self):
|
||||||
|
"""
|
||||||
|
Returns a list of file paths that have currently been parsed into the parser
|
||||||
|
tree. The returned list may include paths with wildcard characters, for
|
||||||
|
example: ['/etc/apache2/conf.d/*.load']
|
||||||
|
|
||||||
|
This is typically called on the root node of the ParserNode tree.
|
||||||
|
|
||||||
|
:returns: list of file paths of files that have been parsed
|
||||||
|
"""
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
"""Module contains classes used by the Apache Configurator."""
|
"""Module contains classes used by the Apache Configurator."""
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
from acme.magic_typing import Set
|
||||||
from certbot.plugins import common
|
from certbot.plugins import common
|
||||||
|
|
||||||
|
|
||||||
@@ -24,7 +24,7 @@ class Addr(common.Addr):
|
|||||||
return not self.__eq__(other)
|
return not self.__eq__(other)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "certbot_apache.obj.Addr(" + repr(self.tup) + ")"
|
return "certbot_apache._internal.obj.Addr(" + repr(self.tup) + ")"
|
||||||
|
|
||||||
def __hash__(self): # pylint: disable=useless-super-delegation
|
def __hash__(self): # pylint: disable=useless-super-delegation
|
||||||
# Python 3 requires explicit overridden for __hash__ if __eq__ or
|
# Python 3 requires explicit overridden for __hash__ if __eq__ or
|
||||||
@@ -98,7 +98,7 @@ class Addr(common.Addr):
|
|||||||
return self.get_addr_obj(port)
|
return self.get_addr_obj(port)
|
||||||
|
|
||||||
|
|
||||||
class VirtualHost(object): # pylint: disable=too-few-public-methods
|
class VirtualHost(object):
|
||||||
"""Represents an Apache Virtualhost.
|
"""Represents an Apache Virtualhost.
|
||||||
|
|
||||||
:ivar str filep: file path of VH
|
:ivar str filep: file path of VH
|
||||||
@@ -124,9 +124,8 @@ class VirtualHost(object): # pylint: disable=too-few-public-methods
|
|||||||
strip_name = re.compile(r"^(?:.+://)?([^ :$]*)")
|
strip_name = re.compile(r"^(?:.+://)?([^ :$]*)")
|
||||||
|
|
||||||
def __init__(self, filep, path, addrs, ssl, enabled, name=None,
|
def __init__(self, filep, path, addrs, ssl, enabled, name=None,
|
||||||
aliases=None, modmacro=False, ancestor=None):
|
aliases=None, modmacro=False, ancestor=None, node=None):
|
||||||
|
|
||||||
# pylint: disable=too-many-arguments
|
|
||||||
"""Initialize a VH."""
|
"""Initialize a VH."""
|
||||||
self.filep = filep
|
self.filep = filep
|
||||||
self.path = path
|
self.path = path
|
||||||
@@ -137,6 +136,7 @@ class VirtualHost(object): # pylint: disable=too-few-public-methods
|
|||||||
self.enabled = enabled
|
self.enabled = enabled
|
||||||
self.modmacro = modmacro
|
self.modmacro = modmacro
|
||||||
self.ancestor = ancestor
|
self.ancestor = ancestor
|
||||||
|
self.node = node
|
||||||
|
|
||||||
def get_names(self):
|
def get_names(self):
|
||||||
"""Return a set of all names."""
|
"""Return a set of all names."""
|
||||||
@@ -1,11 +1,9 @@
|
|||||||
""" Distribution specific override class for Arch Linux """
|
""" Distribution specific override class for Arch Linux """
|
||||||
import pkg_resources
|
|
||||||
|
|
||||||
import zope.interface
|
import zope.interface
|
||||||
|
|
||||||
from certbot import interfaces
|
from certbot import interfaces
|
||||||
|
from certbot_apache._internal import configurator
|
||||||
|
|
||||||
from certbot_apache import configurator
|
|
||||||
|
|
||||||
@zope.interface.provider(interfaces.IPluginFactory)
|
@zope.interface.provider(interfaces.IPluginFactory)
|
||||||
class ArchConfigurator(configurator.ApacheConfigurator):
|
class ArchConfigurator(configurator.ApacheConfigurator):
|
||||||
@@ -26,6 +24,4 @@ class ArchConfigurator(configurator.ApacheConfigurator):
|
|||||||
handle_modules=False,
|
handle_modules=False,
|
||||||
handle_sites=False,
|
handle_sites=False,
|
||||||
challenge_location="/etc/httpd/conf",
|
challenge_location="/etc/httpd/conf",
|
||||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
|
||||||
"certbot_apache", "options-ssl-apache.conf")
|
|
||||||
)
|
)
|
||||||
@@ -1,20 +1,16 @@
|
|||||||
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
|
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import pkg_resources
|
|
||||||
import zope.interface
|
import zope.interface
|
||||||
|
|
||||||
|
from acme.magic_typing import List
|
||||||
from certbot import errors
|
from certbot import errors
|
||||||
from certbot import interfaces
|
from certbot import interfaces
|
||||||
from certbot import util
|
from certbot import util
|
||||||
from certbot.errors import MisconfigurationError
|
from certbot.errors import MisconfigurationError
|
||||||
|
from certbot_apache._internal import apache_util
|
||||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
from certbot_apache._internal import configurator
|
||||||
|
from certbot_apache._internal import parser
|
||||||
from certbot_apache import apache_util
|
|
||||||
from certbot_apache import configurator
|
|
||||||
from certbot_apache import parser
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -39,8 +35,6 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
|||||||
handle_modules=False,
|
handle_modules=False,
|
||||||
handle_sites=False,
|
handle_sites=False,
|
||||||
challenge_location="/etc/httpd/conf.d",
|
challenge_location="/etc/httpd/conf.d",
|
||||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
|
||||||
"certbot_apache", "centos-options-ssl-apache.conf")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def config_test(self):
|
def config_test(self):
|
||||||
@@ -1,11 +1,9 @@
|
|||||||
""" Distribution specific override class for macOS """
|
""" Distribution specific override class for macOS """
|
||||||
import pkg_resources
|
|
||||||
|
|
||||||
import zope.interface
|
import zope.interface
|
||||||
|
|
||||||
from certbot import interfaces
|
from certbot import interfaces
|
||||||
|
from certbot_apache._internal import configurator
|
||||||
|
|
||||||
from certbot_apache import configurator
|
|
||||||
|
|
||||||
@zope.interface.provider(interfaces.IPluginFactory)
|
@zope.interface.provider(interfaces.IPluginFactory)
|
||||||
class DarwinConfigurator(configurator.ApacheConfigurator):
|
class DarwinConfigurator(configurator.ApacheConfigurator):
|
||||||
@@ -26,6 +24,4 @@ class DarwinConfigurator(configurator.ApacheConfigurator):
|
|||||||
handle_modules=False,
|
handle_modules=False,
|
||||||
handle_sites=False,
|
handle_sites=False,
|
||||||
challenge_location="/etc/apache2/other",
|
challenge_location="/etc/apache2/other",
|
||||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
|
||||||
"certbot_apache", "options-ssl-apache.conf")
|
|
||||||
)
|
)
|
||||||
@@ -1,7 +1,6 @@
|
|||||||
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
|
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import pkg_resources
|
|
||||||
import zope.interface
|
import zope.interface
|
||||||
|
|
||||||
from certbot import errors
|
from certbot import errors
|
||||||
@@ -9,9 +8,8 @@ from certbot import interfaces
|
|||||||
from certbot import util
|
from certbot import util
|
||||||
from certbot.compat import filesystem
|
from certbot.compat import filesystem
|
||||||
from certbot.compat import os
|
from certbot.compat import os
|
||||||
|
from certbot_apache._internal import apache_util
|
||||||
from certbot_apache import apache_util
|
from certbot_apache._internal import configurator
|
||||||
from certbot_apache import configurator
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -35,8 +33,6 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
|||||||
handle_modules=True,
|
handle_modules=True,
|
||||||
handle_sites=True,
|
handle_sites=True,
|
||||||
challenge_location="/etc/apache2",
|
challenge_location="/etc/apache2",
|
||||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
|
||||||
"certbot_apache", "options-ssl-apache.conf")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def enable_site(self, vhost):
|
def enable_site(self, vhost):
|
||||||
@@ -46,7 +42,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
|||||||
modules are enabled appropriately.
|
modules are enabled appropriately.
|
||||||
|
|
||||||
:param vhost: vhost to enable
|
:param vhost: vhost to enable
|
||||||
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||||
|
|
||||||
:raises .errors.NotSupportedError: If filesystem layout is not
|
:raises .errors.NotSupportedError: If filesystem layout is not
|
||||||
supported.
|
supported.
|
||||||
@@ -71,15 +67,14 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
|||||||
# Already in shape
|
# Already in shape
|
||||||
vhost.enabled = True
|
vhost.enabled = True
|
||||||
return None
|
return None
|
||||||
else:
|
logger.warning(
|
||||||
logger.warning(
|
"Could not symlink %s to %s, got error: %s", enabled_path,
|
||||||
"Could not symlink %s to %s, got error: %s", enabled_path,
|
vhost.filep, err.strerror)
|
||||||
vhost.filep, err.strerror)
|
errstring = ("Encountered error while trying to enable a " +
|
||||||
errstring = ("Encountered error while trying to enable a " +
|
"newly created VirtualHost located at {0} by " +
|
||||||
"newly created VirtualHost located at {0} by " +
|
"linking to it from {1}")
|
||||||
"linking to it from {1}")
|
raise errors.NotSupportedError(errstring.format(vhost.filep,
|
||||||
raise errors.NotSupportedError(errstring.format(vhost.filep,
|
enabled_path))
|
||||||
enabled_path))
|
|
||||||
vhost.enabled = True
|
vhost.enabled = True
|
||||||
logger.info("Enabling available site: %s", vhost.filep)
|
logger.info("Enabling available site: %s", vhost.filep)
|
||||||
self.save_notes += "Enabled site %s\n" % vhost.filep
|
self.save_notes += "Enabled site %s\n" % vhost.filep
|
||||||
@@ -1,14 +1,12 @@
|
|||||||
""" Distribution specific override class for Fedora 29+ """
|
""" Distribution specific override class for Fedora 29+ """
|
||||||
import pkg_resources
|
|
||||||
import zope.interface
|
import zope.interface
|
||||||
|
|
||||||
from certbot import errors
|
from certbot import errors
|
||||||
from certbot import interfaces
|
from certbot import interfaces
|
||||||
from certbot import util
|
from certbot import util
|
||||||
|
from certbot_apache._internal import apache_util
|
||||||
from certbot_apache import apache_util
|
from certbot_apache._internal import configurator
|
||||||
from certbot_apache import configurator
|
from certbot_apache._internal import parser
|
||||||
from certbot_apache import parser
|
|
||||||
|
|
||||||
|
|
||||||
@zope.interface.provider(interfaces.IPluginFactory)
|
@zope.interface.provider(interfaces.IPluginFactory)
|
||||||
@@ -31,9 +29,6 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
|||||||
handle_modules=False,
|
handle_modules=False,
|
||||||
handle_sites=False,
|
handle_sites=False,
|
||||||
challenge_location="/etc/httpd/conf.d",
|
challenge_location="/etc/httpd/conf.d",
|
||||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
|
||||||
# TODO: eventually newest version of Fedora will need their own config
|
|
||||||
"certbot_apache", "centos-options-ssl-apache.conf")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def config_test(self):
|
def config_test(self):
|
||||||
@@ -1,13 +1,11 @@
|
|||||||
""" Distribution specific override class for Gentoo Linux """
|
""" Distribution specific override class for Gentoo Linux """
|
||||||
import pkg_resources
|
|
||||||
|
|
||||||
import zope.interface
|
import zope.interface
|
||||||
|
|
||||||
from certbot import interfaces
|
from certbot import interfaces
|
||||||
|
from certbot_apache._internal import apache_util
|
||||||
|
from certbot_apache._internal import configurator
|
||||||
|
from certbot_apache._internal import parser
|
||||||
|
|
||||||
from certbot_apache import apache_util
|
|
||||||
from certbot_apache import configurator
|
|
||||||
from certbot_apache import parser
|
|
||||||
|
|
||||||
@zope.interface.provider(interfaces.IPluginFactory)
|
@zope.interface.provider(interfaces.IPluginFactory)
|
||||||
class GentooConfigurator(configurator.ApacheConfigurator):
|
class GentooConfigurator(configurator.ApacheConfigurator):
|
||||||
@@ -29,8 +27,6 @@ class GentooConfigurator(configurator.ApacheConfigurator):
|
|||||||
handle_modules=False,
|
handle_modules=False,
|
||||||
handle_sites=False,
|
handle_sites=False,
|
||||||
challenge_location="/etc/apache2/vhosts.d",
|
challenge_location="/etc/apache2/vhosts.d",
|
||||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
|
||||||
"certbot_apache", "options-ssl-apache.conf")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def _prepare_options(self):
|
def _prepare_options(self):
|
||||||
@@ -70,6 +66,6 @@ class GentooParser(parser.ApacheParser):
|
|||||||
def update_modules(self):
|
def update_modules(self):
|
||||||
"""Get loaded modules from httpd process, and add them to DOM"""
|
"""Get loaded modules from httpd process, and add them to DOM"""
|
||||||
mod_cmd = [self.configurator.option("ctl"), "modules"]
|
mod_cmd = [self.configurator.option("ctl"), "modules"]
|
||||||
matches = self.parse_from_subprocess(mod_cmd, r"(.*)_module")
|
matches = apache_util.parse_from_subprocess(mod_cmd, r"(.*)_module")
|
||||||
for mod in matches:
|
for mod in matches:
|
||||||
self.add_mod(mod.strip())
|
self.add_mod(mod.strip())
|
||||||
@@ -1,11 +1,9 @@
|
|||||||
""" Distribution specific override class for OpenSUSE """
|
""" Distribution specific override class for OpenSUSE """
|
||||||
import pkg_resources
|
|
||||||
|
|
||||||
import zope.interface
|
import zope.interface
|
||||||
|
|
||||||
from certbot import interfaces
|
from certbot import interfaces
|
||||||
|
from certbot_apache._internal import configurator
|
||||||
|
|
||||||
from certbot_apache import configurator
|
|
||||||
|
|
||||||
@zope.interface.provider(interfaces.IPluginFactory)
|
@zope.interface.provider(interfaces.IPluginFactory)
|
||||||
class OpenSUSEConfigurator(configurator.ApacheConfigurator):
|
class OpenSUSEConfigurator(configurator.ApacheConfigurator):
|
||||||
@@ -26,6 +24,4 @@ class OpenSUSEConfigurator(configurator.ApacheConfigurator):
|
|||||||
handle_modules=False,
|
handle_modules=False,
|
||||||
handle_sites=False,
|
handle_sites=False,
|
||||||
challenge_location="/etc/apache2/vhosts.d",
|
challenge_location="/etc/apache2/vhosts.d",
|
||||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
|
||||||
"certbot_apache", "options-ssl-apache.conf")
|
|
||||||
)
|
)
|
||||||
@@ -3,23 +3,21 @@ import copy
|
|||||||
import fnmatch
|
import fnmatch
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
import subprocess
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
import six
|
import six
|
||||||
|
|
||||||
from acme.magic_typing import Dict, List, Set # pylint: disable=unused-import, no-name-in-module
|
from acme.magic_typing import Dict
|
||||||
|
from acme.magic_typing import List
|
||||||
from certbot import errors
|
from certbot import errors
|
||||||
from certbot.compat import os
|
from certbot.compat import os
|
||||||
|
from certbot_apache._internal import apache_util
|
||||||
from certbot_apache import constants
|
from certbot_apache._internal import constants
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ApacheParser(object):
|
class ApacheParser(object):
|
||||||
# pylint: disable=too-many-public-methods
|
|
||||||
"""Class handles the fine details of parsing the Apache Configuration.
|
"""Class handles the fine details of parsing the Apache Configuration.
|
||||||
|
|
||||||
.. todo:: Make parsing general... remove sites-available etc...
|
.. todo:: Make parsing general... remove sites-available etc...
|
||||||
@@ -32,7 +30,7 @@ class ApacheParser(object):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
|
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
|
||||||
fnmatch_chars = set(["*", "?", "\\", "[", "]"])
|
fnmatch_chars = {"*", "?", "\\", "[", "]"}
|
||||||
|
|
||||||
def __init__(self, root, vhostroot=None, version=(2, 4),
|
def __init__(self, root, vhostroot=None, version=(2, 4),
|
||||||
configurator=None):
|
configurator=None):
|
||||||
@@ -53,7 +51,7 @@ class ApacheParser(object):
|
|||||||
"version 1.2.0 or higher, please make sure you have you have "
|
"version 1.2.0 or higher, please make sure you have you have "
|
||||||
"those installed.")
|
"those installed.")
|
||||||
|
|
||||||
self.modules = set() # type: Set[str]
|
self.modules = {} # type: Dict[str, str]
|
||||||
self.parser_paths = {} # type: Dict[str, List[str]]
|
self.parser_paths = {} # type: Dict[str, List[str]]
|
||||||
self.variables = {} # type: Dict[str, str]
|
self.variables = {} # type: Dict[str, str]
|
||||||
|
|
||||||
@@ -250,14 +248,14 @@ class ApacheParser(object):
|
|||||||
def add_mod(self, mod_name):
|
def add_mod(self, mod_name):
|
||||||
"""Shortcut for updating parser modules."""
|
"""Shortcut for updating parser modules."""
|
||||||
if mod_name + "_module" not in self.modules:
|
if mod_name + "_module" not in self.modules:
|
||||||
self.modules.add(mod_name + "_module")
|
self.modules[mod_name + "_module"] = None
|
||||||
if "mod_" + mod_name + ".c" not in self.modules:
|
if "mod_" + mod_name + ".c" not in self.modules:
|
||||||
self.modules.add("mod_" + mod_name + ".c")
|
self.modules["mod_" + mod_name + ".c"] = None
|
||||||
|
|
||||||
def reset_modules(self):
|
def reset_modules(self):
|
||||||
"""Reset the loaded modules list. This is called from cleanup to clear
|
"""Reset the loaded modules list. This is called from cleanup to clear
|
||||||
temporarily loaded modules."""
|
temporarily loaded modules."""
|
||||||
self.modules = set()
|
self.modules = {}
|
||||||
self.update_modules()
|
self.update_modules()
|
||||||
self.parse_modules()
|
self.parse_modules()
|
||||||
|
|
||||||
@@ -268,7 +266,7 @@ class ApacheParser(object):
|
|||||||
the iteration issue. Else... parse and enable mods at same time.
|
the iteration issue. Else... parse and enable mods at same time.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
mods = set() # type: Set[str]
|
mods = {} # type: Dict[str, str]
|
||||||
matches = self.find_dir("LoadModule")
|
matches = self.find_dir("LoadModule")
|
||||||
iterator = iter(matches)
|
iterator = iter(matches)
|
||||||
# Make sure prev_size != cur_size for do: while: iteration
|
# Make sure prev_size != cur_size for do: while: iteration
|
||||||
@@ -282,41 +280,24 @@ class ApacheParser(object):
|
|||||||
mod_name = self.get_arg(match_name)
|
mod_name = self.get_arg(match_name)
|
||||||
mod_filename = self.get_arg(match_filename)
|
mod_filename = self.get_arg(match_filename)
|
||||||
if mod_name and mod_filename:
|
if mod_name and mod_filename:
|
||||||
mods.add(mod_name)
|
mods[mod_name] = mod_filename
|
||||||
mods.add(os.path.basename(mod_filename)[:-2] + "c")
|
mods[os.path.basename(mod_filename)[:-2] + "c"] = mod_filename
|
||||||
else:
|
else:
|
||||||
logger.debug("Could not read LoadModule directive from " +
|
logger.debug("Could not read LoadModule directive from Augeas path: %s",
|
||||||
"Augeas path: %s", match_name[6:])
|
match_name[6:])
|
||||||
self.modules.update(mods)
|
self.modules.update(mods)
|
||||||
|
|
||||||
def update_runtime_variables(self):
|
def update_runtime_variables(self):
|
||||||
"""Update Includes, Defines and Includes from httpd config dump data"""
|
"""Update Includes, Defines and Includes from httpd config dump data"""
|
||||||
|
|
||||||
self.update_defines()
|
self.update_defines()
|
||||||
self.update_includes()
|
self.update_includes()
|
||||||
self.update_modules()
|
self.update_modules()
|
||||||
|
|
||||||
def update_defines(self):
|
def update_defines(self):
|
||||||
"""Get Defines from httpd process"""
|
"""Updates the dictionary of known variables in the configuration"""
|
||||||
|
|
||||||
variables = dict()
|
self.variables = apache_util.parse_defines(self.configurator.option("ctl"))
|
||||||
define_cmd = [self.configurator.option("ctl"), "-t", "-D",
|
|
||||||
"DUMP_RUN_CFG"]
|
|
||||||
matches = self.parse_from_subprocess(define_cmd, r"Define: ([^ \n]*)")
|
|
||||||
try:
|
|
||||||
matches.remove("DUMP_RUN_CFG")
|
|
||||||
except ValueError:
|
|
||||||
return
|
|
||||||
|
|
||||||
for match in matches:
|
|
||||||
if match.count("=") > 1:
|
|
||||||
logger.error("Unexpected number of equal signs in "
|
|
||||||
"runtime config dump.")
|
|
||||||
raise errors.PluginError(
|
|
||||||
"Error parsing Apache runtime variables")
|
|
||||||
parts = match.partition("=")
|
|
||||||
variables[parts[0]] = parts[2]
|
|
||||||
|
|
||||||
self.variables = variables
|
|
||||||
|
|
||||||
def update_includes(self):
|
def update_includes(self):
|
||||||
"""Get includes from httpd process, and add them to DOM if needed"""
|
"""Get includes from httpd process, and add them to DOM if needed"""
|
||||||
@@ -326,9 +307,7 @@ class ApacheParser(object):
|
|||||||
# configuration files
|
# configuration files
|
||||||
_ = self.find_dir("Include")
|
_ = self.find_dir("Include")
|
||||||
|
|
||||||
inc_cmd = [self.configurator.option("ctl"), "-t", "-D",
|
matches = apache_util.parse_includes(self.configurator.option("ctl"))
|
||||||
"DUMP_INCLUDES"]
|
|
||||||
matches = self.parse_from_subprocess(inc_cmd, r"\(.*\) (.*)")
|
|
||||||
if matches:
|
if matches:
|
||||||
for i in matches:
|
for i in matches:
|
||||||
if not self.parsed_in_current(i):
|
if not self.parsed_in_current(i):
|
||||||
@@ -337,57 +316,11 @@ class ApacheParser(object):
|
|||||||
def update_modules(self):
|
def update_modules(self):
|
||||||
"""Get loaded modules from httpd process, and add them to DOM"""
|
"""Get loaded modules from httpd process, and add them to DOM"""
|
||||||
|
|
||||||
mod_cmd = [self.configurator.option("ctl"), "-t", "-D",
|
matches = apache_util.parse_modules(self.configurator.option("ctl"))
|
||||||
"DUMP_MODULES"]
|
|
||||||
matches = self.parse_from_subprocess(mod_cmd, r"(.*)_module")
|
|
||||||
for mod in matches:
|
for mod in matches:
|
||||||
self.add_mod(mod.strip())
|
self.add_mod(mod.strip())
|
||||||
|
|
||||||
def parse_from_subprocess(self, command, regexp):
|
def filter_args_num(self, matches, args):
|
||||||
"""Get values from stdout of subprocess command
|
|
||||||
|
|
||||||
:param list command: Command to run
|
|
||||||
:param str regexp: Regexp for parsing
|
|
||||||
|
|
||||||
:returns: list parsed from command output
|
|
||||||
:rtype: list
|
|
||||||
|
|
||||||
"""
|
|
||||||
stdout = self._get_runtime_cfg(command)
|
|
||||||
return re.compile(regexp).findall(stdout)
|
|
||||||
|
|
||||||
def _get_runtime_cfg(self, command): # pylint: disable=no-self-use
|
|
||||||
"""Get runtime configuration info.
|
|
||||||
:param command: Command to run
|
|
||||||
|
|
||||||
:returns: stdout from command
|
|
||||||
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
proc = subprocess.Popen(
|
|
||||||
command,
|
|
||||||
stdout=subprocess.PIPE,
|
|
||||||
stderr=subprocess.PIPE,
|
|
||||||
universal_newlines=True)
|
|
||||||
stdout, stderr = proc.communicate()
|
|
||||||
|
|
||||||
except (OSError, ValueError):
|
|
||||||
logger.error(
|
|
||||||
"Error running command %s for runtime parameters!%s",
|
|
||||||
command, os.linesep)
|
|
||||||
raise errors.MisconfigurationError(
|
|
||||||
"Error accessing loaded Apache parameters: {0}".format(
|
|
||||||
command))
|
|
||||||
# Small errors that do not impede
|
|
||||||
if proc.returncode != 0:
|
|
||||||
logger.warning("Error in checking parameter list: %s", stderr)
|
|
||||||
raise errors.MisconfigurationError(
|
|
||||||
"Apache is unable to check whether or not the module is "
|
|
||||||
"loaded because Apache is misconfigured.")
|
|
||||||
|
|
||||||
return stdout
|
|
||||||
|
|
||||||
def filter_args_num(self, matches, args): # pylint: disable=no-self-use
|
|
||||||
"""Filter out directives with specific number of arguments.
|
"""Filter out directives with specific number of arguments.
|
||||||
|
|
||||||
This function makes the assumption that all related arguments are given
|
This function makes the assumption that all related arguments are given
|
||||||
@@ -613,7 +546,7 @@ class ApacheParser(object):
|
|||||||
"%s//*[self::directive=~regexp('%s')]" % (start, regex))
|
"%s//*[self::directive=~regexp('%s')]" % (start, regex))
|
||||||
|
|
||||||
if exclude:
|
if exclude:
|
||||||
matches = self._exclude_dirs(matches)
|
matches = self.exclude_dirs(matches)
|
||||||
|
|
||||||
if arg is None:
|
if arg is None:
|
||||||
arg_suffix = "/arg"
|
arg_suffix = "/arg"
|
||||||
@@ -626,7 +559,7 @@ class ApacheParser(object):
|
|||||||
# https://httpd.apache.org/docs/2.4/mod/core.html#include
|
# https://httpd.apache.org/docs/2.4/mod/core.html#include
|
||||||
for match in matches:
|
for match in matches:
|
||||||
dir_ = self.aug.get(match).lower()
|
dir_ = self.aug.get(match).lower()
|
||||||
if dir_ == "include" or dir_ == "includeoptional":
|
if dir_ in ("include", "includeoptional"):
|
||||||
ordered_matches.extend(self.find_dir(
|
ordered_matches.extend(self.find_dir(
|
||||||
directive, arg,
|
directive, arg,
|
||||||
self._get_include_path(self.get_arg(match + "/arg")),
|
self._get_include_path(self.get_arg(match + "/arg")),
|
||||||
@@ -666,8 +599,7 @@ class ApacheParser(object):
|
|||||||
# e.g. strip now, not later
|
# e.g. strip now, not later
|
||||||
if not value:
|
if not value:
|
||||||
return None
|
return None
|
||||||
else:
|
value = value.strip("'\"")
|
||||||
value = value.strip("'\"")
|
|
||||||
|
|
||||||
variables = ApacheParser.arg_var_interpreter.findall(value)
|
variables = ApacheParser.arg_var_interpreter.findall(value)
|
||||||
|
|
||||||
@@ -680,9 +612,15 @@ class ApacheParser(object):
|
|||||||
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def _exclude_dirs(self, matches):
|
def get_root_augpath(self):
|
||||||
|
"""
|
||||||
|
Returns the Augeas path of root configuration.
|
||||||
|
"""
|
||||||
|
return get_aug_path(self.loc["root"])
|
||||||
|
|
||||||
|
def exclude_dirs(self, matches):
|
||||||
"""Exclude directives that are not loaded into the configuration."""
|
"""Exclude directives that are not loaded into the configuration."""
|
||||||
filters = [("ifmodule", self.modules), ("ifdefine", self.variables)]
|
filters = [("ifmodule", self.modules.keys()), ("ifdefine", self.variables)]
|
||||||
|
|
||||||
valid_matches = []
|
valid_matches = []
|
||||||
|
|
||||||
@@ -723,6 +661,25 @@ class ApacheParser(object):
|
|||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def standard_path_from_server_root(self, arg):
|
||||||
|
"""Ensure paths are consistent and absolute
|
||||||
|
|
||||||
|
:param str arg: Argument of directive
|
||||||
|
|
||||||
|
:returns: Standardized argument path
|
||||||
|
:rtype: str
|
||||||
|
"""
|
||||||
|
# Remove beginning and ending quotes
|
||||||
|
arg = arg.strip("'\"")
|
||||||
|
|
||||||
|
# Standardize the include argument based on server root
|
||||||
|
if not arg.startswith("/"):
|
||||||
|
# Normpath will condense ../
|
||||||
|
arg = os.path.normpath(os.path.join(self.root, arg))
|
||||||
|
else:
|
||||||
|
arg = os.path.normpath(arg)
|
||||||
|
return arg
|
||||||
|
|
||||||
def _get_include_path(self, arg):
|
def _get_include_path(self, arg):
|
||||||
"""Converts an Apache Include directive into Augeas path.
|
"""Converts an Apache Include directive into Augeas path.
|
||||||
|
|
||||||
@@ -743,16 +700,7 @@ class ApacheParser(object):
|
|||||||
# if matchObj.group() != arg:
|
# if matchObj.group() != arg:
|
||||||
# logger.error("Error: Invalid regexp characters in %s", arg)
|
# logger.error("Error: Invalid regexp characters in %s", arg)
|
||||||
# return []
|
# return []
|
||||||
|
arg = self.standard_path_from_server_root(arg)
|
||||||
# Remove beginning and ending quotes
|
|
||||||
arg = arg.strip("'\"")
|
|
||||||
|
|
||||||
# Standardize the include argument based on server root
|
|
||||||
if not arg.startswith("/"):
|
|
||||||
# Normpath will condense ../
|
|
||||||
arg = os.path.normpath(os.path.join(self.root, arg))
|
|
||||||
else:
|
|
||||||
arg = os.path.normpath(arg)
|
|
||||||
|
|
||||||
# Attempts to add a transform to the file if one does not already exist
|
# Attempts to add a transform to the file if one does not already exist
|
||||||
if os.path.isdir(arg):
|
if os.path.isdir(arg):
|
||||||
@@ -766,7 +714,7 @@ class ApacheParser(object):
|
|||||||
split_arg = arg.split("/")
|
split_arg = arg.split("/")
|
||||||
for idx, split in enumerate(split_arg):
|
for idx, split in enumerate(split_arg):
|
||||||
if any(char in ApacheParser.fnmatch_chars for char in split):
|
if any(char in ApacheParser.fnmatch_chars for char in split):
|
||||||
# Turn it into a augeas regex
|
# Turn it into an augeas regex
|
||||||
# TODO: Can this instead be an augeas glob instead of regex
|
# TODO: Can this instead be an augeas glob instead of regex
|
||||||
split_arg[idx] = ("* [label()=~regexp('%s')]" %
|
split_arg[idx] = ("* [label()=~regexp('%s')]" %
|
||||||
self.fnmatch_to_re(split))
|
self.fnmatch_to_re(split))
|
||||||
@@ -776,7 +724,7 @@ class ApacheParser(object):
|
|||||||
|
|
||||||
return get_aug_path(arg)
|
return get_aug_path(arg)
|
||||||
|
|
||||||
def fnmatch_to_re(self, clean_fn_match): # pylint: disable=no-self-use
|
def fnmatch_to_re(self, clean_fn_match):
|
||||||
"""Method converts Apache's basic fnmatch to regular expression.
|
"""Method converts Apache's basic fnmatch to regular expression.
|
||||||
|
|
||||||
Assumption - Configs are assumed to be well-formed and only writable by
|
Assumption - Configs are assumed to be well-formed and only writable by
|
||||||
@@ -793,7 +741,7 @@ class ApacheParser(object):
|
|||||||
"""
|
"""
|
||||||
if sys.version_info < (3, 6):
|
if sys.version_info < (3, 6):
|
||||||
# This strips off final /Z(?ms)
|
# This strips off final /Z(?ms)
|
||||||
return fnmatch.translate(clean_fn_match)[:-7]
|
return fnmatch.translate(clean_fn_match)[:-7] # pragma: no cover
|
||||||
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
|
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
|
||||||
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
|
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
|
||||||
|
|
||||||
@@ -997,8 +945,8 @@ def case_i(string):
|
|||||||
:param str string: string to make case i regex
|
:param str string: string to make case i regex
|
||||||
|
|
||||||
"""
|
"""
|
||||||
return "".join(["[" + c.upper() + c.lower() + "]"
|
return "".join("[" + c.upper() + c.lower() + "]"
|
||||||
if c.isalpha() else c for c in re.escape(string)])
|
if c.isalpha() else c for c in re.escape(string))
|
||||||
|
|
||||||
|
|
||||||
def get_aug_path(file_path):
|
def get_aug_path(file_path):
|
||||||
129
certbot-apache/certbot_apache/_internal/parsernode_util.py
Normal file
129
certbot-apache/certbot_apache/_internal/parsernode_util.py
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
"""ParserNode utils"""
|
||||||
|
|
||||||
|
|
||||||
|
def validate_kwargs(kwargs, required_names):
|
||||||
|
"""
|
||||||
|
Ensures that the kwargs dict has all the expected values. This function modifies
|
||||||
|
the kwargs dictionary, and hence the returned dictionary should be used instead
|
||||||
|
in the caller function instead of the original kwargs.
|
||||||
|
|
||||||
|
:param dict kwargs: Dictionary of keyword arguments to validate.
|
||||||
|
:param list required_names: List of required parameter names.
|
||||||
|
"""
|
||||||
|
|
||||||
|
validated_kwargs = {}
|
||||||
|
for name in required_names:
|
||||||
|
try:
|
||||||
|
validated_kwargs[name] = kwargs.pop(name)
|
||||||
|
except KeyError:
|
||||||
|
raise TypeError("Required keyword argument: {} undefined.".format(name))
|
||||||
|
|
||||||
|
# Raise exception if unknown key word arguments are found.
|
||||||
|
if kwargs:
|
||||||
|
unknown = ", ".join(kwargs.keys())
|
||||||
|
raise TypeError("Unknown keyword argument(s): {}".format(unknown))
|
||||||
|
return validated_kwargs
|
||||||
|
|
||||||
|
|
||||||
|
def parsernode_kwargs(kwargs):
|
||||||
|
"""
|
||||||
|
Validates keyword arguments for ParserNode. This function modifies the kwargs
|
||||||
|
dictionary, and hence the returned dictionary should be used instead in the
|
||||||
|
caller function instead of the original kwargs.
|
||||||
|
|
||||||
|
If metadata is provided, the otherwise required argument "filepath" may be
|
||||||
|
omitted if the implementation is able to extract its value from the metadata.
|
||||||
|
This usecase is handled within this function. Filepath defaults to None.
|
||||||
|
|
||||||
|
:param dict kwargs: Keyword argument dictionary to validate.
|
||||||
|
|
||||||
|
:returns: Tuple of validated and prepared arguments.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# As many values of ParserNode instances can be derived from the metadata,
|
||||||
|
# (ancestor being a common exception here) make sure we permit it here as well.
|
||||||
|
if "metadata" in kwargs:
|
||||||
|
# Filepath can be derived from the metadata in Augeas implementation.
|
||||||
|
# Default is None, as in this case the responsibility of populating this
|
||||||
|
# variable lies on the implementation.
|
||||||
|
kwargs.setdefault("filepath", None)
|
||||||
|
|
||||||
|
kwargs.setdefault("dirty", False)
|
||||||
|
kwargs.setdefault("metadata", {})
|
||||||
|
|
||||||
|
kwargs = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "metadata"])
|
||||||
|
return kwargs["ancestor"], kwargs["dirty"], kwargs["filepath"], kwargs["metadata"]
|
||||||
|
|
||||||
|
|
||||||
|
def commentnode_kwargs(kwargs):
|
||||||
|
"""
|
||||||
|
Validates keyword arguments for CommentNode and sets the default values for
|
||||||
|
optional kwargs. This function modifies the kwargs dictionary, and hence the
|
||||||
|
returned dictionary should be used instead in the caller function instead of
|
||||||
|
the original kwargs.
|
||||||
|
|
||||||
|
If metadata is provided, the otherwise required argument "comment" may be
|
||||||
|
omitted if the implementation is able to extract its value from the metadata.
|
||||||
|
This usecase is handled within this function.
|
||||||
|
|
||||||
|
:param dict kwargs: Keyword argument dictionary to validate.
|
||||||
|
|
||||||
|
:returns: Tuple of validated and prepared arguments and ParserNode kwargs.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# As many values of ParserNode instances can be derived from the metadata,
|
||||||
|
# (ancestor being a common exception here) make sure we permit it here as well.
|
||||||
|
if "metadata" in kwargs:
|
||||||
|
kwargs.setdefault("comment", None)
|
||||||
|
# Filepath can be derived from the metadata in Augeas implementation.
|
||||||
|
# Default is None, as in this case the responsibility of populating this
|
||||||
|
# variable lies on the implementation.
|
||||||
|
kwargs.setdefault("filepath", None)
|
||||||
|
|
||||||
|
kwargs.setdefault("dirty", False)
|
||||||
|
kwargs.setdefault("metadata", {})
|
||||||
|
|
||||||
|
kwargs = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "comment",
|
||||||
|
"metadata"])
|
||||||
|
|
||||||
|
comment = kwargs.pop("comment")
|
||||||
|
return comment, kwargs
|
||||||
|
|
||||||
|
|
||||||
|
def directivenode_kwargs(kwargs):
|
||||||
|
"""
|
||||||
|
Validates keyword arguments for DirectiveNode and BlockNode and sets the
|
||||||
|
default values for optional kwargs. This function modifies the kwargs
|
||||||
|
dictionary, and hence the returned dictionary should be used instead in the
|
||||||
|
caller function instead of the original kwargs.
|
||||||
|
|
||||||
|
If metadata is provided, the otherwise required argument "name" may be
|
||||||
|
omitted if the implementation is able to extract its value from the metadata.
|
||||||
|
This usecase is handled within this function.
|
||||||
|
|
||||||
|
:param dict kwargs: Keyword argument dictionary to validate.
|
||||||
|
|
||||||
|
:returns: Tuple of validated and prepared arguments and ParserNode kwargs.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# As many values of ParserNode instances can be derived from the metadata,
|
||||||
|
# (ancestor being a common exception here) make sure we permit it here as well.
|
||||||
|
if "metadata" in kwargs:
|
||||||
|
kwargs.setdefault("name", None)
|
||||||
|
# Filepath can be derived from the metadata in Augeas implementation.
|
||||||
|
# Default is None, as in this case the responsibility of populating this
|
||||||
|
# variable lies on the implementation.
|
||||||
|
kwargs.setdefault("filepath", None)
|
||||||
|
|
||||||
|
kwargs.setdefault("dirty", False)
|
||||||
|
kwargs.setdefault("enabled", True)
|
||||||
|
kwargs.setdefault("parameters", ())
|
||||||
|
kwargs.setdefault("metadata", {})
|
||||||
|
|
||||||
|
kwargs = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "name",
|
||||||
|
"parameters", "enabled", "metadata"])
|
||||||
|
|
||||||
|
name = kwargs.pop("name")
|
||||||
|
parameters = kwargs.pop("parameters")
|
||||||
|
enabled = kwargs.pop("enabled")
|
||||||
|
return name, parameters, enabled, kwargs
|
||||||
@@ -0,0 +1,19 @@
|
|||||||
|
# This file contains important security parameters. If you modify this file
|
||||||
|
# manually, Certbot will be unable to automatically provide future security
|
||||||
|
# updates. Instead, Certbot will print and log an error message with a path to
|
||||||
|
# the up-to-date file that you will need to refer to when manually updating
|
||||||
|
# this file.
|
||||||
|
|
||||||
|
SSLEngine on
|
||||||
|
|
||||||
|
# Intermediate configuration, tweak to your needs
|
||||||
|
SSLProtocol all -SSLv2 -SSLv3 -TLSv1 -TLSv1.1
|
||||||
|
SSLCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
|
||||||
|
SSLHonorCipherOrder off
|
||||||
|
SSLSessionTickets off
|
||||||
|
|
||||||
|
SSLOptions +StrictRequire
|
||||||
|
|
||||||
|
# Add vhost name to log entries:
|
||||||
|
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
|
||||||
|
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# This file contains important security parameters. If you modify this file
|
||||||
|
# manually, Certbot will be unable to automatically provide future security
|
||||||
|
# updates. Instead, Certbot will print and log an error message with a path to
|
||||||
|
# the up-to-date file that you will need to refer to when manually updating
|
||||||
|
# this file.
|
||||||
|
|
||||||
|
SSLEngine on
|
||||||
|
|
||||||
|
# Intermediate configuration, tweak to your needs
|
||||||
|
SSLProtocol all -SSLv2 -SSLv3 -TLSv1 -TLSv1.1
|
||||||
|
SSLCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
|
||||||
|
SSLHonorCipherOrder off
|
||||||
|
|
||||||
|
SSLOptions +StrictRequire
|
||||||
|
|
||||||
|
# Add vhost name to log entries:
|
||||||
|
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
|
||||||
|
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
""" Utility functions for certbot-apache plugin """
|
|
||||||
import binascii
|
|
||||||
|
|
||||||
from certbot import util
|
|
||||||
from certbot.compat import os
|
|
||||||
|
|
||||||
|
|
||||||
def get_mod_deps(mod_name):
|
|
||||||
"""Get known module dependencies.
|
|
||||||
|
|
||||||
.. note:: This does not need to be accurate in order for the client to
|
|
||||||
run. This simply keeps things clean if the user decides to revert
|
|
||||||
changes.
|
|
||||||
.. warning:: If all deps are not included, it may cause incorrect parsing
|
|
||||||
behavior, due to enable_mod's shortcut for updating the parser's
|
|
||||||
currently defined modules (`.ApacheParser.add_mod`)
|
|
||||||
This would only present a major problem in extremely atypical
|
|
||||||
configs that use ifmod for the missing deps.
|
|
||||||
|
|
||||||
"""
|
|
||||||
deps = {
|
|
||||||
"ssl": ["setenvif", "mime"]
|
|
||||||
}
|
|
||||||
return deps.get(mod_name, [])
|
|
||||||
|
|
||||||
|
|
||||||
def get_file_path(vhost_path):
|
|
||||||
"""Get file path from augeas_vhost_path.
|
|
||||||
|
|
||||||
Takes in Augeas path and returns the file name
|
|
||||||
|
|
||||||
:param str vhost_path: Augeas virtual host path
|
|
||||||
|
|
||||||
:returns: filename of vhost
|
|
||||||
:rtype: str
|
|
||||||
|
|
||||||
"""
|
|
||||||
if not vhost_path or not vhost_path.startswith("/files/"):
|
|
||||||
return None
|
|
||||||
|
|
||||||
return _split_aug_path(vhost_path)[0]
|
|
||||||
|
|
||||||
|
|
||||||
def get_internal_aug_path(vhost_path):
|
|
||||||
"""Get the Augeas path for a vhost with the file path removed.
|
|
||||||
|
|
||||||
:param str vhost_path: Augeas virtual host path
|
|
||||||
|
|
||||||
:returns: Augeas path to vhost relative to the containing file
|
|
||||||
:rtype: str
|
|
||||||
|
|
||||||
"""
|
|
||||||
return _split_aug_path(vhost_path)[1]
|
|
||||||
|
|
||||||
|
|
||||||
def _split_aug_path(vhost_path):
|
|
||||||
"""Splits an Augeas path into a file path and an internal path.
|
|
||||||
|
|
||||||
After removing "/files", this function splits vhost_path into the
|
|
||||||
file path and the remaining Augeas path.
|
|
||||||
|
|
||||||
:param str vhost_path: Augeas virtual host path
|
|
||||||
|
|
||||||
:returns: file path and internal Augeas path
|
|
||||||
:rtype: `tuple` of `str`
|
|
||||||
|
|
||||||
"""
|
|
||||||
# Strip off /files
|
|
||||||
file_path = vhost_path[6:]
|
|
||||||
internal_path = []
|
|
||||||
|
|
||||||
# Remove components from the end of file_path until it becomes valid
|
|
||||||
while not os.path.exists(file_path):
|
|
||||||
file_path, _, internal_path_part = file_path.rpartition("/")
|
|
||||||
internal_path.append(internal_path_part)
|
|
||||||
|
|
||||||
return file_path, "/".join(reversed(internal_path))
|
|
||||||
|
|
||||||
|
|
||||||
def parse_define_file(filepath, varname):
|
|
||||||
""" Parses Defines from a variable in configuration file
|
|
||||||
|
|
||||||
:param str filepath: Path of file to parse
|
|
||||||
:param str varname: Name of the variable
|
|
||||||
|
|
||||||
:returns: Dict of Define:Value pairs
|
|
||||||
:rtype: `dict`
|
|
||||||
|
|
||||||
"""
|
|
||||||
return_vars = {}
|
|
||||||
# Get list of words in the variable
|
|
||||||
a_opts = util.get_var_from_file(varname, filepath).split()
|
|
||||||
for i, v in enumerate(a_opts):
|
|
||||||
# Handle Define statements and make sure it has an argument
|
|
||||||
if v == "-D" and len(a_opts) >= i+2:
|
|
||||||
var_parts = a_opts[i+1].partition("=")
|
|
||||||
return_vars[var_parts[0]] = var_parts[2]
|
|
||||||
elif len(v) > 2 and v.startswith("-D"):
|
|
||||||
# Found var with no whitespace separator
|
|
||||||
var_parts = v[2:].partition("=")
|
|
||||||
return_vars[var_parts[0]] = var_parts[2]
|
|
||||||
return return_vars
|
|
||||||
|
|
||||||
|
|
||||||
def unique_id():
|
|
||||||
""" Returns an unique id to be used as a VirtualHost identifier"""
|
|
||||||
return binascii.hexlify(os.urandom(16)).decode("utf-8")
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
# This file contains important security parameters. If you modify this file
|
|
||||||
# manually, Certbot will be unable to automatically provide future security
|
|
||||||
# updates. Instead, Certbot will print and log an error message with a path to
|
|
||||||
# the up-to-date file that you will need to refer to when manually updating
|
|
||||||
# this file.
|
|
||||||
|
|
||||||
SSLEngine on
|
|
||||||
|
|
||||||
# Intermediate configuration, tweak to your needs
|
|
||||||
SSLProtocol all -SSLv2 -SSLv3
|
|
||||||
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
|
|
||||||
SSLHonorCipherOrder on
|
|
||||||
|
|
||||||
SSLOptions +StrictRequire
|
|
||||||
|
|
||||||
# Add vhost name to log entries:
|
|
||||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
|
|
||||||
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common
|
|
||||||
|
|
||||||
#CustomLog /var/log/apache2/access.log vhost_combined
|
|
||||||
#LogLevel warn
|
|
||||||
#ErrorLog /var/log/apache2/error.log
|
|
||||||
|
|
||||||
# Always ensure Cookies have "Secure" set (JAH 2012/1)
|
|
||||||
#Header edit Set-Cookie (?i)^(.*)(;\s*secure)??((\s*;)?(.*)) "$1; Secure$3$4"
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
# This file contains important security parameters. If you modify this file
|
|
||||||
# manually, Certbot will be unable to automatically provide future security
|
|
||||||
# updates. Instead, Certbot will print and log an error message with a path to
|
|
||||||
# the up-to-date file that you will need to refer to when manually updating
|
|
||||||
# this file.
|
|
||||||
|
|
||||||
SSLEngine on
|
|
||||||
|
|
||||||
# Intermediate configuration, tweak to your needs
|
|
||||||
SSLProtocol all -SSLv2 -SSLv3
|
|
||||||
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
|
|
||||||
SSLHonorCipherOrder on
|
|
||||||
SSLCompression off
|
|
||||||
|
|
||||||
SSLOptions +StrictRequire
|
|
||||||
|
|
||||||
# Add vhost name to log entries:
|
|
||||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
|
|
||||||
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common
|
|
||||||
|
|
||||||
#CustomLog /var/log/apache2/access.log vhost_combined
|
|
||||||
#LogLevel warn
|
|
||||||
#ErrorLog /var/log/apache2/error.log
|
|
||||||
|
|
||||||
# Always ensure Cookies have "Secure" set (JAH 2012/1)
|
|
||||||
#Header edit Set-Cookie (?i)^(.*)(;\s*secure)??((\s*;)?(.*)) "$1; Secure$3$4"
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user