Compare commits
284 Commits
test-windo
...
no-refacto
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
78a3461248 | ||
|
|
6766786049 | ||
|
|
d2578e05e7 | ||
|
|
2746fc572f | ||
|
|
d20e42562c | ||
|
|
4756b66089 | ||
|
|
e8265dbf9c | ||
|
|
b1edda8a65 | ||
|
|
81d5d2b421 | ||
|
|
8f8dd2824e | ||
|
|
9740f5428e | ||
|
|
91c079ab41 | ||
|
|
200e1f1709 | ||
|
|
e501e277b3 | ||
|
|
cdbc264bb6 | ||
|
|
a0f22d21ce | ||
|
|
94af235713 | ||
|
|
2375d87831 | ||
|
|
1a698fa235 | ||
|
|
d250d34193 | ||
|
|
777935c8ed | ||
|
|
15c2792036 | ||
|
|
46beb8af84 | ||
|
|
aa63688450 | ||
|
|
93f61887be | ||
|
|
54475964bd | ||
|
|
065df4c9a7 | ||
|
|
cde3e1fa97 | ||
|
|
bb2db252a7 | ||
|
|
abe23c0e60 | ||
|
|
b0aa064640 | ||
|
|
bd5f4f2d8a | ||
|
|
aea3c7e363 | ||
|
|
fc02b10560 | ||
|
|
ee190db235 | ||
|
|
077d28828a | ||
|
|
0b63d81f95 | ||
|
|
d139e26a1c | ||
|
|
dedd0b84a8 | ||
|
|
b9e4763de3 | ||
|
|
8897a81f7d | ||
|
|
5d6abc3234 | ||
|
|
dc7524d1d6 | ||
|
|
70a18a9486 | ||
|
|
b7bde05aee | ||
|
|
8ff7153019 | ||
|
|
0d4f92fa81 | ||
|
|
1a2d74decc | ||
|
|
f6d5c8ffbe | ||
|
|
52e207a404 | ||
|
|
694c03bd6a | ||
|
|
058faeadac | ||
|
|
295dc5a2a9 | ||
|
|
a8a8a39ff1 | ||
|
|
435ae075a5 | ||
|
|
06c8113863 | ||
|
|
143ea15253 | ||
|
|
acf48df979 | ||
|
|
6a9e0ec59d | ||
|
|
5b96cc9c37 | ||
|
|
525c427c60 | ||
|
|
23e1e07139 | ||
|
|
241a7c32a2 | ||
|
|
10dc41e83d | ||
|
|
6943cea6b7 | ||
|
|
b4c49cf781 | ||
|
|
5e87aee968 | ||
|
|
693a2a7904 | ||
|
|
3058b6e748 | ||
|
|
7b78770010 | ||
|
|
cd2dff2db1 | ||
|
|
8194e8faef | ||
|
|
06698ad95f | ||
|
|
0d76d1f219 | ||
|
|
5c3c682b6e | ||
|
|
1129d850d3 | ||
|
|
bdc48e6a32 | ||
|
|
523f8f5e65 | ||
|
|
1dabddeb85 | ||
|
|
f9ef894141 | ||
|
|
979e21dcbf | ||
|
|
8133d3e70a | ||
|
|
08839758bd | ||
|
|
10eecf9c97 | ||
|
|
bebd399488 | ||
|
|
a105b587ac | ||
|
|
8e29063ba7 | ||
|
|
117791b582 | ||
|
|
2ab7857fa5 | ||
|
|
7ede5c3487 | ||
|
|
915459258b | ||
|
|
d94cf0e1d6 | ||
|
|
952a296e20 | ||
|
|
d9a1850eaa | ||
|
|
667750f3ff | ||
|
|
8b610239bf | ||
|
|
62426caa5a | ||
|
|
f137d8424e | ||
|
|
e5c41e76c5 | ||
|
|
1e114b4ef8 | ||
|
|
bc7c953bcc | ||
|
|
60a91eb688 | ||
|
|
1b025e84e8 | ||
|
|
d3555623ba | ||
|
|
18ea72faf1 | ||
|
|
c8255dded5 | ||
|
|
b48e336554 | ||
|
|
0c637860cd | ||
|
|
0b08a80dce | ||
|
|
d7b26c1bb2 | ||
|
|
78261dbae2 | ||
|
|
2ed4e0a17e | ||
|
|
c372dd8aee | ||
|
|
01772280c0 | ||
|
|
814d8d1aba | ||
|
|
a190480517 | ||
|
|
7e8f22e136 | ||
|
|
965a403699 | ||
|
|
968cc5801b | ||
|
|
492b578662 | ||
|
|
e946479b9f | ||
|
|
f88105a952 | ||
|
|
3380694fa8 | ||
|
|
18631b99ef | ||
|
|
55d461392a | ||
|
|
a7a9a8480b | ||
|
|
3640b8546e | ||
|
|
1f94c7db20 | ||
|
|
a02223a97f | ||
|
|
2e31b1ca41 | ||
|
|
7ce86f588b | ||
|
|
39b396763a | ||
|
|
6f27c32db1 | ||
|
|
099c6c8b24 | ||
|
|
315ddb247f | ||
|
|
2df279bc5b | ||
|
|
9e6b406218 | ||
|
|
352ee258b7 | ||
|
|
5040495741 | ||
|
|
bc23e07ee5 | ||
|
|
466e437a20 | ||
|
|
ee3b3656ea | ||
|
|
db40974788 | ||
|
|
89396cefa2 | ||
|
|
c48adc5753 | ||
|
|
c788820f5d | ||
|
|
b0552e1939 | ||
|
|
7eae058af5 | ||
|
|
934de48d44 | ||
|
|
e39c7b5233 | ||
|
|
56c781aec4 | ||
|
|
484309ed95 | ||
|
|
67e3c54744 | ||
|
|
bb6a076fda | ||
|
|
dd0e590de3 | ||
|
|
d3d9a05826 | ||
|
|
2cf1775864 | ||
|
|
f339d23e54 | ||
|
|
ac3edc2c1d | ||
|
|
ba912018f8 | ||
|
|
c06e40dbef | ||
|
|
32247b3c89 | ||
|
|
e4f5aced1c | ||
|
|
9292666b28 | ||
|
|
fb967fda15 | ||
|
|
4a404e2a4a | ||
|
|
0dbe17bbd4 | ||
|
|
e33090f282 | ||
|
|
06bece36de | ||
|
|
7f9857a81b | ||
|
|
459a254aea | ||
|
|
c21f277248 | ||
|
|
04a85742c1 | ||
|
|
21be290e24 | ||
|
|
bf40b81b5a | ||
|
|
1b6e4028dc | ||
|
|
f15d10abc8 | ||
|
|
a12d91aef6 | ||
|
|
c438a397a0 | ||
|
|
0f9f902b6e | ||
|
|
33f177b361 | ||
|
|
69479b7277 | ||
|
|
2622a700e0 | ||
|
|
06a53cb7df | ||
|
|
584a1a3ece | ||
|
|
28fac893f4 | ||
|
|
8a84c88fee | ||
|
|
fea0b4e2e5 | ||
|
|
1ea588d504 | ||
|
|
24fd4121cf | ||
|
|
8759ccaecb | ||
|
|
f4fc3e636d | ||
|
|
018efc241c | ||
|
|
fa25d8356d | ||
|
|
fd62a09197 | ||
|
|
8d8b35b7c0 | ||
|
|
74f6f734c8 | ||
|
|
0480959893 | ||
|
|
f90e93134c | ||
|
|
d3b74f41e0 | ||
|
|
1d7ddb0c0c | ||
|
|
54b0b98988 | ||
|
|
9fdb24331c | ||
|
|
84178e2773 | ||
|
|
ae2247163e | ||
|
|
6bc8b3d2ba | ||
|
|
40ae5d939e | ||
|
|
1b39d3dc47 | ||
|
|
2324c1bb7a | ||
|
|
bc892e04c4 | ||
|
|
0962b0fc83 | ||
|
|
dd6f2f565e | ||
|
|
f2d8c81e9b | ||
|
|
67b65bb2c0 | ||
|
|
76895457c9 | ||
|
|
c02b2d30f2 | ||
|
|
94dc6936e7 | ||
|
|
a3abcc001a | ||
|
|
9643e85b4c | ||
|
|
9d97be3a84 | ||
|
|
4d6db0eb71 | ||
|
|
92a66454b6 | ||
|
|
976068b5a0 | ||
|
|
1e30723003 | ||
|
|
496a4ced25 | ||
|
|
fab9bfd878 | ||
|
|
d3ca6af982 | ||
|
|
540fd6db93 | ||
|
|
b0e35c694e | ||
|
|
67c2b27af7 | ||
|
|
135187f03e | ||
|
|
e742cfaa21 | ||
|
|
f71298f661 | ||
|
|
025eb16c7a | ||
|
|
ae3ed200c0 | ||
|
|
c3d6fca3eb | ||
|
|
c43f4fe518 | ||
|
|
0f3f07b5cb | ||
|
|
ef265eccaf | ||
|
|
c0eccdd358 | ||
|
|
c59775c3c0 | ||
|
|
cf062f4c6d | ||
|
|
3d0dad8718 | ||
|
|
edad9bd82b | ||
|
|
2a16aa16c3 | ||
|
|
711cc95dc4 | ||
|
|
c2ee0d2938 | ||
|
|
c668172ef0 | ||
|
|
666ee35e29 | ||
|
|
13af3f7ec2 | ||
|
|
5ad0c254ca | ||
|
|
236062c2d2 | ||
|
|
2bcd8c59db | ||
|
|
57cba3690d | ||
|
|
786a130b7d | ||
|
|
df866b907b | ||
|
|
f0b32783f0 | ||
|
|
534af33a50 | ||
|
|
2e33aec8a8 | ||
|
|
bdfb9f19c4 | ||
|
|
b4e955a60e | ||
|
|
7399807ff2 | ||
|
|
00235d3807 | ||
|
|
adb7e5e62f | ||
|
|
261b5a76d8 | ||
|
|
2fca48caaa | ||
|
|
c0917a0302 | ||
|
|
13d4a99251 | ||
|
|
b9de48e93e | ||
|
|
7a02deeeba | ||
|
|
42f20455cd | ||
|
|
434ca1985f | ||
|
|
4a9748ace5 | ||
|
|
fb8cd063eb | ||
|
|
e602736bda | ||
|
|
ccde1eef64 | ||
|
|
c44a5a7701 | ||
|
|
6e1d042f76 | ||
|
|
daf989fc21 | ||
|
|
5c3fd7d9ee | ||
|
|
fc6c238bf9 | ||
|
|
a49b84d64e | ||
|
|
7567e8d8db | ||
|
|
02a5d000cb |
@@ -5,3 +5,4 @@ pr:
|
||||
|
||||
jobs:
|
||||
- template: templates/jobs/standard-tests-jobs.yml
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ jobs:
|
||||
- name: IMAGE_NAME
|
||||
value: ubuntu-18.04
|
||||
- name: PYTHON_VERSION
|
||||
value: 3.9
|
||||
value: 3.10
|
||||
- group: certbot-common
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -17,30 +17,31 @@ jobs:
|
||||
linux-py38:
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: py38
|
||||
linux-py39:
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: py39
|
||||
linux-py37-nopin:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: py37
|
||||
CERTBOT_NO_PIN: 1
|
||||
linux-external-mock:
|
||||
TOXENV: external-mock
|
||||
linux-boulder-v1-integration-certbot-oldest:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration-certbot-oldest
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-integration-certbot-oldest:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration-certbot-oldest
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v1-integration-nginx-oldest:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration-nginx-oldest
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-integration-nginx-oldest:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration-nginx-oldest
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v1-py27-integration:
|
||||
PYTHON_VERSION: 2.7
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-py27-integration:
|
||||
PYTHON_VERSION: 2.7
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v1-py36-integration:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration
|
||||
@@ -73,6 +74,14 @@ jobs:
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v1-py310-integration:
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-py310-integration:
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
nginx-compat:
|
||||
TOXENV: nginx_compat
|
||||
linux-integration-rfc2136:
|
||||
@@ -81,18 +90,15 @@ jobs:
|
||||
TOXENV: integration-dns-rfc2136
|
||||
docker-dev:
|
||||
TOXENV: docker_dev
|
||||
le-modification:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: modification
|
||||
macos-farmtest-apache2:
|
||||
# We run one of these test farm tests on macOS to help ensure the
|
||||
# tests continue to work on the platform.
|
||||
IMAGE_NAME: macOS-10.15
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: test-farm-apache2
|
||||
farmtest-leauto-upgrades:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: test-farm-leauto-upgrades
|
||||
farmtest-certonly-standalone:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: test-farm-certonly-standalone
|
||||
farmtest-sdists:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: test-farm-sdists
|
||||
|
||||
@@ -12,6 +12,9 @@ jobs:
|
||||
DOCKER_ARCH: arm32v6
|
||||
arm64v8:
|
||||
DOCKER_ARCH: arm64v8
|
||||
# The default timeout of 60 minutes is a little low for compiling
|
||||
# cryptography on ARM architectures.
|
||||
timeoutInMinutes: 180
|
||||
steps:
|
||||
- bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
|
||||
displayName: Build the Docker images
|
||||
@@ -56,10 +59,16 @@ jobs:
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.7
|
||||
versionSpec: 3.9
|
||||
architecture: x86
|
||||
addToPath: true
|
||||
- script: python windows-installer/construct.py
|
||||
- script: |
|
||||
python -m venv venv
|
||||
venv\Scripts\python tools\pipstrap.py
|
||||
venv\Scripts\python tools\pip_install.py -e windows-installer
|
||||
displayName: Prepare Windows installer build environment
|
||||
- script: |
|
||||
venv\Scripts\construct-windows-installer
|
||||
displayName: Build Certbot installer
|
||||
- task: CopyFiles@2
|
||||
inputs:
|
||||
@@ -91,7 +100,7 @@ jobs:
|
||||
displayName: Check Powershell 5.x is used in vs2017-win2016
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.8
|
||||
versionSpec: 3.9
|
||||
addToPath: true
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
@@ -116,13 +125,17 @@ jobs:
|
||||
- job: snaps_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
timeoutInMinutes: 0
|
||||
variables:
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
SNAP_ARCH: amd64
|
||||
# Do not run the heavy non-amd64 builds for test branches
|
||||
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
||||
ARCHS: amd64 arm64 armhf
|
||||
${{ if startsWith(variables['Build.SourceBranchName'], 'test-') }}:
|
||||
ARCHS: amd64
|
||||
armhf:
|
||||
SNAP_ARCH: armhf
|
||||
arm64:
|
||||
SNAP_ARCH: arm64
|
||||
timeoutInMinutes: 0
|
||||
steps:
|
||||
- script: |
|
||||
set -e
|
||||
@@ -144,7 +157,7 @@ jobs:
|
||||
git config --global user.name "$(Build.RequestedFor)"
|
||||
mkdir -p ~/.local/share/snapcraft/provider/launchpad
|
||||
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
|
||||
python3 tools/snap/build_remote.py ALL --archs ${ARCHS} --timeout 19800
|
||||
python3 tools/snap/build_remote.py ALL --archs ${SNAP_ARCH} --timeout 19800
|
||||
displayName: Build snaps
|
||||
- script: |
|
||||
set -e
|
||||
@@ -154,7 +167,7 @@ jobs:
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
artifact: snaps
|
||||
artifact: snaps_$(SNAP_ARCH)
|
||||
displayName: Store snaps artifacts
|
||||
- job: snap_run
|
||||
dependsOn: snaps_build
|
||||
@@ -175,12 +188,12 @@ jobs:
|
||||
displayName: Install dependencies
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: snaps
|
||||
artifact: snaps_amd64
|
||||
path: $(Build.SourcesDirectory)/snap
|
||||
displayName: Retrieve Certbot snaps
|
||||
- script: |
|
||||
set -e
|
||||
sudo snap install --dangerous --classic snap/certbot_*_amd64.snap
|
||||
sudo snap install --dangerous --classic snap/certbot_*.snap
|
||||
displayName: Install Certbot snap
|
||||
- script: |
|
||||
set -e
|
||||
@@ -202,7 +215,7 @@ jobs:
|
||||
addToPath: true
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: snaps
|
||||
artifact: snaps_amd64
|
||||
path: $(Build.SourcesDirectory)/snap
|
||||
displayName: Retrieve Certbot snaps
|
||||
- script: |
|
||||
|
||||
@@ -1,55 +1,53 @@
|
||||
jobs:
|
||||
- job: test
|
||||
variables:
|
||||
PYTHON_VERSION: 3.9
|
||||
PYTHON_VERSION: 3.10
|
||||
strategy:
|
||||
matrix:
|
||||
macos-py27:
|
||||
macos-py36:
|
||||
IMAGE_NAME: macOS-10.15
|
||||
PYTHON_VERSION: 2.7
|
||||
TOXENV: py27
|
||||
macos-py39:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: py36
|
||||
macos-py310:
|
||||
IMAGE_NAME: macOS-10.15
|
||||
PYTHON_VERSION: 3.9
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: py39
|
||||
windows-py36:
|
||||
IMAGE_NAME: vs2017-win2016
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: py36
|
||||
windows-py37-cover:
|
||||
TOXENV: py36-win
|
||||
windows-py39-cover:
|
||||
IMAGE_NAME: vs2017-win2016
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: py37-cover
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: py39-cover-win
|
||||
windows-integration-certbot:
|
||||
IMAGE_NAME: vs2017-win2016
|
||||
PYTHON_VERSION: 3.7
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: integration-certbot
|
||||
linux-oldest-tests-1:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: py27-{acme,apache,apache-v2,certbot}-oldest
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
|
||||
linux-oldest-tests-2:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: py27-{dns,nginx}-oldest
|
||||
linux-py27:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 2.7
|
||||
TOXENV: py27
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: '{dns,nginx}-oldest'
|
||||
linux-py36:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: py36
|
||||
linux-py39-cover:
|
||||
linux-py310-cover:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: py39-cover
|
||||
linux-py37-lint:
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: py310-cover
|
||||
linux-py310-lint:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: lint
|
||||
linux-py36-mypy:
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: lint-posix
|
||||
linux-py310-mypy:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: mypy
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: mypy-posix
|
||||
linux-integration:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.8
|
||||
@@ -58,18 +56,20 @@ jobs:
|
||||
apache-compat:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: apache_compat
|
||||
le-modification:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: modification
|
||||
apacheconftest:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 2.7
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: apacheconftest-with-pebble
|
||||
nginxroundtrip:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 2.7
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: nginxroundtrip
|
||||
pool:
|
||||
vmImage: $(IMAGE_NAME)
|
||||
steps:
|
||||
- template: ../steps/tox-steps.yml
|
||||
- job: test_sphinx_builds
|
||||
pool:
|
||||
vmImage: ubuntu-20.04
|
||||
steps:
|
||||
- template: ../steps/sphinx-steps.yml
|
||||
|
||||
@@ -19,11 +19,12 @@ stages:
|
||||
# Then the file was added as a secure file in Azure pipelines
|
||||
# with the name snapcraft.cfg by following the instructions at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
|
||||
# including authorizing the file in all pipelines as described at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#how-do-i-authorize-a-secure-file-for-use-in-all-pipelines.
|
||||
# including authorizing the file for use in the "nightly" and "release"
|
||||
# pipelines as described at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#q-how-do-i-authorize-a-secure-file-for-use-in-a-specific-pipeline.
|
||||
#
|
||||
# This file has a maximum lifetime of one year and the current
|
||||
# file will expire on 2021-07-28 which is also tracked by
|
||||
# file will expire on 2022-07-25 which is also tracked by
|
||||
# https://github.com/certbot/certbot/issues/7931. The file will
|
||||
# need to be updated before then to prevent automated deploys
|
||||
# from breaking.
|
||||
@@ -37,6 +38,14 @@ stages:
|
||||
vmImage: ubuntu-18.04
|
||||
variables:
|
||||
- group: certbot-common
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
SNAP_ARCH: amd64
|
||||
arm32v6:
|
||||
SNAP_ARCH: armhf
|
||||
arm64v8:
|
||||
SNAP_ARCH: arm64
|
||||
steps:
|
||||
- bash: |
|
||||
set -e
|
||||
@@ -46,7 +55,7 @@ stages:
|
||||
displayName: Install dependencies
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: snaps
|
||||
artifact: snaps_$(SNAP_ARCH)
|
||||
path: $(Build.SourcesDirectory)/snap
|
||||
displayName: Retrieve Certbot snaps
|
||||
- task: DownloadSecureFile@1
|
||||
@@ -55,8 +64,7 @@ stages:
|
||||
secureFile: snapcraft.cfg
|
||||
- bash: |
|
||||
set -e
|
||||
mkdir -p .snapcraft
|
||||
ln -s $(snapcraftCfg.secureFilePath) .snapcraft/snapcraft.cfg
|
||||
snapcraft login --with $(snapcraftCfg.secureFilePath)
|
||||
for SNAP_FILE in snap/*.snap; do
|
||||
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
|
||||
done
|
||||
|
||||
@@ -5,7 +5,7 @@ stages:
|
||||
variables:
|
||||
- group: certbot-common
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
vmImage: ubuntu-20.04
|
||||
steps:
|
||||
- bash: |
|
||||
set -e
|
||||
|
||||
24
.azure-pipelines/templates/steps/sphinx-steps.yml
Normal file
24
.azure-pipelines/templates/steps/sphinx-steps.yml
Normal file
@@ -0,0 +1,24 @@
|
||||
steps:
|
||||
- bash: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends libaugeas0
|
||||
FINAL_STATUS=0
|
||||
declare -a FAILED_BUILDS
|
||||
tools/venv.py
|
||||
source venv/bin/activate
|
||||
for doc_path in */docs
|
||||
do
|
||||
echo ""
|
||||
echo "##[group]Building $doc_path"
|
||||
if ! sphinx-build -W --keep-going -b html $doc_path $doc_path/_build/html; then
|
||||
FINAL_STATUS=1
|
||||
FAILED_BUILDS[${#FAILED_BUILDS[@]}]="${doc_path%/docs}"
|
||||
fi
|
||||
echo "##[endgroup]"
|
||||
done
|
||||
if [[ $FINAL_STATUS -ne 0 ]]; then
|
||||
echo "##[error]The following builds failed: ${FAILED_BUILDS[*]}"
|
||||
exit 1
|
||||
fi
|
||||
displayName: Build Sphinx Documentation
|
||||
@@ -1,6 +1,10 @@
|
||||
steps:
|
||||
# We run brew update because we've seen attempts to install an older version
|
||||
# of a package fail. See
|
||||
# https://github.com/actions/virtual-environments/issues/3165.
|
||||
- bash: |
|
||||
set -e
|
||||
brew update
|
||||
brew install augeas
|
||||
condition: startswith(variables['IMAGE_NAME'], 'macOS')
|
||||
displayName: Install MacOS dependencies
|
||||
@@ -45,11 +49,7 @@ steps:
|
||||
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
|
||||
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
|
||||
env
|
||||
if [[ "${TOXENV}" == *"oldest"* ]]; then
|
||||
tools/run_oldest_tests.sh
|
||||
else
|
||||
python -m tox
|
||||
fi
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
|
||||
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
|
||||
|
||||
@@ -8,5 +8,4 @@
|
||||
.git
|
||||
.tox
|
||||
venv
|
||||
venv3
|
||||
docs
|
||||
|
||||
2
.envrc
2
.envrc
@@ -3,7 +3,7 @@
|
||||
# activated and then deactivated when you cd elsewhere. Developers have to have
|
||||
# direnv set up and run `direnv allow` to allow this file to execute on their
|
||||
# system. You can find more information at https://direnv.net/.
|
||||
. venv3/bin/activate
|
||||
. venv/bin/activate
|
||||
# direnv doesn't support modifying PS1 so we unset it to squelch the error
|
||||
# it'll otherwise print about this being done in the activate script. See
|
||||
# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
|
||||
|
||||
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1 @@
|
||||
custom: https://supporters.eff.org/donate/support-work-on-certbot
|
||||
@@ -7,7 +7,7 @@ questions.
|
||||
## My operating system is (include version):
|
||||
|
||||
|
||||
## I installed Certbot with (certbot-auto, OS package manager, pip, etc):
|
||||
## I installed Certbot with (snap, OS package manager, pip, certbot-auto, etc):
|
||||
|
||||
|
||||
## I ran this command and it produced this output:
|
||||
9
.gitignore
vendored
9
.gitignore
vendored
@@ -4,13 +4,12 @@
|
||||
build/
|
||||
dist*/
|
||||
/venv*/
|
||||
/kgs/
|
||||
/.tox/
|
||||
/releases*/
|
||||
/log*
|
||||
letsencrypt.log
|
||||
certbot.log
|
||||
letsencrypt-auto-source/letsencrypt-auto.sig.lzma.base64
|
||||
poetry.lock
|
||||
|
||||
# coverage
|
||||
.coverage
|
||||
@@ -31,12 +30,6 @@ tags
|
||||
# auth --cert-path --chain-path
|
||||
/*.pem
|
||||
|
||||
# letstest
|
||||
tests/letstest/letest-*/
|
||||
tests/letstest/*.pem
|
||||
tests/letstest/venv/
|
||||
tests/letstest/venv3/
|
||||
|
||||
.venv
|
||||
|
||||
# pytest cache
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
[settings]
|
||||
skip_glob=venv*
|
||||
skip=letsencrypt-auto-source
|
||||
force_sort_within_sections=True
|
||||
force_single_line=True
|
||||
order_by_type=False
|
||||
|
||||
30
.pylintrc
30
.pylintrc
@@ -8,7 +8,10 @@ jobs=0
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
# CERTBOT COMMENT
|
||||
# This is needed for pylint to import linter_plugin.py since
|
||||
# https://github.com/PyCQA/pylint/pull/3396.
|
||||
init-hook="import pylint.config, os, sys; sys.path.append(os.path.dirname(pylint.config.PYLINTRC))"
|
||||
|
||||
# Profiled execution.
|
||||
profile=no
|
||||
@@ -53,7 +56,28 @@ extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
|
||||
# See https://github.com/PyCQA/pylint/issues/1498.
|
||||
# 3) Same as point 2 for no-value-for-parameter.
|
||||
# See https://github.com/PyCQA/pylint/issues/2820.
|
||||
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue
|
||||
# 4) raise-missing-from makes it an error to raise an exception from except
|
||||
# block without using explicit exception chaining. While explicit exception
|
||||
# chaining results in a slightly more informative traceback, I don't think
|
||||
# it's beneficial enough for us to change all of our current instances and
|
||||
# give Certbot developers errors about this when they're working on new code
|
||||
# in the future. You can read more about exception chaining and this pylint
|
||||
# check at
|
||||
# https://blog.ram.rachum.com/post/621791438475296768/improving-python-exception-chaining-with.
|
||||
# 5) wrong-import-order generates false positives and a pylint developer
|
||||
# suggests that people using isort should disable this check at
|
||||
# https://github.com/PyCQA/pylint/issues/3817#issuecomment-687892090.
|
||||
# 6) unspecified-encoding generates errors when encoding is not specified in
|
||||
# in a call to the built-in open function. This relates more to a design decision
|
||||
# (unspecified encoding makes the open function use the default encoding of the system)
|
||||
# than a clear flaw on which a check should be enforced. Anyway the project does
|
||||
# not need to enforce encoding on files so we disable this check.
|
||||
# 7) pylint's refactoring checker makes suggestions on how code could be
|
||||
# structured differently. These checks are very opinionated and have caused
|
||||
# pylint to fail on Certbot when it's upgraded so let's disable this checker
|
||||
# entirely. You can see a list of the things the checker considers at
|
||||
# https://pylint.pycqa.org/en/v2.11.1/technical_reference/features.html#refactoring-checker.
|
||||
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue,raise-missing-from,wrong-import-order,unspecified-encoding,refactoring
|
||||
|
||||
[REPORTS]
|
||||
|
||||
@@ -254,7 +278,7 @@ ignore-mixin-members=yes
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis
|
||||
ignored-modules=pkg_resources,confargparse,argparse,six.moves,six.moves.urllib
|
||||
ignored-modules=pkg_resources,confargparse,argparse
|
||||
# import errors ignored only in 1.4.4
|
||||
# https://bitbucket.org/logilab/pylint/commits/cd000904c9e2
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
Authors
|
||||
=======
|
||||
|
||||
* [Aaron Gable](https://github.com/aarongable)
|
||||
* [Aaron Zirbes](https://github.com/aaronzirbes)
|
||||
* Aaron Zuehlke
|
||||
* Ada Lovelace
|
||||
@@ -60,6 +61,7 @@ Authors
|
||||
* [DanCld](https://github.com/DanCld)
|
||||
* [Daniel Albers](https://github.com/AID)
|
||||
* [Daniel Aleksandersen](https://github.com/da2x)
|
||||
* [Daniel Almasi](https://github.com/almasen)
|
||||
* [Daniel Convissor](https://github.com/convissor)
|
||||
* [Daniel "Drex" Drexler](https://github.com/aeturnum)
|
||||
* [Daniel Huang](https://github.com/dhuang)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# This Dockerfile builds an image for development.
|
||||
FROM debian:buster
|
||||
FROM ubuntu:focal
|
||||
|
||||
# Note: this only exposes the port to other docker containers.
|
||||
EXPOSE 80 443
|
||||
@@ -8,13 +8,14 @@ WORKDIR /opt/certbot/src
|
||||
|
||||
COPY . .
|
||||
RUN apt-get update && \
|
||||
apt-get install apache2 git python3-dev python3-venv gcc libaugeas0 \
|
||||
libssl-dev libffi-dev ca-certificates openssl nginx-light -y && \
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install apache2 git python3-dev \
|
||||
python3-venv gcc libaugeas0 libssl-dev libffi-dev ca-certificates \
|
||||
openssl nginx-light -y --no-install-recommends && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/* \
|
||||
/tmp/* \
|
||||
/var/tmp/*
|
||||
|
||||
RUN VENV_NAME="../venv3" python3 tools/venv3.py
|
||||
RUN VENV_NAME="../venv" python3 tools/venv.py
|
||||
|
||||
ENV PATH /opt/certbot/venv3/bin:$PATH
|
||||
ENV PATH /opt/certbot/venv/bin:$PATH
|
||||
|
||||
@@ -6,7 +6,6 @@ This module is an implementation of the `ACME protocol`_.
|
||||
|
||||
"""
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# This code exists to keep backwards compatibility with people using acme.jose
|
||||
# before it became the standalone josepy package.
|
||||
@@ -20,10 +19,3 @@ for mod in list(sys.modules):
|
||||
# preserved (acme.jose.* is josepy.*)
|
||||
if mod == 'josepy' or mod.startswith('josepy.'):
|
||||
sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
warnings.warn(
|
||||
"Python 2 support will be dropped in the next release of acme. "
|
||||
"Please upgrade your Python version.",
|
||||
PendingDeprecationWarning,
|
||||
) # pragma: no cover
|
||||
|
||||
@@ -5,18 +5,25 @@ import functools
|
||||
import hashlib
|
||||
import logging
|
||||
import socket
|
||||
from typing import cast
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
|
||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
import josepy as jose
|
||||
import requests
|
||||
import six
|
||||
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
||||
from OpenSSL import crypto
|
||||
from OpenSSL import SSL
|
||||
import requests
|
||||
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import fields
|
||||
from acme.mixins import ResourceMixin, TypeMixin
|
||||
from acme.mixins import ResourceMixin
|
||||
from acme.mixins import TypeMixin
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -24,12 +31,12 @@ logger = logging.getLogger(__name__)
|
||||
class Challenge(jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge."""
|
||||
TYPES = {} # type: dict
|
||||
TYPES: Dict[str, Type['Challenge']] = {}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
def from_json(cls, jobj: Mapping[str, Any]) -> 'Challenge':
|
||||
try:
|
||||
return super(Challenge, cls).from_json(jobj)
|
||||
return super().from_json(jobj)
|
||||
except jose.UnrecognizedTypeError as error:
|
||||
logger.debug(error)
|
||||
return UnrecognizedChallenge.from_json(jobj)
|
||||
@@ -38,7 +45,7 @@ class Challenge(jose.TypedJSONObjectWithFields):
|
||||
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge response."""
|
||||
TYPES = {} # type: dict
|
||||
TYPES: Dict[str, Type['ChallengeResponse']] = {}
|
||||
resource_type = 'challenge'
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
@@ -56,15 +63,15 @@ class UnrecognizedChallenge(Challenge):
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, jobj):
|
||||
super(UnrecognizedChallenge, self).__init__()
|
||||
def __init__(self, jobj: Mapping[str, Any]) -> None:
|
||||
super().__init__()
|
||||
object.__setattr__(self, "jobj", jobj)
|
||||
|
||||
def to_partial_json(self):
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
return self.jobj # pylint: disable=no-member
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
def from_json(cls, jobj: Mapping[str, Any]) -> 'UnrecognizedChallenge':
|
||||
return cls(jobj)
|
||||
|
||||
|
||||
@@ -78,13 +85,13 @@ class _TokenChallenge(Challenge):
|
||||
"""Minimum size of the :attr:`token` in bytes."""
|
||||
|
||||
# TODO: acme-spec doesn't specify token as base64-encoded value
|
||||
token = jose.Field(
|
||||
token: bytes = jose.Field(
|
||||
"token", encoder=jose.encode_b64jose, decoder=functools.partial(
|
||||
jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))
|
||||
|
||||
# XXX: rename to ~token_good_for_url
|
||||
@property
|
||||
def good_token(self): # XXX: @token.decoder
|
||||
def good_token(self) -> bool: # XXX: @token.decoder
|
||||
"""Is `token` good?
|
||||
|
||||
.. todo:: acme-spec wants "It MUST NOT contain any non-ASCII
|
||||
@@ -107,7 +114,7 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
key_authorization = jose.Field("keyAuthorization")
|
||||
thumbprint_hash_function = hashes.SHA256
|
||||
|
||||
def verify(self, chall, account_public_key):
|
||||
def verify(self, chall: 'KeyAuthorizationChallenge', account_public_key: jose.JWK) -> bool:
|
||||
"""Verify the key authorization.
|
||||
|
||||
:param KeyAuthorization chall: Challenge that corresponds to
|
||||
@@ -139,26 +146,25 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
|
||||
return True
|
||||
|
||||
def to_partial_json(self):
|
||||
jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
jobj = super().to_partial_json()
|
||||
jobj.pop('keyAuthorization', None)
|
||||
return jobj
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class KeyAuthorizationChallenge(_TokenChallenge):
|
||||
class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
|
||||
"""Challenge based on Key Authorization.
|
||||
|
||||
:param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
|
||||
that will be used to generate `response`.
|
||||
that will be used to generate ``response``.
|
||||
:param str typ: type of the challenge
|
||||
"""
|
||||
typ = NotImplemented
|
||||
response_cls = NotImplemented
|
||||
typ: str = NotImplemented
|
||||
response_cls: Type[KeyAuthorizationChallengeResponse] = NotImplemented
|
||||
thumbprint_hash_function = (
|
||||
KeyAuthorizationChallengeResponse.thumbprint_hash_function)
|
||||
|
||||
def key_authorization(self, account_key):
|
||||
def key_authorization(self, account_key: jose.JWK) -> str:
|
||||
"""Generate Key Authorization.
|
||||
|
||||
:param JWK account_key:
|
||||
@@ -169,7 +175,7 @@ class KeyAuthorizationChallenge(_TokenChallenge):
|
||||
account_key.thumbprint(
|
||||
hash_function=self.thumbprint_hash_function)).decode()
|
||||
|
||||
def response(self, account_key):
|
||||
def response(self, account_key: jose.JWK) -> KeyAuthorizationChallengeResponse:
|
||||
"""Generate response to the challenge.
|
||||
|
||||
:param JWK account_key:
|
||||
@@ -182,7 +188,7 @@ class KeyAuthorizationChallenge(_TokenChallenge):
|
||||
key_authorization=self.key_authorization(account_key))
|
||||
|
||||
@abc.abstractmethod
|
||||
def validation(self, account_key, **kwargs):
|
||||
def validation(self, account_key: jose.JWK, **kwargs: Any) -> Any:
|
||||
"""Generate validation for the challenge.
|
||||
|
||||
Subclasses must implement this method, but they are likely to
|
||||
@@ -196,7 +202,8 @@ class KeyAuthorizationChallenge(_TokenChallenge):
|
||||
"""
|
||||
raise NotImplementedError() # pragma: no cover
|
||||
|
||||
def response_and_validation(self, account_key, *args, **kwargs):
|
||||
def response_and_validation(self, account_key: jose.JWK, *args: Any, **kwargs: Any
|
||||
) -> Tuple[KeyAuthorizationChallengeResponse, Any]:
|
||||
"""Generate response and validation.
|
||||
|
||||
Convenience function that return results of `response` and
|
||||
@@ -215,7 +222,7 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
|
||||
"""ACME dns-01 challenge response."""
|
||||
typ = "dns-01"
|
||||
|
||||
def simple_verify(self, chall, domain, account_public_key): # pylint: disable=unused-argument
|
||||
def simple_verify(self, chall: 'DNS01', domain: str, account_public_key: jose.JWK) -> bool: # pylint: disable=unused-argument
|
||||
"""Simple verify.
|
||||
|
||||
This method no longer checks DNS records and is a simple wrapper
|
||||
@@ -246,7 +253,7 @@ class DNS01(KeyAuthorizationChallenge):
|
||||
LABEL = "_acme-challenge"
|
||||
"""Label clients prepend to the domain name being validated."""
|
||||
|
||||
def validation(self, account_key, **unused_kwargs):
|
||||
def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
@@ -256,7 +263,7 @@ class DNS01(KeyAuthorizationChallenge):
|
||||
return jose.b64encode(hashlib.sha256(self.key_authorization(
|
||||
account_key).encode("utf-8")).digest()).decode()
|
||||
|
||||
def validation_domain_name(self, name):
|
||||
def validation_domain_name(self, name: str) -> str:
|
||||
"""Domain name for TXT validation record.
|
||||
|
||||
:param unicode name: Domain name being validated.
|
||||
@@ -281,7 +288,8 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
|
||||
WHITESPACE_CUTSET = "\n\r\t "
|
||||
"""Whitespace characters which should be ignored at the end of the body."""
|
||||
|
||||
def simple_verify(self, chall, domain, account_public_key, port=None):
|
||||
def simple_verify(self, chall: 'HTTP01', domain: str, account_public_key: jose.JWK,
|
||||
port: Optional[int] = None) -> bool:
|
||||
"""Simple verify.
|
||||
|
||||
:param challenges.SimpleHTTP chall: Corresponding challenge.
|
||||
@@ -314,6 +322,15 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
|
||||
except requests.exceptions.RequestException as error:
|
||||
logger.error("Unable to reach %s: %s", uri, error)
|
||||
return False
|
||||
# By default, http_response.text will try to guess the encoding to use
|
||||
# when decoding the response to Python unicode strings. This guesswork
|
||||
# is error prone. RFC 8555 specifies that HTTP-01 responses should be
|
||||
# key authorizations with possible trailing whitespace. Since key
|
||||
# authorizations must be composed entirely of the base64url alphabet
|
||||
# plus ".", we tell requests that the response should be ASCII. See
|
||||
# https://datatracker.ietf.org/doc/html/rfc8555#section-8.3 for more
|
||||
# info.
|
||||
http_response.encoding = "ascii"
|
||||
logger.debug("Received %s: %s. Headers: %s", http_response,
|
||||
http_response.text, http_response.headers)
|
||||
|
||||
@@ -337,7 +354,7 @@ class HTTP01(KeyAuthorizationChallenge):
|
||||
"""URI root path for the server provisioned resource."""
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
def path(self) -> str:
|
||||
"""Path (starting with '/') for provisioned resource.
|
||||
|
||||
:rtype: string
|
||||
@@ -345,7 +362,7 @@ class HTTP01(KeyAuthorizationChallenge):
|
||||
"""
|
||||
return '/' + self.URI_ROOT_PATH + '/' + self.encode('token')
|
||||
|
||||
def uri(self, domain):
|
||||
def uri(self, domain: str) -> str:
|
||||
"""Create an URI to the provisioned resource.
|
||||
|
||||
Forms an URI to the HTTPS server provisioned resource
|
||||
@@ -357,7 +374,7 @@ class HTTP01(KeyAuthorizationChallenge):
|
||||
"""
|
||||
return "http://" + domain + self.path
|
||||
|
||||
def validation(self, account_key, **unused_kwargs):
|
||||
def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
@@ -384,11 +401,12 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
ACME_TLS_1_PROTOCOL = "acme-tls/1"
|
||||
|
||||
@property
|
||||
def h(self):
|
||||
def h(self) -> bytes:
|
||||
"""Hash value stored in challenge certificate"""
|
||||
return hashlib.sha256(self.key_authorization.encode('utf-8')).digest()
|
||||
|
||||
def gen_cert(self, domain, key=None, bits=2048):
|
||||
def gen_cert(self, domain: str, key: Optional[crypto.PKey] = None, bits: int = 2048
|
||||
) -> Tuple[crypto.X509, crypto.PKey]:
|
||||
"""Generate tls-alpn-01 certificate.
|
||||
|
||||
:param unicode domain: Domain verified by the challenge.
|
||||
@@ -404,7 +422,6 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
key = crypto.PKey()
|
||||
key.generate_key(crypto.TYPE_RSA, bits)
|
||||
|
||||
|
||||
der_value = b"DER:" + codecs.encode(self.h, 'hex')
|
||||
acme_extension = crypto.X509Extension(self.ID_PE_ACME_IDENTIFIER_V1,
|
||||
critical=True, value=der_value)
|
||||
@@ -412,7 +429,8 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
return crypto_util.gen_ss_cert(key, [domain], force_san=True,
|
||||
extensions=[acme_extension]), key
|
||||
|
||||
def probe_cert(self, domain, host=None, port=None):
|
||||
def probe_cert(self, domain: str, host: Optional[str] = None,
|
||||
port: Optional[int] = None) -> crypto.X509:
|
||||
"""Probe tls-alpn-01 challenge certificate.
|
||||
|
||||
:param unicode domain: domain being validated, required.
|
||||
@@ -426,10 +444,10 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
if port is None:
|
||||
port = self.PORT
|
||||
|
||||
return crypto_util.probe_sni(host=host, port=port, name=domain,
|
||||
return crypto_util.probe_sni(host=host.encode(), port=port, name=domain.encode(),
|
||||
alpn_protocols=[self.ACME_TLS_1_PROTOCOL])
|
||||
|
||||
def verify_cert(self, domain, cert):
|
||||
def verify_cert(self, domain: str, cert: crypto.X509) -> bool:
|
||||
"""Verify tls-alpn-01 challenge certificate.
|
||||
|
||||
:param unicode domain: Domain name being validated.
|
||||
@@ -441,7 +459,10 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
"""
|
||||
# pylint: disable=protected-access
|
||||
names = crypto_util._pyopenssl_cert_or_req_all_names(cert)
|
||||
logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), names)
|
||||
# Type ignore needed due to
|
||||
# https://github.com/pyca/pyopenssl/issues/730.
|
||||
logger.debug('Certificate %s. SANs: %s',
|
||||
cert.digest('sha256'), names) # type: ignore[arg-type]
|
||||
if len(names) != 1 or names[0].lower() != domain.lower():
|
||||
return False
|
||||
|
||||
@@ -456,8 +477,9 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
return False
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
def simple_verify(self, chall, domain, account_public_key,
|
||||
cert=None, host=None, port=None):
|
||||
def simple_verify(self, chall: 'TLSALPN01', domain: str, account_public_key: jose.JWK,
|
||||
cert: Optional[crypto.X509] = None, host: Optional[str] = None,
|
||||
port: Optional[int] = None) -> bool:
|
||||
"""Simple verify.
|
||||
|
||||
Verify ``validation`` using ``account_public_key``, optionally
|
||||
@@ -497,7 +519,7 @@ class TLSALPN01(KeyAuthorizationChallenge):
|
||||
response_cls = TLSALPN01Response
|
||||
typ = response_cls.typ
|
||||
|
||||
def validation(self, account_key, **kwargs):
|
||||
def validation(self, account_key: jose.JWK, **kwargs: Any) -> Tuple[crypto.X509, crypto.PKey]:
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
@@ -514,7 +536,7 @@ class TLSALPN01(KeyAuthorizationChallenge):
|
||||
domain=kwargs.get('domain'))
|
||||
|
||||
@staticmethod
|
||||
def is_supported():
|
||||
def is_supported() -> bool:
|
||||
"""
|
||||
Check if TLS-ALPN-01 challenge is supported on this machine.
|
||||
This implies that a recent version of OpenSSL is installed (>= 1.0.2),
|
||||
@@ -536,7 +558,8 @@ class DNS(_TokenChallenge):
|
||||
LABEL = "_acme-challenge"
|
||||
"""Label clients prepend to the domain name being validated."""
|
||||
|
||||
def gen_validation(self, account_key, alg=jose.RS256, **kwargs):
|
||||
def gen_validation(self, account_key: jose.JWK, alg: jose.JWASignature = jose.RS256,
|
||||
**kwargs: Any) -> jose.JWS:
|
||||
"""Generate validation.
|
||||
|
||||
:param .JWK account_key: Private account key.
|
||||
@@ -550,7 +573,7 @@ class DNS(_TokenChallenge):
|
||||
payload=self.json_dumps(sort_keys=True).encode('utf-8'),
|
||||
key=account_key, alg=alg, **kwargs)
|
||||
|
||||
def check_validation(self, validation, account_public_key):
|
||||
def check_validation(self, validation: jose.JWS, account_public_key: jose.JWK) -> bool:
|
||||
"""Check validation.
|
||||
|
||||
:param JWS validation:
|
||||
@@ -567,7 +590,7 @@ class DNS(_TokenChallenge):
|
||||
logger.debug("Checking validation for DNS failed: %s", error)
|
||||
return False
|
||||
|
||||
def gen_response(self, account_key, **kwargs):
|
||||
def gen_response(self, account_key: jose.JWK, **kwargs: Any) -> 'DNSResponse':
|
||||
"""Generate response.
|
||||
|
||||
:param .JWK account_key: Private account key.
|
||||
@@ -579,7 +602,7 @@ class DNS(_TokenChallenge):
|
||||
return DNSResponse(validation=self.gen_validation(
|
||||
account_key, **kwargs))
|
||||
|
||||
def validation_domain_name(self, name):
|
||||
def validation_domain_name(self, name: str) -> str:
|
||||
"""Domain name for TXT validation record.
|
||||
|
||||
:param unicode name: Domain name being validated.
|
||||
@@ -599,7 +622,7 @@ class DNSResponse(ChallengeResponse):
|
||||
|
||||
validation = jose.Field("validation", decoder=jose.JWS.from_json)
|
||||
|
||||
def check_validation(self, chall, account_public_key):
|
||||
def check_validation(self, chall: 'DNS', account_public_key: jose.JWK) -> bool:
|
||||
"""Check validation.
|
||||
|
||||
:param challenges.DNS chall:
|
||||
@@ -608,4 +631,4 @@ class DNSResponse(ChallengeResponse):
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
return chall.check_validation(self.validation, account_public_key)
|
||||
return chall.check_validation(cast(jose.JWS, self.validation), account_public_key)
|
||||
|
||||
@@ -1,13 +1,30 @@
|
||||
"""ACME client API."""
|
||||
# pylint: disable=too-many-lines
|
||||
# This pylint disable can be deleted once the deprecated ACMEv1 code is
|
||||
# removed.
|
||||
import base64
|
||||
import collections
|
||||
import datetime
|
||||
from email.utils import parsedate_tz
|
||||
import heapq
|
||||
import http.client as http_client
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from types import ModuleType
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import cast
|
||||
from typing import Dict
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Text
|
||||
from typing import Tuple
|
||||
from typing import Union
|
||||
import warnings
|
||||
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
@@ -15,45 +32,29 @@ import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
from requests.utils import parse_header_links
|
||||
from requests_toolbelt.adapters.source import SourceAddressAdapter
|
||||
import six
|
||||
from six.moves import http_client
|
||||
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import jws
|
||||
from acme import messages
|
||||
from acme.magic_typing import Dict
|
||||
from acme.magic_typing import List
|
||||
from acme.magic_typing import Set
|
||||
from acme.magic_typing import Text
|
||||
from acme.mixins import VersionedLEACMEMixin
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Prior to Python 2.7.9 the stdlib SSL module did not allow a user to configure
|
||||
# many important security related options. On these platforms we use PyOpenSSL
|
||||
# for SSL, which does allow these options to be configured.
|
||||
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
|
||||
if sys.version_info < (2, 7, 9): # pragma: no cover
|
||||
try:
|
||||
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3() # type: ignore
|
||||
except AttributeError:
|
||||
import urllib3.contrib.pyopenssl
|
||||
urllib3.contrib.pyopenssl.inject_into_urllib3()
|
||||
|
||||
DEFAULT_NETWORK_TIMEOUT = 45
|
||||
|
||||
DER_CONTENT_TYPE = 'application/pkix-cert'
|
||||
|
||||
|
||||
class ClientBase(object):
|
||||
class ClientBase:
|
||||
"""ACME client base object.
|
||||
|
||||
:ivar messages.Directory directory:
|
||||
:ivar .ClientNetwork net: Client network.
|
||||
:ivar int acme_version: ACME protocol version. 1 or 2.
|
||||
"""
|
||||
def __init__(self, directory, net, acme_version):
|
||||
def __init__(self, directory: messages.Directory, net: 'ClientNetwork',
|
||||
acme_version: int) -> None:
|
||||
"""Initialize.
|
||||
|
||||
:param .messages.Directory directory: Directory Resource
|
||||
@@ -65,7 +66,9 @@ class ClientBase(object):
|
||||
self.acme_version = acme_version
|
||||
|
||||
@classmethod
|
||||
def _regr_from_response(cls, response, uri=None, terms_of_service=None):
|
||||
def _regr_from_response(cls, response: requests.Response, uri: Optional[str] = None,
|
||||
terms_of_service: Optional[str] = None
|
||||
) -> messages.RegistrationResource:
|
||||
if 'terms-of-service' in response.links:
|
||||
terms_of_service = response.links['terms-of-service']['url']
|
||||
|
||||
@@ -74,7 +77,8 @@ class ClientBase(object):
|
||||
uri=response.headers.get('Location', uri),
|
||||
terms_of_service=terms_of_service)
|
||||
|
||||
def _send_recv_regr(self, regr, body):
|
||||
def _send_recv_regr(self, regr: messages.RegistrationResource,
|
||||
body: messages.Registration) -> messages.RegistrationResource:
|
||||
response = self._post(regr.uri, body)
|
||||
|
||||
# TODO: Boulder returns httplib.ACCEPTED
|
||||
@@ -87,7 +91,7 @@ class ClientBase(object):
|
||||
response, uri=regr.uri,
|
||||
terms_of_service=regr.terms_of_service)
|
||||
|
||||
def _post(self, *args, **kwargs):
|
||||
def _post(self, *args: Any, **kwargs: Any) -> requests.Response:
|
||||
"""Wrapper around self.net.post that adds the acme_version.
|
||||
|
||||
"""
|
||||
@@ -96,7 +100,9 @@ class ClientBase(object):
|
||||
kwargs.setdefault('new_nonce_url', getattr(self.directory, 'newNonce'))
|
||||
return self.net.post(*args, **kwargs)
|
||||
|
||||
def update_registration(self, regr, update=None):
|
||||
def update_registration(self, regr: messages.RegistrationResource,
|
||||
update: Optional[messages.Registration] = None
|
||||
) -> messages.RegistrationResource:
|
||||
"""Update registration.
|
||||
|
||||
:param messages.RegistrationResource regr: Registration Resource.
|
||||
@@ -113,7 +119,8 @@ class ClientBase(object):
|
||||
self.net.account = updated_regr
|
||||
return updated_regr
|
||||
|
||||
def deactivate_registration(self, regr):
|
||||
def deactivate_registration(self, regr: messages.RegistrationResource
|
||||
) -> messages.RegistrationResource:
|
||||
"""Deactivate registration.
|
||||
|
||||
:param messages.RegistrationResource regr: The Registration Resource
|
||||
@@ -123,10 +130,12 @@ class ClientBase(object):
|
||||
:rtype: `.RegistrationResource`
|
||||
|
||||
"""
|
||||
return self.update_registration(regr, update={'status': 'deactivated'})
|
||||
return self.update_registration(regr, messages.Registration.from_json(
|
||||
{"status": "deactivated", "contact": None}))
|
||||
|
||||
def deactivate_authorization(self, authzr):
|
||||
# type: (messages.AuthorizationResource) -> messages.AuthorizationResource
|
||||
def deactivate_authorization(self,
|
||||
authzr: messages.AuthorizationResource
|
||||
) -> messages.AuthorizationResource:
|
||||
"""Deactivate authorization.
|
||||
|
||||
:param messages.AuthorizationResource authzr: The Authorization resource
|
||||
@@ -141,7 +150,9 @@ class ClientBase(object):
|
||||
return self._authzr_from_response(response,
|
||||
authzr.body.identifier, authzr.uri)
|
||||
|
||||
def _authzr_from_response(self, response, identifier=None, uri=None):
|
||||
def _authzr_from_response(self, response: requests.Response,
|
||||
identifier: Optional[messages.Identifier] = None,
|
||||
uri: Optional[str] = None) -> messages.AuthorizationResource:
|
||||
authzr = messages.AuthorizationResource(
|
||||
body=messages.Authorization.from_json(response.json()),
|
||||
uri=response.headers.get('Location', uri))
|
||||
@@ -149,7 +160,8 @@ class ClientBase(object):
|
||||
raise errors.UnexpectedUpdate(authzr)
|
||||
return authzr
|
||||
|
||||
def answer_challenge(self, challb, response):
|
||||
def answer_challenge(self, challb: messages.ChallengeBody, response: requests.Response
|
||||
) -> messages.ChallengeResource:
|
||||
"""Answer challenge.
|
||||
|
||||
:param challb: Challenge Resource body.
|
||||
@@ -178,7 +190,7 @@ class ClientBase(object):
|
||||
return challr
|
||||
|
||||
@classmethod
|
||||
def retry_after(cls, response, default):
|
||||
def retry_after(cls, response: requests.Response, default: int) -> datetime.datetime:
|
||||
"""Compute next `poll` time based on response ``Retry-After`` header.
|
||||
|
||||
Handles integers and various datestring formats per
|
||||
@@ -209,7 +221,7 @@ class ClientBase(object):
|
||||
|
||||
return datetime.datetime.now() + datetime.timedelta(seconds=seconds)
|
||||
|
||||
def _revoke(self, cert, rsn, url):
|
||||
def _revoke(self, cert: jose.ComparableX509, rsn: int, url: str) -> None:
|
||||
"""Revoke certificate.
|
||||
|
||||
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
|
||||
@@ -234,6 +246,9 @@ class ClientBase(object):
|
||||
class Client(ClientBase):
|
||||
"""ACME client for a v1 API.
|
||||
|
||||
.. deprecated:: 1.18.0
|
||||
Use :class:`ClientV2` instead.
|
||||
|
||||
.. todo::
|
||||
Clean up raised error types hierarchy, document, and handle (wrap)
|
||||
instances of `.DeserializationError` raised in `from_json()`.
|
||||
@@ -248,8 +263,9 @@ class Client(ClientBase):
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, directory, key, alg=jose.RS256, verify_ssl=True,
|
||||
net=None):
|
||||
def __init__(self, directory: messages.Directory, key: jose.JWK,
|
||||
alg: jose.JWASignature=jose.RS256, verify_ssl: bool = True,
|
||||
net: Optional['ClientNetwork'] = None) -> None:
|
||||
"""Initialize.
|
||||
|
||||
:param directory: Directory Resource (`.messages.Directory`) or
|
||||
@@ -260,13 +276,14 @@ class Client(ClientBase):
|
||||
if net is None:
|
||||
net = ClientNetwork(key, alg=alg, verify_ssl=verify_ssl)
|
||||
|
||||
if isinstance(directory, six.string_types):
|
||||
if isinstance(directory, str):
|
||||
directory = messages.Directory.from_json(
|
||||
net.get(directory).json())
|
||||
super(Client, self).__init__(directory=directory,
|
||||
super().__init__(directory=directory,
|
||||
net=net, acme_version=1)
|
||||
|
||||
def register(self, new_reg=None):
|
||||
def register(self, new_reg: Optional[messages.NewRegistration] = None
|
||||
) -> messages.RegistrationResource:
|
||||
"""Register.
|
||||
|
||||
:param .NewRegistration new_reg:
|
||||
@@ -283,16 +300,18 @@ class Client(ClientBase):
|
||||
# "Instance of 'Field' has no key/contact member" bug:
|
||||
return self._regr_from_response(response)
|
||||
|
||||
def query_registration(self, regr):
|
||||
def query_registration(self, regr: messages.RegistrationResource
|
||||
) -> messages.RegistrationResource:
|
||||
"""Query server about registration.
|
||||
|
||||
:param messages.RegistrationResource: Existing Registration
|
||||
:param messages.RegistrationResource regr: Existing Registration
|
||||
Resource.
|
||||
|
||||
"""
|
||||
return self._send_recv_regr(regr, messages.UpdateRegistration())
|
||||
|
||||
def agree_to_tos(self, regr):
|
||||
def agree_to_tos(self, regr: messages.RegistrationResource
|
||||
) -> messages.RegistrationResource:
|
||||
"""Agree to the terms-of-service.
|
||||
|
||||
Agree to the terms-of-service in a Registration Resource.
|
||||
@@ -307,7 +326,8 @@ class Client(ClientBase):
|
||||
return self.update_registration(
|
||||
regr.update(body=regr.body.update(agreement=regr.terms_of_service)))
|
||||
|
||||
def request_challenges(self, identifier, new_authzr_uri=None):
|
||||
def request_challenges(self, identifier: messages.Identifier,
|
||||
new_authzr_uri: Optional[str] = None) -> messages.AuthorizationResource:
|
||||
"""Request challenges.
|
||||
|
||||
:param .messages.Identifier identifier: Identifier to be challenged.
|
||||
@@ -333,7 +353,8 @@ class Client(ClientBase):
|
||||
assert response.status_code == http_client.CREATED
|
||||
return self._authzr_from_response(response, identifier)
|
||||
|
||||
def request_domain_challenges(self, domain, new_authzr_uri=None):
|
||||
def request_domain_challenges(self, domain: str,new_authzr_uri: Optional[str] = None
|
||||
) -> messages.AuthorizationResource:
|
||||
"""Request challenges for domain names.
|
||||
|
||||
This is simply a convenience function that wraps around
|
||||
@@ -353,7 +374,9 @@ class Client(ClientBase):
|
||||
return self.request_challenges(messages.Identifier(
|
||||
typ=messages.IDENTIFIER_FQDN, value=domain), new_authzr_uri)
|
||||
|
||||
def request_issuance(self, csr, authzrs):
|
||||
def request_issuance(self, csr: jose.ComparableX509,
|
||||
authzrs: Iterable[messages.AuthorizationResource]
|
||||
) -> messages.CertificateResource:
|
||||
"""Request issuance.
|
||||
|
||||
:param csr: CSR
|
||||
@@ -390,7 +413,8 @@ class Client(ClientBase):
|
||||
body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_ASN1, response.content)))
|
||||
|
||||
def poll(self, authzr):
|
||||
def poll(self, authzr: messages.AuthorizationResource
|
||||
) -> Tuple[messages.AuthorizationResource, requests.Response]:
|
||||
"""Poll Authorization Resource for status.
|
||||
|
||||
:param authzr: Authorization Resource
|
||||
@@ -406,8 +430,11 @@ class Client(ClientBase):
|
||||
response, authzr.body.identifier, authzr.uri)
|
||||
return updated_authzr, response
|
||||
|
||||
def poll_and_request_issuance(
|
||||
self, csr, authzrs, mintime=5, max_attempts=10):
|
||||
def poll_and_request_issuance(self, csr: jose.ComparableX509,
|
||||
authzrs: Iterable[messages.AuthorizationResource],
|
||||
mintime: int = 5, max_attempts: int = 10
|
||||
) -> Tuple[messages.CertificateResource,
|
||||
Tuple[messages.AuthorizationResource, ...]]:
|
||||
"""Poll and request issuance.
|
||||
|
||||
This function polls all provided Authorization Resource URIs
|
||||
@@ -436,7 +463,7 @@ class Client(ClientBase):
|
||||
|
||||
"""
|
||||
assert max_attempts > 0
|
||||
attempts = collections.defaultdict(int) # type: Dict[messages.AuthorizationResource, int]
|
||||
attempts: Dict[messages.AuthorizationResource, int] = collections.defaultdict(int)
|
||||
exhausted = set()
|
||||
|
||||
# priority queue with datetime.datetime (based on Retry-After) as key,
|
||||
@@ -475,13 +502,13 @@ class Client(ClientBase):
|
||||
exhausted.add(authzr)
|
||||
|
||||
if exhausted or any(authzr.body.status == messages.STATUS_INVALID
|
||||
for authzr in six.itervalues(updated)):
|
||||
for authzr in updated.values()):
|
||||
raise errors.PollError(exhausted, updated)
|
||||
|
||||
updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
|
||||
return self.request_issuance(csr, updated_authzrs), updated_authzrs
|
||||
|
||||
def _get_cert(self, uri):
|
||||
def _get_cert(self, uri: str) -> Tuple[requests.Response, jose.ComparableX509]:
|
||||
"""Returns certificate from URI.
|
||||
|
||||
:param str uri: URI of certificate
|
||||
@@ -497,7 +524,7 @@ class Client(ClientBase):
|
||||
return response, jose.ComparableX509(OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_ASN1, response.content))
|
||||
|
||||
def check_cert(self, certr):
|
||||
def check_cert(self, certr: messages.CertificateResource) -> messages.CertificateResource:
|
||||
"""Check for new cert.
|
||||
|
||||
:param certr: Certificate Resource
|
||||
@@ -516,7 +543,7 @@ class Client(ClientBase):
|
||||
raise errors.UnexpectedUpdate(response.text)
|
||||
return certr.update(body=cert)
|
||||
|
||||
def refresh(self, certr):
|
||||
def refresh(self, certr: messages.CertificateResource) -> messages.CertificateResource:
|
||||
"""Refresh certificate.
|
||||
|
||||
:param certr: Certificate Resource
|
||||
@@ -531,7 +558,8 @@ class Client(ClientBase):
|
||||
# respond with status code 403 (Forbidden)
|
||||
return self.check_cert(certr)
|
||||
|
||||
def fetch_chain(self, certr, max_length=10):
|
||||
def fetch_chain(self, certr: messages.CertificateResource,
|
||||
max_length: int = 10) -> List[jose.ComparableX509]:
|
||||
"""Fetch chain for certificate.
|
||||
|
||||
:param .CertificateResource certr: Certificate Resource
|
||||
@@ -549,7 +577,7 @@ class Client(ClientBase):
|
||||
:rtype: `list` of `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
|
||||
|
||||
"""
|
||||
chain = [] # type: List[jose.ComparableX509]
|
||||
chain: List[jose.ComparableX509] = []
|
||||
uri = certr.cert_chain_uri
|
||||
while uri is not None and len(chain) < max_length:
|
||||
response, cert = self._get_cert(uri)
|
||||
@@ -560,7 +588,7 @@ class Client(ClientBase):
|
||||
"Recursion limit reached. Didn't get {0}".format(uri))
|
||||
return chain
|
||||
|
||||
def revoke(self, cert, rsn):
|
||||
def revoke(self, cert: jose.ComparableX509, rsn: int) -> None:
|
||||
"""Revoke certificate.
|
||||
|
||||
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
|
||||
@@ -571,7 +599,7 @@ class Client(ClientBase):
|
||||
:raises .ClientError: If revocation is unsuccessful.
|
||||
|
||||
"""
|
||||
return self._revoke(cert, rsn, self.directory[messages.Revocation])
|
||||
self._revoke(cert, rsn, self.directory[cast(str, messages.Revocation)])
|
||||
|
||||
|
||||
class ClientV2(ClientBase):
|
||||
@@ -581,16 +609,15 @@ class ClientV2(ClientBase):
|
||||
:ivar .ClientNetwork net: Client network.
|
||||
"""
|
||||
|
||||
def __init__(self, directory, net):
|
||||
def __init__(self, directory: messages.Directory, net: 'ClientNetwork') -> None:
|
||||
"""Initialize.
|
||||
|
||||
:param .messages.Directory directory: Directory Resource
|
||||
:param .ClientNetwork net: Client network.
|
||||
"""
|
||||
super(ClientV2, self).__init__(directory=directory,
|
||||
net=net, acme_version=2)
|
||||
super().__init__(directory=directory, net=net, acme_version=2)
|
||||
|
||||
def new_account(self, new_account):
|
||||
def new_account(self, new_account: messages.NewRegistration) -> messages.RegistrationResource:
|
||||
"""Register.
|
||||
|
||||
:param .NewRegistration new_account:
|
||||
@@ -603,16 +630,17 @@ class ClientV2(ClientBase):
|
||||
response = self._post(self.directory['newAccount'], new_account)
|
||||
# if account already exists
|
||||
if response.status_code == 200 and 'Location' in response.headers:
|
||||
raise errors.ConflictError(response.headers.get('Location'))
|
||||
raise errors.ConflictError(response.headers['Location'])
|
||||
# "Instance of 'Field' has no key/contact member" bug:
|
||||
regr = self._regr_from_response(response)
|
||||
self.net.account = regr
|
||||
return regr
|
||||
|
||||
def query_registration(self, regr):
|
||||
def query_registration(self, regr: messages.RegistrationResource
|
||||
) -> messages.RegistrationResource:
|
||||
"""Query server about registration.
|
||||
|
||||
:param messages.RegistrationResource: Existing Registration
|
||||
:param messages.RegistrationResource regr: Existing Registration
|
||||
Resource.
|
||||
|
||||
"""
|
||||
@@ -624,7 +652,9 @@ class ClientV2(ClientBase):
|
||||
terms_of_service=regr.terms_of_service)
|
||||
return self.net.account
|
||||
|
||||
def update_registration(self, regr, update=None):
|
||||
def update_registration(self, regr: messages.RegistrationResource,
|
||||
update: Optional[messages.Registration] = None
|
||||
) -> messages.RegistrationResource:
|
||||
"""Update registration.
|
||||
|
||||
:param messages.RegistrationResource regr: Registration Resource.
|
||||
@@ -637,9 +667,9 @@ class ClientV2(ClientBase):
|
||||
"""
|
||||
# https://github.com/certbot/certbot/issues/6155
|
||||
new_regr = self._get_v2_account(regr)
|
||||
return super(ClientV2, self).update_registration(new_regr, update)
|
||||
return super().update_registration(new_regr, update)
|
||||
|
||||
def _get_v2_account(self, regr):
|
||||
def _get_v2_account(self, regr: messages.RegistrationResource) -> messages.RegistrationResource:
|
||||
self.net.account = None
|
||||
only_existing_reg = regr.body.update(only_return_existing=True)
|
||||
response = self._post(self.directory['newAccount'], only_existing_reg)
|
||||
@@ -648,10 +678,10 @@ class ClientV2(ClientBase):
|
||||
self.net.account = new_regr
|
||||
return new_regr
|
||||
|
||||
def new_order(self, csr_pem):
|
||||
def new_order(self, csr_pem: bytes) -> messages.OrderResource:
|
||||
"""Request a new Order object from the server.
|
||||
|
||||
:param str csr_pem: A CSR in PEM format.
|
||||
:param bytes csr_pem: A CSR in PEM format.
|
||||
|
||||
:returns: The newly created order.
|
||||
:rtype: OrderResource
|
||||
@@ -659,16 +689,23 @@ class ClientV2(ClientBase):
|
||||
csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# pylint: disable=protected-access
|
||||
dnsNames = crypto_util._pyopenssl_cert_or_req_all_names(csr)
|
||||
|
||||
ipNames = crypto_util._pyopenssl_cert_or_req_san_ip(csr)
|
||||
# ipNames is now a list of strings
|
||||
identifiers = []
|
||||
for name in dnsNames:
|
||||
identifiers.append(messages.Identifier(typ=messages.IDENTIFIER_FQDN,
|
||||
value=name))
|
||||
for ips in ipNames:
|
||||
identifiers.append(messages.Identifier(typ=messages.IDENTIFIER_IP,
|
||||
value=ips))
|
||||
order = messages.NewOrder(identifiers=identifiers)
|
||||
response = self._post(self.directory['newOrder'], order)
|
||||
body = messages.Order.from_json(response.json())
|
||||
authorizations = []
|
||||
for url in body.authorizations:
|
||||
# pylint has trouble understanding our josepy based objects which use
|
||||
# things like custom metaclass logic. body.authorizations should be a
|
||||
# list of strings containing URLs so let's disable this check here.
|
||||
for url in body.authorizations: # pylint: disable=not-an-iterable
|
||||
authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
|
||||
return messages.OrderResource(
|
||||
body=body,
|
||||
@@ -676,7 +713,8 @@ class ClientV2(ClientBase):
|
||||
authorizations=authorizations,
|
||||
csr_pem=csr_pem)
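A hedged usage sketch for the typed new_order (the `client` and `csr_pem` names below are assumptions, not part of this change): the CSR's SANs may now mix DNS names and IP addresses, and each one becomes its own identifier and authorization.
# Hypothetical sketch: `client` is an already-registered ClientV2 instance and
# `csr_pem` is a PEM CSR whose SANs may mix DNS names and IP addresses.
order = client.new_order(csr_pem)
for authzr in order.authorizations:
    # typ is IDENTIFIER_FQDN ('dns') or IDENTIFIER_IP ('ip')
    print(authzr.body.identifier.typ, authzr.body.identifier.value)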
|
||||
|
||||
def poll(self, authzr):
|
||||
def poll(self, authzr: messages.AuthorizationResource
|
||||
) -> Tuple[messages.AuthorizationResource, requests.Response]:
|
||||
"""Poll Authorization Resource for status.
|
||||
|
||||
:param authzr: Authorization Resource
|
||||
@@ -692,7 +730,8 @@ class ClientV2(ClientBase):
|
||||
response, authzr.body.identifier, authzr.uri)
|
||||
return updated_authzr, response
|
||||
|
||||
def poll_and_finalize(self, orderr, deadline=None):
|
||||
def poll_and_finalize(self, orderr: messages.OrderResource,
|
||||
deadline: Optional[datetime.datetime] = None) -> messages.OrderResource:
|
||||
"""Poll authorizations and finalize the order.
|
||||
|
||||
If no deadline is provided, this method will timeout after 90
|
||||
@@ -710,7 +749,8 @@ class ClientV2(ClientBase):
|
||||
orderr = self.poll_authorizations(orderr, deadline)
|
||||
return self.finalize_order(orderr, deadline)
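A hedged end-to-end sketch of driving this method (the `client` and `order` objects are assumed to exist, and their challenges are assumed to have been answered already):
import datetime

deadline = datetime.datetime.now() + datetime.timedelta(seconds=90)
finalized = client.poll_and_finalize(order, deadline)
print(finalized.fullchain_pem.splitlines()[0])  # "-----BEGIN CERTIFICATE-----"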
|
||||
|
||||
def poll_authorizations(self, orderr, deadline):
|
||||
def poll_authorizations(self, orderr: messages.OrderResource, deadline: datetime.datetime
|
||||
) -> messages.OrderResource:
|
||||
"""Poll Order Resource for status."""
|
||||
responses = []
|
||||
for url in orderr.body.authorizations:
|
||||
@@ -734,7 +774,8 @@ class ClientV2(ClientBase):
|
||||
raise errors.ValidationError(failed)
|
||||
return orderr.update(authorizations=responses)
|
||||
|
||||
def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
|
||||
def finalize_order(self, orderr: messages.OrderResource, deadline: datetime.datetime,
|
||||
fetch_alternative_chains: bool = False) -> messages.OrderResource:
|
||||
"""Finalize an order and obtain a certificate.
|
||||
|
||||
:param messages.OrderResource orderr: order to finalize
|
||||
@@ -766,7 +807,7 @@ class ClientV2(ClientBase):
|
||||
return orderr
|
||||
raise errors.TimeoutError()
|
||||
|
||||
def revoke(self, cert, rsn):
|
||||
def revoke(self, cert: jose.ComparableX509, rsn: int) -> None:
|
||||
"""Revoke certificate.
|
||||
|
||||
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
|
||||
@@ -777,13 +818,13 @@ class ClientV2(ClientBase):
|
||||
:raises .ClientError: If revocation is unsuccessful.
|
||||
|
||||
"""
|
||||
return self._revoke(cert, rsn, self.directory['revokeCert'])
|
||||
self._revoke(cert, rsn, self.directory['revokeCert'])
|
||||
|
||||
def external_account_required(self):
|
||||
def external_account_required(self) -> bool:
|
||||
"""Checks if ACME server requires External Account Binding authentication."""
|
||||
return hasattr(self.directory, 'meta') and self.directory.meta.external_account_required
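A small hedged sketch of how a caller might use this check before registering; the `client` and `eab_credentials` names are illustrative assumptions.
# Hypothetical guard: `client` is a ClientV2 and `eab_credentials` holds the
# External Account Binding key id/HMAC supplied by the user, or None.
if client.external_account_required() and eab_credentials is None:
    raise errors.Error("This ACME server requires External Account Binding credentials")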
|
||||
|
||||
def _post_as_get(self, *args, **kwargs):
|
||||
def _post_as_get(self, *args: Any, **kwargs: Any) -> requests.Response:
|
||||
"""
|
||||
Send GET request using the POST-as-GET protocol.
|
||||
:param args:
|
||||
@@ -793,7 +834,7 @@ class ClientV2(ClientBase):
|
||||
new_args = args[:1] + (None,) + args[1:]
|
||||
return self._post(*new_args, **kwargs)
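For orientation, a hedged sketch of how this POST-as-GET helper is used internally: the request body is a signed JWS with an empty payload, replacing a plain GET so the server can authenticate it (the `client` and `authz_url` names are assumptions).
# Hypothetical internal usage: fetch an authorization via POST-as-GET and
# deserialize the JSON body into an Authorization message.
response = client._post_as_get(authz_url)
authz = messages.Authorization.from_json(response.json())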
|
||||
|
||||
def _get_links(self, response, relation_type):
|
||||
def _get_links(self, response: requests.Response, relation_type: str) -> List[str]:
|
||||
"""
|
||||
Retrieves all Link URIs of relation_type from the response.
|
||||
:param requests.Response response: The requests HTTP response.
|
||||
@@ -808,10 +849,13 @@ class ClientV2(ClientBase):
|
||||
if 'rel' in l and 'url' in l and l['rel'] == relation_type]
|
||||
|
||||
|
||||
class BackwardsCompatibleClientV2(object):
|
||||
class BackwardsCompatibleClientV2:
|
||||
"""ACME client wrapper that tends towards V2-style calls, but
|
||||
supports V1 servers.
|
||||
|
||||
.. deprecated:: 1.18.0
|
||||
Use :class:`ClientV2` instead.
|
||||
|
||||
.. note:: While this class handles the majority of the differences
|
||||
between versions of the ACME protocol, if you need to support an
|
||||
ACME server based on version 3 or older of the IETF ACME draft
|
||||
@@ -827,46 +871,51 @@ class BackwardsCompatibleClientV2(object):
|
||||
:ivar .ClientBase client: either Client or ClientV2
|
||||
"""
|
||||
|
||||
def __init__(self, net, key, server):
|
||||
def __init__(self, net: 'ClientNetwork', key: jose.JWK, server: str) -> None:
|
||||
directory = messages.Directory.from_json(net.get(server).json())
|
||||
self.acme_version = self._acme_version_from_directory(directory)
|
||||
self.client: Union[Client, ClientV2]
|
||||
if self.acme_version == 1:
|
||||
self.client = Client(directory, key=key, net=net)
|
||||
else:
|
||||
self.client = ClientV2(directory, net=net)
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
return getattr(self.client, name)
|
||||
|
||||
def new_account_and_tos(self, regr, check_tos_cb=None):
|
||||
def new_account_and_tos(self, regr: messages.NewRegistration,
|
||||
check_tos_cb: Optional[Callable[[str], None]] = None
|
||||
) -> messages.RegistrationResource:
|
||||
"""Combined register and agree_tos for V1, new_account for V2
|
||||
|
||||
:param .NewRegistration regr:
|
||||
:param callable check_tos_cb: callback that raises an error if
|
||||
the check does not work
|
||||
"""
|
||||
def _assess_tos(tos):
|
||||
def _assess_tos(tos: str) -> None:
|
||||
if check_tos_cb is not None:
|
||||
check_tos_cb(tos)
|
||||
if self.acme_version == 1:
|
||||
regr = self.client.register(regr)
|
||||
client_v1 = cast(Client, self.client)
|
||||
regr = client_v1.register(regr)
|
||||
if regr.terms_of_service is not None:
|
||||
_assess_tos(regr.terms_of_service)
|
||||
return self.client.agree_to_tos(regr)
|
||||
return client_v1.agree_to_tos(regr)
|
||||
return regr
|
||||
else:
|
||||
if "terms_of_service" in self.client.directory.meta:
|
||||
_assess_tos(self.client.directory.meta.terms_of_service)
|
||||
client_v2 = cast(ClientV2, self.client)
|
||||
if "terms_of_service" in client_v2.directory.meta:
|
||||
_assess_tos(client_v2.directory.meta.terms_of_service)
|
||||
regr = regr.update(terms_of_service_agreed=True)
|
||||
return self.client.new_account(regr)
|
||||
return client_v2.new_account(regr)
|
||||
|
||||
def new_order(self, csr_pem):
|
||||
def new_order(self, csr_pem: bytes) -> messages.OrderResource:
|
||||
"""Request a new Order object from the server.
|
||||
|
||||
If using ACMEv1, returns a dummy OrderResource with only
|
||||
the authorizations field filled in.
|
||||
|
||||
:param str csr_pem: A CSR in PEM format.
|
||||
:param bytes csr_pem: A CSR in PEM format.
|
||||
|
||||
:returns: The newly created order.
|
||||
:rtype: OrderResource
|
||||
@@ -876,16 +925,18 @@ class BackwardsCompatibleClientV2(object):
|
||||
|
||||
"""
|
||||
if self.acme_version == 1:
|
||||
client_v1 = cast(Client, self.client)
|
||||
csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# pylint: disable=protected-access
|
||||
dnsNames = crypto_util._pyopenssl_cert_or_req_all_names(csr)
|
||||
authorizations = []
|
||||
for domain in dnsNames:
|
||||
authorizations.append(self.client.request_domain_challenges(domain))
|
||||
authorizations.append(client_v1.request_domain_challenges(domain))
|
||||
return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
|
||||
return self.client.new_order(csr_pem)
|
||||
return cast(ClientV2, self.client).new_order(csr_pem)
|
||||
|
||||
def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
|
||||
def finalize_order(self, orderr: messages.OrderResource, deadline: datetime.datetime,
|
||||
fetch_alternative_chains: bool = False) -> messages.OrderResource:
|
||||
"""Finalize an order and obtain a certificate.
|
||||
|
||||
:param messages.OrderResource orderr: order to finalize
|
||||
@@ -898,8 +949,9 @@ class BackwardsCompatibleClientV2(object):
|
||||
|
||||
"""
|
||||
if self.acme_version == 1:
|
||||
client_v1 = cast(Client, self.client)
|
||||
csr_pem = orderr.csr_pem
|
||||
certr = self.client.request_issuance(
|
||||
certr = client_v1.request_issuance(
|
||||
jose.ComparableX509(
|
||||
OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)),
|
||||
orderr.authorizations)
|
||||
@@ -907,7 +959,7 @@ class BackwardsCompatibleClientV2(object):
|
||||
chain = None
|
||||
while datetime.datetime.now() < deadline:
|
||||
try:
|
||||
chain = self.client.fetch_chain(certr)
|
||||
chain = client_v1.fetch_chain(certr)
|
||||
break
|
||||
except errors.Error:
|
||||
time.sleep(1)
|
||||
@@ -919,12 +971,13 @@ class BackwardsCompatibleClientV2(object):
|
||||
|
||||
cert = OpenSSL.crypto.dump_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM, certr.body.wrapped).decode()
|
||||
chain = crypto_util.dump_pyopenssl_chain(chain).decode()
|
||||
chain_str = crypto_util.dump_pyopenssl_chain(chain).decode()
|
||||
|
||||
return orderr.update(fullchain_pem=(cert + chain))
|
||||
return self.client.finalize_order(orderr, deadline, fetch_alternative_chains)
|
||||
return orderr.update(fullchain_pem=(cert + chain_str))
|
||||
return cast(ClientV2, self.client).finalize_order(
|
||||
orderr, deadline, fetch_alternative_chains)
|
||||
|
||||
def revoke(self, cert, rsn):
|
||||
def revoke(self, cert: jose.ComparableX509, rsn: int) -> None:
|
||||
"""Revoke certificate.
|
||||
|
||||
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
|
||||
@@ -935,23 +988,23 @@ class BackwardsCompatibleClientV2(object):
|
||||
:raises .ClientError: If revocation is unsuccessful.
|
||||
|
||||
"""
|
||||
return self.client.revoke(cert, rsn)
|
||||
self.client.revoke(cert, rsn)
|
||||
|
||||
def _acme_version_from_directory(self, directory):
|
||||
def _acme_version_from_directory(self, directory: messages.Directory) -> int:
|
||||
if hasattr(directory, 'newNonce'):
|
||||
return 2
|
||||
return 1
|
||||
|
||||
def external_account_required(self):
|
||||
def external_account_required(self) -> bool:
|
||||
"""Checks if the server requires an external account for ACMEv2 servers.
|
||||
|
||||
Always returns False for ACMEv1 servers, as ACMEv1 does not use External Account Binding."""
|
||||
if self.acme_version == 1:
|
||||
return False
|
||||
return self.client.external_account_required()
|
||||
return cast(ClientV2, self.client).external_account_required()
|
||||
|
||||
|
||||
class ClientNetwork(object):
|
||||
class ClientNetwork:
|
||||
"""Wrapper around requests that signs POSTs for authentication.
|
||||
|
||||
Also adds user agent, and handles Content-Type.
|
||||
@@ -974,14 +1027,15 @@ class ClientNetwork(object):
|
||||
:param source_address: Optional source address to bind to when making requests.
|
||||
:type source_address: str or tuple(str, int)
|
||||
"""
|
||||
def __init__(self, key, account=None, alg=jose.RS256, verify_ssl=True,
|
||||
user_agent='acme-python', timeout=DEFAULT_NETWORK_TIMEOUT,
|
||||
source_address=None):
|
||||
def __init__(self, key: jose.JWK, account: Optional[messages.RegistrationResource] = None,
|
||||
alg: jose.JWASignature = jose.RS256, verify_ssl: bool = True,
|
||||
user_agent: str = 'acme-python', timeout: int = DEFAULT_NETWORK_TIMEOUT,
|
||||
source_address: Optional[Union[str, Tuple[str, int]]] = None) -> None:
|
||||
self.key = key
|
||||
self.account = account
|
||||
self.alg = alg
|
||||
self.verify_ssl = verify_ssl
|
||||
self._nonces = set() # type: Set[Text]
|
||||
self._nonces: Set[Text] = set()
|
||||
self.user_agent = user_agent
|
||||
self.session = requests.Session()
|
||||
self._default_timeout = timeout
|
||||
@@ -993,7 +1047,7 @@ class ClientNetwork(object):
|
||||
self.session.mount("http://", adapter)
|
||||
self.session.mount("https://", adapter)
|
||||
|
||||
def __del__(self):
|
||||
def __del__(self) -> None:
|
||||
# Try to close the session, but don't show exceptions to the
|
||||
# user if the call to close() fails. See #4840.
|
||||
try:
|
||||
@@ -1001,14 +1055,15 @@ class ClientNetwork(object):
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
|
||||
def _wrap_in_jws(self, obj, nonce, url, acme_version):
|
||||
def _wrap_in_jws(self, obj: jose.JSONDeSerializable, nonce: str, url: str,
|
||||
acme_version: int) -> jose.JWS:
|
||||
"""Wrap `JSONDeSerializable` object in JWS.
|
||||
|
||||
.. todo:: Implement ``acmePath``.
|
||||
|
||||
:param josepy.JSONDeSerializable obj:
|
||||
:param str url: The URL to which this object will be POSTed
|
||||
:param bytes nonce:
|
||||
:param str nonce:
|
||||
:rtype: `josepy.JWS`
|
||||
|
||||
"""
|
||||
@@ -1030,7 +1085,8 @@ class ClientNetwork(object):
|
||||
return jws.JWS.sign(jobj, **kwargs).json_dumps(indent=2)
|
||||
|
||||
@classmethod
|
||||
def _check_response(cls, response, content_type=None):
|
||||
def _check_response(cls, response: requests.Response,
|
||||
content_type: Optional[str] = None) -> requests.Response:
|
||||
"""Check response content and its type.
|
||||
|
||||
.. note::
|
||||
@@ -1060,7 +1116,7 @@ class ClientNetwork(object):
|
||||
jobj = None
|
||||
|
||||
if response.status_code == 409:
|
||||
raise errors.ConflictError(response.headers.get('Location'))
|
||||
raise errors.ConflictError(response.headers.get('Location', 'UNKNOWN-LOCATION'))
|
||||
|
||||
if not response.ok:
|
||||
if jobj is not None:
|
||||
@@ -1088,7 +1144,7 @@ class ClientNetwork(object):
|
||||
|
||||
return response
|
||||
|
||||
def _send_request(self, method, url, *args, **kwargs):
|
||||
def _send_request(self, method: str, url: str, *args: Any, **kwargs: Any) -> requests.Response:
|
||||
"""Send HTTP request.
|
||||
|
||||
Makes sure that `verify_ssl` is respected. Logs request and
|
||||
@@ -1139,12 +1195,23 @@ class ClientNetwork(object):
|
||||
host, path, _err_no, err_msg = m.groups()
|
||||
raise ValueError("Requesting {0}{1}:{2}".format(host, path, err_msg))
|
||||
|
||||
# If content is DER, log the base64 of it instead of raw bytes, to keep
|
||||
# binary data out of the logs.
|
||||
if response.headers.get("Content-Type") == DER_CONTENT_TYPE:
|
||||
# If the Content-Type is DER or an Accept header was sent in the
|
||||
# request, the response may not be UTF-8 encoded. In this case, we
|
||||
# don't set response.encoding and log the base64 response instead of
|
||||
# raw bytes to keep binary data out of the logs. This code can be
|
||||
# simplified to only check for an Accept header in the request when
|
||||
# ACMEv1 support is dropped.
|
||||
debug_content: Union[bytes, str]
|
||||
if (response.headers.get("Content-Type") == DER_CONTENT_TYPE or
|
||||
"Accept" in kwargs["headers"]):
|
||||
debug_content = base64.b64encode(response.content)
|
||||
else:
|
||||
debug_content = response.content.decode("utf-8")
|
||||
# We set response.encoding so response.text knows the response is
|
||||
# UTF-8 encoded instead of trying to guess the encoding that was
|
||||
# used which is error prone. This setting affects all future
|
||||
# accesses of .text made on the returned response object as well.
|
||||
response.encoding = "utf-8"
|
||||
debug_content = response.text
|
||||
logger.debug('Received response:\nHTTP %d\n%s\n\n%s',
|
||||
response.status_code,
|
||||
"\n".join("{0}: {1}".format(k, v)
|
||||
@@ -1152,7 +1219,7 @@ class ClientNetwork(object):
|
||||
debug_content)
|
||||
return response
|
||||
|
||||
def head(self, *args, **kwargs):
|
||||
def head(self, *args: Any, **kwargs: Any) -> requests.Response:
|
||||
"""Send HEAD request without checking the response.
|
||||
|
||||
Note, that `_check_response` is not called, as it is expected
|
||||
@@ -1162,12 +1229,13 @@ class ClientNetwork(object):
|
||||
"""
|
||||
return self._send_request('HEAD', *args, **kwargs)
|
||||
|
||||
def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
|
||||
def get(self, url: str, content_type: str = JSON_CONTENT_TYPE,
|
||||
**kwargs: Any) -> requests.Response:
|
||||
"""Send GET request and check response."""
|
||||
return self._check_response(
|
||||
self._send_request('GET', url, **kwargs), content_type=content_type)
|
||||
|
||||
def _add_nonce(self, response):
|
||||
def _add_nonce(self, response: requests.Response) -> None:
|
||||
if self.REPLAY_NONCE_HEADER in response.headers:
|
||||
nonce = response.headers[self.REPLAY_NONCE_HEADER]
|
||||
try:
|
||||
@@ -1179,7 +1247,7 @@ class ClientNetwork(object):
|
||||
else:
|
||||
raise errors.MissingNonce(response)
|
||||
|
||||
def _get_nonce(self, url, new_nonce_url):
|
||||
def _get_nonce(self, url: str, new_nonce_url: str) -> str:
|
||||
if not self._nonces:
|
||||
logger.debug('Requesting fresh nonce')
|
||||
if new_nonce_url is None:
|
||||
@@ -1190,7 +1258,7 @@ class ClientNetwork(object):
|
||||
self._add_nonce(response)
|
||||
return self._nonces.pop()
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
def post(self, *args: Any, **kwargs: Any) -> requests.Response:
|
||||
"""POST object wrapped in `.JWS` and check response.
|
||||
|
||||
If the server responded with a badNonce error, the request will
|
||||
@@ -1205,8 +1273,9 @@ class ClientNetwork(object):
|
||||
return self._post_once(*args, **kwargs)
|
||||
raise
|
||||
|
||||
def _post_once(self, url, obj, content_type=JOSE_CONTENT_TYPE,
|
||||
acme_version=1, **kwargs):
|
||||
def _post_once(self, url: str, obj: jose.JSONDeSerializable,
|
||||
content_type: str = JOSE_CONTENT_TYPE, acme_version: int = 1,
|
||||
**kwargs: Any) -> requests.Response:
|
||||
new_nonce_url = kwargs.pop('new_nonce_url', None)
|
||||
data = self._wrap_in_jws(obj, self._get_nonce(url, new_nonce_url), url, acme_version)
|
||||
kwargs.setdefault('headers', {'Content-Type': content_type})
|
||||
@@ -1214,3 +1283,35 @@ class ClientNetwork(object):
|
||||
response = self._check_response(response, content_type=content_type)
|
||||
self._add_nonce(response)
|
||||
return response
|
||||
|
||||
|
||||
# This class takes a similar approach to the cryptography project to deprecate attributes
|
||||
# in public modules. See the _ModuleWithDeprecation class here:
|
||||
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
|
||||
class _ClientDeprecationModule:
|
||||
"""
|
||||
Internal class delegating to a module, and displaying warnings when
|
||||
deprecated attributes in the acme.client module are accessed.
|
||||
"""
|
||||
def __init__(self, module: ModuleType) -> None:
|
||||
self.__dict__['_module'] = module
|
||||
|
||||
def __getattr__(self, attr: str) -> Any:
|
||||
if attr in ('Client', 'BackwardsCompatibleClientV2'):
|
||||
warnings.warn('The {0} attribute in acme.client is deprecated '
|
||||
'and will be removed soon.'.format(attr),
|
||||
DeprecationWarning, stacklevel=2)
|
||||
return getattr(self._module, attr)
|
||||
|
||||
def __setattr__(self, attr: str, value: Any) -> None: # pragma: no cover
|
||||
setattr(self._module, attr, value)
|
||||
|
||||
def __delattr__(self, attr: str) -> None: # pragma: no cover
|
||||
delattr(self._module, attr)
|
||||
|
||||
def __dir__(self) -> List[str]: # pragma: no cover
|
||||
return ['_module'] + dir(self._module)
|
||||
|
||||
|
||||
# Patching ourselves to warn about deprecation and planned removal of some elements in the module.
|
||||
sys.modules[__name__] = cast(ModuleType, _ClientDeprecationModule(sys.modules[__name__]))
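A hedged sketch of the resulting behaviour: in a fresh interpreter, touching one of the deprecated names on acme.client should emit a DeprecationWarning through the wrapper above.
import warnings
from acme import client as acme_client

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _ = acme_client.Client  # deprecated name, still resolvable for now
assert any(issubclass(w.category, DeprecationWarning) for w in caught)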
|
||||
|
||||
@@ -1,19 +1,25 @@
|
||||
"""Crypto utilities."""
|
||||
import binascii
|
||||
import contextlib
|
||||
import ipaddress
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import Union
|
||||
|
||||
import josepy as jose
|
||||
from OpenSSL import crypto
|
||||
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
||||
from OpenSSL import SSL
|
||||
|
||||
from acme import errors
|
||||
from acme.magic_typing import Callable
|
||||
from acme.magic_typing import Tuple
|
||||
from acme.magic_typing import Union
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -24,19 +30,19 @@ logger = logging.getLogger(__name__)
|
||||
# https://www.openssl.org/docs/ssl/SSLv23_method.html). _serve_sni
|
||||
# should be changed to use "set_options" to disable SSLv2 and SSLv3,
|
||||
# in case it's used for things other than probing/serving!
|
||||
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
|
||||
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD
|
||||
|
||||
|
||||
class _DefaultCertSelection(object):
|
||||
def __init__(self, certs):
|
||||
class _DefaultCertSelection:
|
||||
def __init__(self, certs: Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]):
|
||||
self.certs = certs
|
||||
|
||||
def __call__(self, connection):
|
||||
def __call__(self, connection: SSL.Connection) -> Optional[Tuple[crypto.PKey, crypto.X509]]:
|
||||
server_name = connection.get_servername()
|
||||
return self.certs.get(server_name, None)
|
||||
|
||||
|
||||
class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
class SSLSocket: # pylint: disable=too-few-public-methods
|
||||
"""SSL wrapper for sockets.
|
||||
|
||||
:ivar socket sock: Original wrapped socket.
|
||||
@@ -49,9 +55,13 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
`certs` parameter would be ignored, and therefore must be empty.
|
||||
|
||||
"""
|
||||
def __init__(self, sock, certs=None,
|
||||
method=_DEFAULT_SSL_METHOD, alpn_selection=None,
|
||||
cert_selection=None):
|
||||
def __init__(self, sock: socket.socket,
|
||||
certs: Optional[Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]] = None,
|
||||
method: int = _DEFAULT_SSL_METHOD,
|
||||
alpn_selection: Optional[Callable[[SSL.Connection, List[bytes]], bytes]] = None,
|
||||
cert_selection: Optional[Callable[[SSL.Connection],
|
||||
Tuple[crypto.PKey, crypto.X509]]] = None
|
||||
) -> None:
|
||||
self.sock = sock
|
||||
self.alpn_selection = alpn_selection
|
||||
self.method = method
|
||||
@@ -59,14 +69,18 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
raise ValueError("Neither cert_selection or certs specified.")
|
||||
if cert_selection and certs:
|
||||
raise ValueError("Both cert_selection and certs specified.")
|
||||
if cert_selection is None:
|
||||
cert_selection = _DefaultCertSelection(certs)
|
||||
self.cert_selection = cert_selection
|
||||
actual_cert_selection: Union[_DefaultCertSelection,
|
||||
Optional[Callable[[SSL.Connection],
|
||||
Tuple[crypto.PKey,
|
||||
crypto.X509]]]] = cert_selection
|
||||
if actual_cert_selection is None:
|
||||
actual_cert_selection = _DefaultCertSelection(certs if certs else {})
|
||||
self.cert_selection = actual_cert_selection
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
return getattr(self.sock, name)
|
||||
|
||||
def _pick_certificate_cb(self, connection):
|
||||
def _pick_certificate_cb(self, connection: SSL.Connection) -> None:
|
||||
"""SNI certificate callback.
|
||||
|
||||
This method will set a new OpenSSL context object for this
|
||||
@@ -93,22 +107,22 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
new_context.set_alpn_select_callback(self.alpn_selection)
|
||||
connection.set_context(new_context)
|
||||
|
||||
class FakeConnection(object):
|
||||
class FakeConnection:
|
||||
"""Fake OpenSSL.SSL.Connection."""
|
||||
|
||||
# pylint: disable=missing-function-docstring
|
||||
|
||||
def __init__(self, connection):
|
||||
def __init__(self, connection: SSL.Connection) -> None:
|
||||
self._wrapped = connection
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
return getattr(self._wrapped, name)
|
||||
|
||||
def shutdown(self, *unused_args):
|
||||
def shutdown(self, *unused_args: Any) -> bool:
|
||||
# OpenSSL.SSL.Connection.shutdown doesn't accept any args
|
||||
return self._wrapped.shutdown()
|
||||
|
||||
def accept(self): # pylint: disable=missing-function-docstring
|
||||
def accept(self) -> Tuple[FakeConnection, Any]: # pylint: disable=missing-function-docstring
|
||||
sock, addr = self.sock.accept()
|
||||
|
||||
context = SSL.Context(self.method)
|
||||
@@ -132,9 +146,9 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
return ssl_sock, addr
|
||||
|
||||
|
||||
def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-arguments
|
||||
method=_DEFAULT_SSL_METHOD, source_address=('', 0),
|
||||
alpn_protocols=None):
|
||||
def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, # pylint: disable=too-many-arguments
|
||||
method: int = _DEFAULT_SSL_METHOD, source_address: Tuple[str, int] = ('', 0),
|
||||
alpn_protocols: Optional[List[str]] = None) -> crypto.X509:
|
||||
"""Probe SNI server for SSL certificate.
|
||||
|
||||
:param bytes name: Byte string to send as the server name in the
|
||||
@@ -147,7 +161,7 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
|
||||
of source interface). See `socket.create_connection` for more
|
||||
info. Available only in Python 2.7+.
|
||||
:param alpn_protocols: Protocols to request using ALPN.
|
||||
:type alpn_protocols: `list` of `bytes`
|
||||
:type alpn_protocols: `list` of `str`
|
||||
|
||||
:raises acme.errors.Error: In case of any problems.
|
||||
|
||||
@@ -166,10 +180,10 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
|
||||
" from {0}:{1}".format(
|
||||
source_address[0],
|
||||
source_address[1]
|
||||
) if socket_kwargs else ""
|
||||
) if any(source_address) else ""
|
||||
)
|
||||
socket_tuple = (host, port) # type: Tuple[str, int]
|
||||
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore
|
||||
socket_tuple: Tuple[bytes, int] = (host, port)
|
||||
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore[arg-type]
|
||||
except socket.error as error:
|
||||
raise errors.Error(error)
|
||||
|
||||
@@ -187,23 +201,45 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
|
||||
return client_ssl.get_peer_certificate()
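A hedged usage sketch for the typed probe_sni; the name, host, and port below are illustrative values (both name and host are byte strings), not values taken from this change.
from acme import crypto_util

cert = crypto_util.probe_sni(name=b"example.com", host=b"127.0.0.1", port=5001)
print(cert.get_subject().CN)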
|
||||
|
||||
|
||||
def make_csr(private_key_pem, domains, must_staple=False):
|
||||
"""Generate a CSR containing a list of domains as subjectAltNames.
|
||||
def make_csr(private_key_pem: bytes, domains: Optional[Union[Set[str], List[str]]] = None,
|
||||
must_staple: bool = False,
|
||||
ipaddrs: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
|
||||
) -> bytes:
|
||||
"""Generate a CSR containing domains or IPs as subjectAltNames.
|
||||
|
||||
:param buffer private_key_pem: Private key, in PEM PKCS#8 format.
|
||||
:param list domains: List of DNS names to include in subjectAltNames of CSR.
|
||||
:param bool must_staple: Whether to include the TLS Feature extension (aka
|
||||
OCSP Must Staple: https://tools.ietf.org/html/rfc7633).
|
||||
:param list ipaddrs: List of IP addresses (of type ipaddress.IPv4Address or ipaddress.IPv6Address)
|
||||
to include in the subjectAltNames of the CSR.
|
||||
Parameters are ordered this way for backward compatibility when called with positional arguments.
|
||||
:returns: buffer PEM-encoded Certificate Signing Request.
|
||||
"""
|
||||
private_key = crypto.load_privatekey(
|
||||
crypto.FILETYPE_PEM, private_key_pem)
|
||||
csr = crypto.X509Req()
|
||||
sanlist = []
|
||||
# if the domain or IP list is not supplied, default to an empty list so it's easier to iterate
|
||||
if domains is None:
|
||||
domains = []
|
||||
if ipaddrs is None:
|
||||
ipaddrs = []
|
||||
if len(domains) + len(ipaddrs) == 0:
|
||||
raise ValueError("At least one of domains or ipaddrs parameter need to be not empty")
|
||||
for address in domains:
|
||||
sanlist.append('DNS:' + address)
|
||||
for ips in ipaddrs:
|
||||
sanlist.append('IP:' + ips.exploded)
|
||||
# make sure it's ASCII-encoded
|
||||
san_string = ', '.join(sanlist).encode('ascii')
|
||||
# for an IP SAN the value actually needs to be an octet string,
|
||||
# but something downstream thankfully handles that for us
|
||||
extensions = [
|
||||
crypto.X509Extension(
|
||||
b'subjectAltName',
|
||||
critical=False,
|
||||
value=', '.join('DNS:' + d for d in domains).encode('ascii')
|
||||
value=san_string
|
||||
),
|
||||
]
|
||||
if must_staple:
|
||||
@@ -219,7 +255,9 @@ def make_csr(private_key_pem, domains, must_staple=False):
|
||||
crypto.FILETYPE_PEM, csr)
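A hedged usage sketch for the extended make_csr, producing a CSR that mixes DNS and IP subjectAltNames; the key is generated inline so the snippet stands alone, and the names/addresses are illustrative.
import ipaddress
import OpenSSL
from acme import crypto_util

key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
key_pem = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)

csr_pem = crypto_util.make_csr(
    key_pem,
    domains=["example.com", "www.example.com"],
    ipaddrs=[ipaddress.ip_address("192.0.2.1")],
)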
|
||||
|
||||
|
||||
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
|
||||
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req: Union[crypto.X509, crypto.X509Req]
|
||||
) -> List[str]:
|
||||
# despite its name, this only outputs DNS names; other identifier types are ignored
|
||||
common_name = loaded_cert_or_req.get_subject().CN
|
||||
sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)
|
||||
|
||||
@@ -228,7 +266,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
|
||||
return [common_name] + [d for d in sans if d != common_name]
|
||||
|
||||
|
||||
def _pyopenssl_cert_or_req_san(cert_or_req):
|
||||
def _pyopenssl_cert_or_req_san(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
|
||||
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
|
||||
|
||||
.. todo:: Implement directly in PyOpenSSL!
|
||||
@@ -239,40 +277,79 @@ def _pyopenssl_cert_or_req_san(cert_or_req):
|
||||
:param cert_or_req: Certificate or CSR.
|
||||
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
:returns: A list of Subject Alternative Names.
|
||||
:returns: A list of Subject Alternative Names that are DNS names.
|
||||
:rtype: `list` of `unicode`
|
||||
|
||||
"""
|
||||
# This function finds SANs by dumping the certificate/CSR to text and
|
||||
# searching for "X509v3 Subject Alternative Name" in the text. This method
|
||||
# is used to support PyOpenSSL version 0.13 where the
|
||||
# `_subjectAltNameString` and `get_extensions` methods are not available
|
||||
# for CSRs.
|
||||
# This function finds SANs that are DNS names
|
||||
|
||||
# constants based on PyOpenSSL certificate/CSR text dump
|
||||
part_separator = ":"
|
||||
parts_separator = ", "
|
||||
prefix = "DNS" + part_separator
|
||||
|
||||
if isinstance(cert_or_req, crypto.X509):
|
||||
# pylint: disable=line-too-long
|
||||
func = crypto.dump_certificate # type: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]]
|
||||
else:
|
||||
func = crypto.dump_certificate_request
|
||||
text = func(crypto.FILETYPE_TEXT, cert_or_req).decode("utf-8")
|
||||
# WARNING: this function does not support multiple SANs extensions.
|
||||
# Multiple X509v3 extensions of the same type is disallowed by RFC 5280.
|
||||
match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
|
||||
# WARNING: this function assumes that no SAN can include
|
||||
# parts_separator, hence the split!
|
||||
sans_parts = [] if match is None else match.group(1).split(parts_separator)
|
||||
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
|
||||
|
||||
return [part.split(part_separator)[1]
|
||||
for part in sans_parts if part.startswith(prefix)]
|
||||
|
||||
|
||||
def gen_ss_cert(key, domains, not_before=None,
|
||||
validity=(7 * 24 * 60 * 60), force_san=True, extensions=None):
|
||||
def _pyopenssl_cert_or_req_san_ip(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
|
||||
"""Get Subject Alternative Names IPs from certificate or CSR using pyOpenSSL.
|
||||
|
||||
:param cert_or_req: Certificate or CSR.
|
||||
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
:returns: A list of Subject Alternative Names that are IP Addresses.
|
||||
:rtype: `list` of `unicode`. Note that this returns strings, not ipaddress objects.
|
||||
|
||||
"""
|
||||
|
||||
# constants based on PyOpenSSL certificate/CSR text dump
|
||||
part_separator = ":"
|
||||
prefix = "IP Address" + part_separator
|
||||
|
||||
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
|
||||
|
||||
return [part[len(prefix):] for part in sans_parts if part.startswith(prefix)]
|
||||
|
||||
|
||||
def _pyopenssl_extract_san_list_raw(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
|
||||
"""Get raw SAN string from cert or csr, parse it as UTF-8 and return.
|
||||
|
||||
:param cert_or_req: Certificate or CSR.
|
||||
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
:returns: raw SAN strings, decoded from bytes as UTF-8
|
||||
:rtype: `list` of `unicode`
|
||||
|
||||
"""
|
||||
# This function finds SANs by dumping the certificate/CSR to text and
|
||||
# searching for "X509v3 Subject Alternative Name" in the text. This method
|
||||
# is used because in PyOpenSSL versions <0.17 the `_subjectAltNameString` method is
|
||||
# not able to parse IP addresses in the subjectAltName string.
|
||||
|
||||
if isinstance(cert_or_req, crypto.X509):
|
||||
# pylint: disable=line-too-long
|
||||
text = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
|
||||
else:
|
||||
text = crypto.dump_certificate_request(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
|
||||
# WARNING: this function does not support multiple SANs extensions.
|
||||
# Multiple X509v3 extensions of the same type is disallowed by RFC 5280.
|
||||
raw_san = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
|
||||
|
||||
parts_separator = ", "
|
||||
# WARNING: this function assumes that no SAN can include
|
||||
# parts_separator, hence the split!
|
||||
sans_parts = [] if raw_san is None else raw_san.group(1).split(parts_separator)
|
||||
return sans_parts
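Building on the make_csr sketch above, a hedged example of reading the SANs back out with the two private parsing helpers (assumes the `csr_pem` variable from that earlier sketch).
import OpenSSL
from acme import crypto_util

# Assumes `csr_pem` from the make_csr sketch earlier in this section.
csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
print(crypto_util._pyopenssl_cert_or_req_all_names(csr))  # DNS names only
print(crypto_util._pyopenssl_cert_or_req_san_ip(csr))     # IP SANs as strings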
|
||||
|
||||
|
||||
def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
|
||||
not_before: Optional[int] = None,
|
||||
validity: int = (7 * 24 * 60 * 60), force_san: bool = True,
|
||||
extensions: Optional[List[crypto.X509Extension]] = None,
|
||||
ips: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
|
||||
) -> crypto.X509:
|
||||
"""Generate new self-signed certificate.
|
||||
|
||||
:type domains: `list` of `unicode`
|
||||
@@ -280,6 +357,7 @@ def gen_ss_cert(key, domains, not_before=None,
|
||||
:param bool force_san:
|
||||
:param extensions: List of additional extensions to include in the cert.
|
||||
:type extensions: `list` of `OpenSSL.crypto.X509Extension`
|
||||
:type ips: `list` of (`ipaddress.IPv4Address` or `ipaddress.IPv6Address`)
|
||||
|
||||
If more than one domain is provided, all of the domains are put into
|
||||
``subjectAltName`` X.509 extension and first domain is set as the
|
||||
@@ -287,28 +365,39 @@ def gen_ss_cert(key, domains, not_before=None,
|
||||
extension is used, unless `force_san` is ``True``.
|
||||
|
||||
"""
|
||||
assert domains, "Must provide one or more hostnames for the cert."
|
||||
assert domains or ips, "Must provide one or more hostnames or IPs for the cert."
|
||||
|
||||
cert = crypto.X509()
|
||||
cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
|
||||
cert.set_version(2)
|
||||
|
||||
if extensions is None:
|
||||
extensions = []
|
||||
|
||||
if domains is None:
|
||||
domains = []
|
||||
if ips is None:
|
||||
ips = []
|
||||
extensions.append(
|
||||
crypto.X509Extension(
|
||||
b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
|
||||
)
|
||||
|
||||
if len(domains) > 0:
|
||||
cert.get_subject().CN = domains[0]
|
||||
# TODO: what to put into cert.get_subject()?
|
||||
cert.set_issuer(cert.get_subject())
|
||||
|
||||
if force_san or len(domains) > 1:
|
||||
sanlist = []
|
||||
for address in domains:
|
||||
sanlist.append('DNS:' + address)
|
||||
for ip in ips:
|
||||
sanlist.append('IP:' + ip.exploded)
|
||||
san_string = ', '.join(sanlist).encode('ascii')
|
||||
if force_san or len(domains) > 1 or len(ips) > 0:
|
||||
extensions.append(crypto.X509Extension(
|
||||
b"subjectAltName",
|
||||
critical=False,
|
||||
value=b", ".join(b"DNS:" + d.encode() for d in domains)
|
||||
value=san_string
|
||||
))
|
||||
|
||||
cert.add_extensions(extensions)
|
||||
@@ -321,7 +410,7 @@ def gen_ss_cert(key, domains, not_before=None,
|
||||
return cert
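A hedged sketch of the extended gen_ss_cert, generating a throwaway self-signed certificate with both a DNS and an IP SAN; the hostname and address are illustrative.
import ipaddress
import OpenSSL
from acme import crypto_util

key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
cert = crypto_util.gen_ss_cert(key, domains=["localhost"],
                               ips=[ipaddress.ip_address("127.0.0.1")])
print(cert.get_subject().CN)  # "localhost"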
|
||||
|
||||
|
||||
def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
|
||||
def dump_pyopenssl_chain(chain: List[crypto.X509], filetype: int = crypto.FILETYPE_PEM) -> bytes:
|
||||
"""Dump certificate chain into a bundle.
|
||||
|
||||
:param list chain: List of `OpenSSL.crypto.X509` (or wrapped in
|
||||
@@ -334,7 +423,7 @@ def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
|
||||
# XXX: returns empty string when no chain is available, which
|
||||
# shuts up RenewableCert, but might not be the best solution...
|
||||
|
||||
def _dump_cert(cert):
|
||||
def _dump_cert(cert: Union[jose.ComparableX509, crypto.X509]) -> bytes:
|
||||
if isinstance(cert, jose.ComparableX509):
|
||||
cert = cert.wrapped
|
||||
return crypto.dump_certificate(filetype, cert)
|
||||
|
||||
@@ -1,5 +1,17 @@
|
||||
"""ACME errors."""
|
||||
import typing
|
||||
from typing import Any
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Set
|
||||
|
||||
from josepy import errors as jose_errors
|
||||
import requests
|
||||
|
||||
# We import acme.messages only during type check to avoid circular dependencies. Type references
|
||||
# to acme.message.* must be quoted to be lazily initialized and avoid compilation errors.
|
||||
if typing.TYPE_CHECKING:
|
||||
from acme import messages # pragma: no cover
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
@@ -28,17 +40,12 @@ class NonceError(ClientError):
|
||||
|
||||
class BadNonce(NonceError):
|
||||
"""Bad nonce error."""
|
||||
def __init__(self, nonce, error, *args, **kwargs):
|
||||
# MyPy complains here that there is too many arguments for BaseException constructor.
|
||||
# This is an error fixed in typeshed, see https://github.com/python/mypy/issues/4183
|
||||
# The fix is included in MyPy>=0.740, but upgrading it would bring dozen of errors due to
|
||||
# new types definitions. So we ignore the error until the code base is fixed to match
|
||||
# with MyPy>=0.740 referential.
|
||||
super(BadNonce, self).__init__(*args, **kwargs) # type: ignore
|
||||
def __init__(self, nonce: str, error: Exception, *args: Any) -> None:
|
||||
super().__init__(*args)
|
||||
self.nonce = nonce
|
||||
self.error = error
|
||||
|
||||
def __str__(self):
|
||||
def __str__(self) -> str:
|
||||
return 'Invalid nonce ({0!r}): {1}'.format(self.nonce, self.error)
|
||||
|
||||
|
||||
@@ -49,15 +56,14 @@ class MissingNonce(NonceError):
|
||||
Replay-Nonce header field in each successful response to a POST it
|
||||
provides to a client (...)".
|
||||
|
||||
:ivar requests.Response response: HTTP Response
|
||||
:ivar requests.Response ~.response: HTTP Response
|
||||
|
||||
"""
|
||||
def __init__(self, response, *args, **kwargs):
|
||||
# See comment in BadNonce constructor above for an explanation of type: ignore here.
|
||||
super(MissingNonce, self).__init__(*args, **kwargs) # type: ignore
|
||||
def __init__(self, response: requests.Response, *args: Any) -> None:
|
||||
super().__init__(*args)
|
||||
self.response = response
|
||||
|
||||
def __str__(self):
|
||||
def __str__(self) -> str:
|
||||
return ('Server {0} response did not include a replay '
|
||||
'nonce, headers: {1} (This may be a service outage)'.format(
|
||||
self.response.request.method, self.response.headers))
|
||||
@@ -75,17 +81,20 @@ class PollError(ClientError):
|
||||
to the most recently updated one
|
||||
|
||||
"""
|
||||
def __init__(self, exhausted, updated):
|
||||
def __init__(self, exhausted: Set['messages.AuthorizationResource'],
|
||||
updated: Mapping['messages.AuthorizationResource',
|
||||
'messages.AuthorizationResource']
|
||||
) -> None:
|
||||
self.exhausted = exhausted
|
||||
self.updated = updated
|
||||
super(PollError, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
@property
|
||||
def timeout(self):
|
||||
def timeout(self) -> bool:
|
||||
"""Was the error caused by timeout?"""
|
||||
return bool(self.exhausted)
|
||||
|
||||
def __repr__(self):
|
||||
def __repr__(self) -> str:
|
||||
return '{0}(exhausted={1!r}, updated={2!r})'.format(
|
||||
self.__class__.__name__, self.exhausted, self.updated)
|
||||
|
||||
@@ -94,9 +103,9 @@ class ValidationError(Error):
|
||||
"""Error for authorization failures. Contains a list of authorization
|
||||
resources, each of which is invalid and should have an error field.
|
||||
"""
|
||||
def __init__(self, failed_authzrs):
|
||||
def __init__(self, failed_authzrs: List['messages.AuthorizationResource']) -> None:
|
||||
self.failed_authzrs = failed_authzrs
|
||||
super(ValidationError, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
|
||||
class TimeoutError(Error): # pylint: disable=redefined-builtin
|
||||
@@ -106,13 +115,13 @@ class TimeoutError(Error): # pylint: disable=redefined-builtin
|
||||
class IssuanceError(Error):
|
||||
"""Error sent by the server after requesting issuance of a certificate."""
|
||||
|
||||
def __init__(self, error):
|
||||
def __init__(self, error: 'messages.Error') -> None:
|
||||
"""Initialize.
|
||||
|
||||
:param messages.Error error: The error provided by the server.
|
||||
"""
|
||||
self.error = error
|
||||
super(IssuanceError, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
|
||||
class ConflictError(ClientError):
|
||||
@@ -123,9 +132,9 @@ class ConflictError(ClientError):
|
||||
|
||||
Also used in V2 of the ACME client for the same purpose.
|
||||
"""
|
||||
def __init__(self, location):
|
||||
def __init__(self, location: str) -> None:
|
||||
self.location = location
|
||||
super(ConflictError, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
|
||||
class WildcardUnsupportedError(Error):
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
"""ACME JSON fields."""
|
||||
import datetime
|
||||
from typing import Any
|
||||
|
||||
import logging
|
||||
|
||||
import josepy as jose
|
||||
@@ -10,17 +13,17 @@ logger = logging.getLogger(__name__)
|
||||
class Fixed(jose.Field):
|
||||
"""Fixed field."""
|
||||
|
||||
def __init__(self, json_name, value):
|
||||
def __init__(self, json_name: str, value: Any) -> None:
|
||||
self.value = value
|
||||
super(Fixed, self).__init__(
|
||||
super().__init__(
|
||||
json_name=json_name, default=value, omitempty=False)
|
||||
|
||||
def decode(self, value):
|
||||
def decode(self, value: Any) -> Any:
|
||||
if value != self.value:
|
||||
raise jose.DeserializationError('Expected {0!r}'.format(self.value))
|
||||
return self.value
|
||||
|
||||
def encode(self, value):
|
||||
def encode(self, value: Any) -> Any:
|
||||
if value != self.value:
|
||||
logger.warning(
|
||||
'Overriding fixed field (%s) with %r', self.json_name, value)
|
||||
@@ -37,11 +40,11 @@ class RFC3339Field(jose.Field):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def default_encoder(cls, value):
|
||||
def default_encoder(cls, value: datetime.datetime) -> str:
|
||||
return pyrfc3339.generate(value)
|
||||
|
||||
@classmethod
|
||||
def default_decoder(cls, value):
|
||||
def default_decoder(cls, value: str) -> datetime.datetime:
|
||||
try:
|
||||
return pyrfc3339.parse(value)
|
||||
except ValueError as error:
|
||||
@@ -51,12 +54,12 @@ class RFC3339Field(jose.Field):
|
||||
class Resource(jose.Field):
|
||||
"""Resource MITM field."""
|
||||
|
||||
def __init__(self, resource_type, *args, **kwargs):
|
||||
def __init__(self, resource_type: str, *args: Any, **kwargs: Any) -> None:
|
||||
self.resource_type = resource_type
|
||||
super(Resource, self).__init__(
|
||||
super().__init__(
|
||||
'resource', default=resource_type, *args, **kwargs)
|
||||
|
||||
def decode(self, value):
|
||||
def decode(self, value: Any) -> Any:
|
||||
if value != self.resource_type:
|
||||
raise jose.DeserializationError(
|
||||
'Wrong resource type: {0} instead of {1}'.format(
|
||||
|
||||
@@ -4,6 +4,8 @@ The JWS implementation in josepy only implements the base JOSE standard. In
|
||||
order to support the new header fields defined in ACME, this module defines some
|
||||
ACME-specific classes that layer on top of josepy.
|
||||
"""
|
||||
from typing import Optional
|
||||
|
||||
import josepy as jose
|
||||
|
||||
|
||||
@@ -14,8 +16,10 @@ class Header(jose.Header):
|
||||
kid = jose.Field('kid', omitempty=True)
|
||||
url = jose.Field('url', omitempty=True)
|
||||
|
||||
@nonce.decoder
|
||||
def nonce(value): # pylint: disable=no-self-argument,missing-function-docstring
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that nonce is redefined. Let's ignore the type check here.
|
||||
@nonce.decoder # type: ignore
|
||||
def nonce(value: str) -> bytes: # pylint: disable=no-self-argument,missing-function-docstring
|
||||
try:
|
||||
return jose.decode_b64jose(value)
|
||||
except jose.DeserializationError as error:
|
||||
@@ -44,11 +48,12 @@ class JWS(jose.JWS):
|
||||
|
||||
@classmethod
|
||||
# pylint: disable=arguments-differ
|
||||
def sign(cls, payload, key, alg, nonce, url=None, kid=None):
|
||||
def sign(cls, payload: bytes, key: jose.JWK, alg: jose.JWASignature, nonce: Optional[bytes],
|
||||
url: Optional[str] = None, kid: Optional[str] = None) -> jose.JWS:
|
||||
# Per ACME spec, jwk and kid are mutually exclusive, so only include a
|
||||
# jwk field if kid is not provided.
|
||||
include_jwk = kid is None
|
||||
return super(JWS, cls).sign(payload, key=key, alg=alg,
|
||||
return super().sign(payload, key=key, alg=alg,
|
||||
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
|
||||
nonce=nonce, url=url, kid=kid,
|
||||
include_jwk=include_jwk)
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
"""Shim class to not have to depend on typing module in prod."""
|
||||
import sys
|
||||
"""Simple shim around the typing module.
|
||||
|
||||
This was useful when this code supported Python 2 and typing wasn't always
|
||||
available. This code is being kept for now for backwards compatibility.
|
||||
|
||||
class TypingClass(object):
|
||||
"""Ignore import errors by getting anything"""
|
||||
def __getattr__(self, name):
|
||||
return None
|
||||
|
||||
try:
|
||||
# mypy doesn't respect modifying sys.modules
|
||||
"""
|
||||
import warnings
|
||||
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
|
||||
from typing import Collection, IO # type: ignore
|
||||
except ImportError:
|
||||
# mypy complains because TypingClass is not a module
|
||||
sys.modules[__name__] = TypingClass() # type: ignore
|
||||
from typing import Any
|
||||
|
||||
warnings.warn("acme.magic_typing is deprecated and will be removed in a future release.",
|
||||
DeprecationWarning)
|
||||
|
||||
|
||||
class TypingClass:
|
||||
"""Ignore import errors by getting anything"""
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
return None # pragma: no cover
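A hedged sketch of the new deprecation behaviour: in a fresh interpreter, importing the shim raises the module-level DeprecationWarning (later imports are cached and will not warn again).
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    import acme.magic_typing  # noqa: F401  # deprecated shim, kept for compatibility
assert any(issubclass(w.category, DeprecationWarning) for w in caught)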
|
||||
|
||||
@@ -1,8 +1,17 @@
|
||||
"""ACME protocol messages."""
|
||||
from collections.abc import Hashable
|
||||
import json
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Iterator
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import MutableMapping
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import Optional
|
||||
|
||||
import josepy as jose
|
||||
import six
|
||||
|
||||
from acme import challenges
|
||||
from acme import errors
|
||||
@@ -11,13 +20,6 @@ from acme import jws
|
||||
from acme import util
|
||||
from acme.mixins import ResourceMixin
|
||||
|
||||
try:
|
||||
from collections.abc import Hashable
|
||||
except ImportError: # pragma: no cover
|
||||
from collections import Hashable
|
||||
|
||||
|
||||
|
||||
OLD_ERROR_PREFIX = "urn:acme:error:"
|
||||
ERROR_PREFIX = "urn:ietf:params:acme:error:"
|
||||
|
||||
@@ -61,14 +63,13 @@ ERROR_TYPE_DESCRIPTIONS.update(dict( # add errors with old prefix, deprecate me
|
||||
(OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()))
|
||||
|
||||
|
||||
def is_acme_error(err):
|
||||
def is_acme_error(err: BaseException) -> bool:
|
||||
"""Check if argument is an ACME error."""
|
||||
if isinstance(err, Error) and (err.typ is not None):
|
||||
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
|
||||
return False
|
||||
|
||||
|
||||
@six.python_2_unicode_compatible
|
||||
class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
"""ACME error.
|
||||
|
||||
@@ -84,7 +85,7 @@ class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
detail = jose.Field('detail', omitempty=True)
|
||||
|
||||
@classmethod
|
||||
def with_code(cls, code, **kwargs):
|
||||
def with_code(cls, code: str, **kwargs: Any) -> 'Error':
|
||||
"""Create an Error instance with an ACME Error code.
|
||||
|
||||
:unicode code: An ACME error code, like 'dnssec'.
|
||||
@@ -95,10 +96,12 @@ class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
raise ValueError("The supplied code: %s is not a known ACME error"
|
||||
" code" % code)
|
||||
typ = ERROR_PREFIX + code
|
||||
return cls(typ=typ, **kwargs)
|
||||
# Mypy will not understand that the Error constructor accepts a named argument
|
||||
# "typ" because of josepy magic. Let's ignore the type check here.
|
||||
return cls(typ=typ, **kwargs) # type: ignore
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
def description(self) -> Optional[str]:
|
||||
"""Hardcoded error description based on its type.
|
||||
|
||||
:returns: Description if standard ACME error or ``None``.
|
||||
@@ -108,7 +111,7 @@ class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
return ERROR_TYPE_DESCRIPTIONS.get(self.typ)
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
def code(self) -> Optional[str]:
|
||||
"""ACME error code.
|
||||
|
||||
Basically self.typ without the ERROR_PREFIX.
|
||||
@@ -117,54 +120,53 @@ class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
:rtype: unicode
|
||||
|
||||
"""
|
||||
code = str(self.typ).split(':')[-1]
|
||||
code = str(self.typ).rsplit(':', maxsplit=1)[-1]
|
||||
if code in ERROR_CODES:
|
||||
return code
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
def __str__(self) -> str:
|
||||
return b' :: '.join(
|
||||
part.encode('ascii', 'backslashreplace') for part in
|
||||
(self.typ, self.description, self.detail, self.title)
|
||||
if part is not None).decode()
|
||||
|
||||
|
||||
class _Constant(jose.JSONDeSerializable, Hashable): # type: ignore
|
||||
class _Constant(jose.JSONDeSerializable, Hashable):
|
||||
"""ACME constant."""
|
||||
__slots__ = ('name',)
|
||||
POSSIBLE_NAMES = NotImplemented
|
||||
POSSIBLE_NAMES: Dict[str, '_Constant'] = NotImplemented
|
||||
|
||||
def __init__(self, name):
|
||||
super(_Constant, self).__init__()
|
||||
def __init__(self, name: str) -> None:
|
||||
super().__init__()
|
||||
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
|
||||
self.name = name
|
||||
|
||||
def to_partial_json(self):
|
||||
def to_partial_json(self) -> str:
|
||||
return self.name
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
def from_json(cls, jobj: str) -> '_Constant':
|
||||
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
|
||||
raise jose.DeserializationError(
|
||||
'{0} not recognized'.format(cls.__name__))
|
||||
return cls.POSSIBLE_NAMES[jobj]
|
||||
|
||||
def __repr__(self):
|
||||
def __repr__(self) -> str:
|
||||
return '{0}({1})'.format(self.__class__.__name__, self.name)
|
||||
|
||||
def __eq__(self, other):
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return isinstance(other, type(self)) and other.name == self.name
|
||||
|
||||
def __hash__(self):
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.__class__, self.name))
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
class Status(_Constant):
|
||||
"""ACME "status" field."""
|
||||
POSSIBLE_NAMES = {} # type: dict
|
||||
POSSIBLE_NAMES: Dict[str, 'Status'] = {}
|
||||
|
||||
|
||||
STATUS_UNKNOWN = Status('unknown')
|
||||
STATUS_PENDING = Status('pending')
|
||||
STATUS_PROCESSING = Status('processing')
|
||||
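A small sketch (assumed usage) of the _Constant machinery for Status values: each instance registers itself in POSSIBLE_NAMES, so JSON names round-trip to the same singleton objects.

from acme import messages

assert messages.STATUS_PENDING.to_partial_json() == 'pending'
assert messages.Status.from_json('pending') is messages.STATUS_PENDING
assert messages.Status.from_json('processing') is messages.STATUS_PROCESSING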
@@ -177,8 +179,11 @@ STATUS_DEACTIVATED = Status('deactivated')
|
||||
|
||||
class IdentifierType(_Constant):
|
||||
"""ACME identifier type."""
|
||||
POSSIBLE_NAMES = {} # type: dict
|
||||
POSSIBLE_NAMES: Dict[str, 'IdentifierType'] = {}
|
||||
|
||||
|
||||
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
|
||||
IDENTIFIER_IP = IdentifierType('ip') # IdentifierIP in pebble - not in Boulder yet
|
||||
|
||||
|
||||
class Identifier(jose.JSONObjectWithFields):
|
||||
@@ -195,7 +200,7 @@ class Identifier(jose.JSONObjectWithFields):
|
||||
class Directory(jose.JSONDeSerializable):
|
||||
"""Directory."""
|
||||
|
||||
_REGISTERED_TYPES = {} # type: dict
|
||||
_REGISTERED_TYPES: Dict[str, Type['Directory']] = {}
|
||||
|
||||
class Meta(jose.JSONObjectWithFields):
|
||||
"""Directory Meta."""
|
||||
@@ -205,60 +210,59 @@ class Directory(jose.JSONDeSerializable):
|
||||
caa_identities = jose.Field('caaIdentities', omitempty=True)
|
||||
external_account_required = jose.Field('externalAccountRequired', omitempty=True)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
|
||||
super(Directory.Meta, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
@property
|
||||
def terms_of_service(self):
|
||||
def terms_of_service(self) -> str:
|
||||
"""URL for the CA TOS"""
|
||||
return self._terms_of_service or self._terms_of_service_v2
|
||||
|
||||
def __iter__(self):
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
# When iterating over fields, use the external name 'terms_of_service' instead of
|
||||
# the internal '_terms_of_service'.
|
||||
for name in super(Directory.Meta, self).__iter__():
|
||||
for name in super().__iter__():
|
||||
yield name[1:] if name == '_terms_of_service' else name
|
||||
|
||||
def _internal_name(self, name):
|
||||
def _internal_name(self, name: str) -> str:
|
||||
return '_' + name if name == 'terms_of_service' else name
|
||||
|
||||
|
||||
@classmethod
|
||||
def _canon_key(cls, key):
|
||||
def _canon_key(cls, key: str) -> str:
|
||||
return getattr(key, 'resource_type', key)
|
||||
|
||||
@classmethod
|
||||
def register(cls, resource_body_cls):
|
||||
def register(cls, resource_body_cls: Type['Directory']) -> Type['Directory']:
|
||||
"""Register resource."""
|
||||
resource_type = resource_body_cls.resource_type
|
||||
assert resource_type not in cls._REGISTERED_TYPES
|
||||
cls._REGISTERED_TYPES[resource_type] = resource_body_cls
|
||||
return resource_body_cls
|
||||
|
||||
def __init__(self, jobj):
|
||||
def __init__(self, jobj: Mapping[str, Any]) -> None:
|
||||
canon_jobj = util.map_keys(jobj, self._canon_key)
|
||||
# TODO: check that everything is an absolute URL; acme-spec is
|
||||
# not clear on that
|
||||
self._jobj = canon_jobj
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
try:
|
||||
return self[name.replace('_', '-')]
|
||||
except KeyError as error:
|
||||
raise AttributeError(str(error))
|
||||
|
||||
def __getitem__(self, name):
|
||||
def __getitem__(self, name: str) -> Any:
|
||||
try:
|
||||
return self._jobj[self._canon_key(name)]
|
||||
except KeyError:
|
||||
raise KeyError('Directory field "' + self._canon_key(name) + '" not found')
|
||||
|
||||
def to_partial_json(self):
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
return self._jobj
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
def from_json(cls, jobj: MutableMapping[str, Any]) -> 'Directory':
|
||||
jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {}))
|
||||
return cls(jobj)
|
||||
|
||||
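A hedged sketch of the Directory lookup behaviour shown above (the URL is a placeholder): entries are reachable by item or attribute access, and unknown names surface as KeyError or AttributeError.

from acme import messages

directory = messages.Directory({'newAccount': 'https://acme.example/new-acct'})
assert directory['newAccount'] == 'https://acme.example/new-acct'
assert directory.newAccount == 'https://acme.example/new-acct'
try:
    directory.newOrder  # not provided above
except AttributeError:
    pass  # __getattr__ converts the missing key into AttributeError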
@@ -275,7 +279,7 @@ class Resource(jose.JSONObjectWithFields):
|
||||
class ResourceWithURI(Resource):
|
||||
"""ACME Resource with URI.
|
||||
|
||||
:ivar unicode uri: Location of the resource.
|
||||
:ivar unicode ~.uri: Location of the resource.
|
||||
|
||||
"""
|
||||
uri = jose.Field('uri') # no ChallengeResource.uri
|
||||
@@ -285,11 +289,12 @@ class ResourceBody(jose.JSONObjectWithFields):
|
||||
"""ACME Resource Body."""
|
||||
|
||||
|
||||
class ExternalAccountBinding(object):
|
||||
class ExternalAccountBinding:
|
||||
"""ACME External Account Binding"""
|
||||
|
||||
@classmethod
|
||||
def from_data(cls, account_public_key, kid, hmac_key, directory):
|
||||
def from_data(cls, account_public_key: jose.JWK, kid: str, hmac_key: str,
|
||||
directory: Directory) -> Dict[str, Any]:
|
||||
"""Create External Account Binding Resource from contact details, kid and hmac."""
|
||||
|
||||
key_json = json.dumps(account_public_key.to_partial_json()).encode()
|
||||
@@ -329,7 +334,9 @@ class Registration(ResourceBody):
|
||||
email_prefix = 'mailto:'
|
||||
|
||||
@classmethod
|
||||
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
|
||||
def from_data(cls, phone: Optional[str] = None, email: Optional[str] = None,
|
||||
external_account_binding: Optional[ExternalAccountBinding] = None,
|
||||
**kwargs: Any) -> 'Registration':
|
||||
"""
|
||||
Create registration resource from contact details.
|
||||
|
||||
@@ -358,19 +365,19 @@ class Registration(ResourceBody):
|
||||
|
||||
return cls(**kwargs)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Note if the user provides a value for the `contact` member."""
|
||||
if 'contact' in kwargs:
|
||||
if 'contact' in kwargs and kwargs['contact'] is not None:
|
||||
# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
|
||||
object.__setattr__(self, '_add_contact', True)
|
||||
super(Registration, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def _filter_contact(self, prefix):
|
||||
def _filter_contact(self, prefix: str) -> Tuple[str, ...]:
|
||||
return tuple(
|
||||
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
|
||||
if detail.startswith(prefix))
|
||||
|
||||
def _add_contact_if_appropriate(self, jobj):
|
||||
def _add_contact_if_appropriate(self, jobj: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
The `contact` member of Registration objects should not be required when
|
||||
de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
|
||||
@@ -387,23 +394,23 @@ class Registration(ResourceBody):
|
||||
|
||||
return jobj
|
||||
|
||||
def to_partial_json(self):
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
"""Modify josepy.JSONDeserializable.to_partial_json()"""
|
||||
jobj = super(Registration, self).to_partial_json()
|
||||
jobj = super().to_partial_json()
|
||||
return self._add_contact_if_appropriate(jobj)
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
def fields_to_partial_json(self) -> Dict[str, Any]:
|
||||
"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
jobj = super(Registration, self).fields_to_partial_json()
|
||||
jobj = super().fields_to_partial_json()
|
||||
return self._add_contact_if_appropriate(jobj)
|
||||
|
||||
@property
|
||||
def phones(self):
|
||||
def phones(self) -> Tuple[str, ...]:
|
||||
"""All phones found in the ``contact`` field."""
|
||||
return self._filter_contact(self.phone_prefix)
|
||||
|
||||
@property
|
||||
def emails(self):
|
||||
def emails(self) -> Tuple[str, ...]:
|
||||
"""All emails found in the ``contact`` field."""
|
||||
return self._filter_contact(self.email_prefix)
|
||||
|
||||
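A usage sketch of the contact handling above (the address is a placeholder): from_data builds the prefixed contact tuple, and 'contact' only appears in the serialized payload when the caller actually supplied one.

from acme import messages

reg = messages.Registration.from_data(email='admin@example.com')
assert reg.emails == ('admin@example.com',)
assert 'contact' in reg.to_partial_json()

empty = messages.Registration()
assert 'contact' not in empty.to_partial_json()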
@@ -464,39 +471,39 @@ class ChallengeBody(ResourceBody):
|
||||
error = jose.Field('error', decoder=Error.from_json,
|
||||
omitempty=True, default=None)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
|
||||
super(ChallengeBody, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def encode(self, name):
|
||||
return super(ChallengeBody, self).encode(self._internal_name(name))
|
||||
def encode(self, name: str) -> Any:
|
||||
return super().encode(self._internal_name(name))
|
||||
|
||||
def to_partial_json(self):
|
||||
jobj = super(ChallengeBody, self).to_partial_json()
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
jobj = super().to_partial_json()
|
||||
jobj.update(self.chall.to_partial_json())
|
||||
return jobj
|
||||
|
||||
@classmethod
|
||||
def fields_from_json(cls, jobj):
|
||||
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
|
||||
def fields_from_json(cls, jobj: Mapping[str, Any]) -> Dict[str, Any]:
|
||||
jobj_fields = super().fields_from_json(jobj)
|
||||
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
|
||||
return jobj_fields
|
||||
|
||||
@property
|
||||
def uri(self):
|
||||
def uri(self) -> str:
|
||||
"""The URL of this challenge."""
|
||||
return self._url or self._uri
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
return getattr(self.chall, name)
|
||||
|
||||
def __iter__(self):
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
# When iterating over fields, use the external name 'uri' instead of
|
||||
# the internal '_uri'.
|
||||
for name in super(ChallengeBody, self).__iter__():
|
||||
for name in super().__iter__():
|
||||
yield name[1:] if name == '_uri' else name
|
||||
|
||||
def _internal_name(self, name):
|
||||
def _internal_name(self, name: str) -> str:
|
||||
return '_' + name if name == 'uri' else name
|
||||
|
||||
|
||||
@@ -511,7 +518,7 @@ class ChallengeResource(Resource):
|
||||
authzr_uri = jose.Field('authzr_uri')
|
||||
|
||||
@property
|
||||
def uri(self):
|
||||
def uri(self) -> str:
|
||||
"""The URL of the challenge body."""
|
||||
return self.body.uri
|
||||
|
||||
@@ -539,12 +546,14 @@ class Authorization(ResourceBody):
|
||||
expires = fields.RFC3339Field('expires', omitempty=True)
|
||||
wildcard = jose.Field('wildcard', omitempty=True)
|
||||
|
||||
@challenges.decoder
|
||||
def challenges(value): # pylint: disable=no-self-argument,missing-function-docstring
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that challenge is redefined. Let's ignore the type check here.
|
||||
@challenges.decoder # type: ignore
|
||||
def challenges(value: List[Mapping[str, Any]]) -> Tuple[ChallengeBody, ...]: # pylint: disable=no-self-argument,missing-function-docstring
|
||||
return tuple(ChallengeBody.from_json(chall) for chall in value)
|
||||
|
||||
@property
|
||||
def resolved_combinations(self):
|
||||
def resolved_combinations(self) -> Tuple[Tuple[Dict[str, Any], ...], ...]:
|
||||
"""Combinations with challenges instead of indices."""
|
||||
return tuple(tuple(self.challenges[idx] for idx in combo)
|
||||
for combo in self.combinations) # pylint: disable=not-an-iterable
|
||||
@@ -627,7 +636,7 @@ class Order(ResourceBody):
|
||||
:ivar str finalize: URL to POST to to request issuance once all
|
||||
authorizations have "valid" status.
|
||||
:ivar datetime.datetime expires: When the order expires.
|
||||
:ivar .Error error: Any error that occurred during finalization, if applicable.
|
||||
:ivar ~.Error error: Any error that occurred during finalization, if applicable.
|
||||
"""
|
||||
identifiers = jose.Field('identifiers', omitempty=True)
|
||||
status = jose.Field('status', decoder=Status.from_json,
|
||||
@@ -638,10 +647,13 @@ class Order(ResourceBody):
|
||||
expires = fields.RFC3339Field('expires', omitempty=True)
|
||||
error = jose.Field('error', omitempty=True, decoder=Error.from_json)
|
||||
|
||||
@identifiers.decoder
|
||||
def identifiers(value): # pylint: disable=no-self-argument,missing-function-docstring
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that identifiers is redefined. Let's ignore the type check here.
|
||||
@identifiers.decoder # type: ignore
|
||||
def identifiers(value: List[Mapping[str, Any]]) -> Tuple[Identifier, ...]: # pylint: disable=no-self-argument,missing-function-docstring
|
||||
return tuple(Identifier.from_json(identifier) for identifier in value)
|
||||
|
||||
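A hedged sketch (assuming the remaining Order fields stay optional) of the identifiers decoder above: raw identifier dicts are decoded into Identifier objects when an order body is deserialized; the domain is a placeholder.

from acme import messages

order = messages.Order.from_json(
    {'identifiers': [{'type': 'dns', 'value': 'example.com'}], 'status': 'pending'})
assert order.identifiers[0].value == 'example.com'
assert order.identifiers[0].typ == messages.IDENTIFIER_FQDN
assert order.status == messages.STATUS_PENDING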
|
||||
class OrderResource(ResourceWithURI):
|
||||
"""Order Resource.
|
||||
|
||||
|
||||
@@ -1,26 +1,28 @@
|
||||
"""Useful mixins for Challenge and Resource objects"""
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
|
||||
|
||||
class VersionedLEACMEMixin(object):
|
||||
class VersionedLEACMEMixin:
|
||||
"""This mixin stores the version of Let's Encrypt's endpoint being used."""
|
||||
@property
|
||||
def le_acme_version(self):
|
||||
def le_acme_version(self) -> int:
|
||||
"""Define the version of ACME protocol to use"""
|
||||
return getattr(self, '_le_acme_version', 1)
|
||||
|
||||
@le_acme_version.setter
|
||||
def le_acme_version(self, version):
|
||||
def le_acme_version(self, version: int) -> None:
|
||||
# We need to use object.__setattr__ to not depend on the specific implementation of
|
||||
# __setattr__ in current class (eg. jose.TypedJSONObjectWithFields raises AttributeError
|
||||
# for any attempt to set an attribute to make objects immutable).
|
||||
object.__setattr__(self, '_le_acme_version', version)
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
def __setattr__(self, key: str, value: Any) -> None:
|
||||
if key == 'le_acme_version':
|
||||
# Required for @property to operate properly. See comment above.
|
||||
object.__setattr__(self, key, value)
|
||||
else:
|
||||
super(VersionedLEACMEMixin, self).__setattr__(key, value) # pragma: no cover
|
||||
super().__setattr__(key, value) # pragma: no cover
|
||||
|
||||
|
||||
class ResourceMixin(VersionedLEACMEMixin):
|
||||
@@ -28,14 +30,14 @@ class ResourceMixin(VersionedLEACMEMixin):
|
||||
This mixin generates a RFC8555 compliant JWS payload
|
||||
by removing the `resource` field if needed (eg. ACME v2 protocol).
|
||||
"""
|
||||
def to_partial_json(self):
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(ResourceMixin, self),
|
||||
return _safe_jobj_compliance(super(),
|
||||
'to_partial_json', 'resource')
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
def fields_to_partial_json(self) -> Dict[str, Any]:
|
||||
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(ResourceMixin, self),
|
||||
return _safe_jobj_compliance(super(),
|
||||
'fields_to_partial_json', 'resource')
|
||||
|
||||
|
||||
@@ -44,20 +46,21 @@ class TypeMixin(VersionedLEACMEMixin):
|
||||
This mixin allows generation of a RFC8555 compliant JWS payload
|
||||
by removing the `type` field if needed (eg. ACME v2 protocol).
|
||||
"""
|
||||
def to_partial_json(self):
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(TypeMixin, self),
|
||||
return _safe_jobj_compliance(super(),
|
||||
'to_partial_json', 'type')
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
def fields_to_partial_json(self) -> Dict[str, Any]:
|
||||
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(TypeMixin, self),
|
||||
return _safe_jobj_compliance(super(),
|
||||
'fields_to_partial_json', 'type')
|
||||
|
||||
|
||||
def _safe_jobj_compliance(instance, jobj_method, uncompliant_field):
|
||||
def _safe_jobj_compliance(instance: Any, jobj_method: str,
|
||||
uncompliant_field: str) -> Dict[str, Any]:
|
||||
if hasattr(instance, jobj_method):
|
||||
jobj = getattr(instance, jobj_method)()
|
||||
jobj: Dict[str, Any] = getattr(instance, jobj_method)()
|
||||
if instance.le_acme_version == 2:
|
||||
jobj.pop(uncompliant_field, None)
|
||||
return jobj
|
||||
|
||||
acme/acme/py.typed (new empty file)
@@ -1,17 +1,25 @@
|
||||
"""Support for standalone client challenge solvers. """
|
||||
import collections
|
||||
import functools
|
||||
import http.client as http_client
|
||||
import http.server as BaseHTTPServer
|
||||
import logging
|
||||
import socket
|
||||
import socketserver
|
||||
import threading
|
||||
from typing import Any
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
|
||||
from six.moves import BaseHTTPServer # type: ignore
|
||||
from six.moves import http_client
|
||||
from six.moves import socketserver # type: ignore
|
||||
from OpenSSL import crypto
|
||||
from OpenSSL import SSL
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme.magic_typing import List
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -19,7 +27,7 @@ logger = logging.getLogger(__name__)
|
||||
class TLSServer(socketserver.TCPServer):
|
||||
"""Generic TLS Server."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
self.ipv6 = kwargs.pop("ipv6", False)
|
||||
if self.ipv6:
|
||||
self.address_family = socket.AF_INET6
|
||||
@@ -31,18 +39,19 @@ class TLSServer(socketserver.TCPServer):
|
||||
self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
|
||||
socketserver.TCPServer.__init__(self, *args, **kwargs)
|
||||
|
||||
def _wrap_sock(self):
|
||||
def _wrap_sock(self) -> None:
|
||||
self.socket = crypto_util.SSLSocket(
|
||||
self.socket, cert_selection=self._cert_selection,
|
||||
alpn_selection=getattr(self, '_alpn_selection', None),
|
||||
method=self.method)
|
||||
|
||||
def _cert_selection(self, connection): # pragma: no cover
|
||||
def _cert_selection(self, connection: SSL.Connection
|
||||
) -> Tuple[crypto.PKey, crypto.X509]: # pragma: no cover
|
||||
"""Callback selecting certificate for connection."""
|
||||
server_name = connection.get_servername()
|
||||
return self.certs.get(server_name, None)
|
||||
|
||||
def server_bind(self):
|
||||
def server_bind(self) -> None:
|
||||
self._wrap_sock()
|
||||
return socketserver.TCPServer.server_bind(self)
|
||||
|
||||
@@ -54,7 +63,7 @@ class ACMEServerMixin:
|
||||
allow_reuse_address = True
|
||||
|
||||
|
||||
class BaseDualNetworkedServers(object):
|
||||
class BaseDualNetworkedServers:
|
||||
"""Base class for a pair of IPv6 and IPv4 servers that tries to do everything
|
||||
it's asked for both servers, but where failures in one server don't
|
||||
affect the other.
|
||||
@@ -62,10 +71,14 @@ class BaseDualNetworkedServers(object):
|
||||
If two servers are instantiated, they will serve on the same port.
|
||||
"""
|
||||
|
||||
def __init__(self, ServerClass, server_address, *remaining_args, **kwargs):
|
||||
def __init__(self, ServerClass: Type[socketserver.TCPServer], server_address: Tuple[str, int],
|
||||
*remaining_args: Any, **kwargs: Any) -> None:
|
||||
port = server_address[1]
|
||||
self.threads = [] # type: List[threading.Thread]
|
||||
self.servers = [] # type: List[ACMEServerMixin]
|
||||
self.threads: List[threading.Thread] = []
|
||||
self.servers: List[socketserver.BaseServer] = []
|
||||
|
||||
# Preserve socket error for re-raising, if no servers can be started
|
||||
last_socket_err: Optional[socket.error] = None
|
||||
|
||||
# Must try True first.
|
||||
# Ubuntu, for example, will fail to bind to IPv4 if we've already bound
|
||||
@@ -83,7 +96,8 @@ class BaseDualNetworkedServers(object):
|
||||
logger.debug(
|
||||
"Successfully bound to %s:%s using %s", new_address[0],
|
||||
new_address[1], "IPv6" if ip_version else "IPv4")
|
||||
except socket.error:
|
||||
except socket.error as e:
|
||||
last_socket_err = e
|
||||
if self.servers:
|
||||
# Already bound using IPv6.
|
||||
logger.debug(
|
||||
@@ -102,9 +116,12 @@ class BaseDualNetworkedServers(object):
|
||||
# bind to the same port for both servers.
|
||||
port = server.socket.getsockname()[1]
|
||||
if not self.servers:
|
||||
if last_socket_err:
|
||||
raise last_socket_err
|
||||
else: # pragma: no cover
|
||||
raise socket.error("Could not bind to IPv4 or IPv6.")
|
||||
|
||||
def serve_forever(self):
|
||||
def serve_forever(self) -> None:
|
||||
"""Wraps socketserver.TCPServer.serve_forever"""
|
||||
for server in self.servers:
|
||||
thread = threading.Thread(
|
||||
@@ -112,11 +129,11 @@ class BaseDualNetworkedServers(object):
|
||||
thread.start()
|
||||
self.threads.append(thread)
|
||||
|
||||
def getsocknames(self):
|
||||
def getsocknames(self) -> List[Tuple[str, int]]:
|
||||
"""Wraps socketserver.TCPServer.socket.getsockname"""
|
||||
return [server.socket.getsockname() for server in self.servers]
|
||||
|
||||
def shutdown_and_server_close(self):
|
||||
def shutdown_and_server_close(self) -> None:
|
||||
"""Wraps socketserver.TCPServer.shutdown, socketserver.TCPServer.server_close, and
|
||||
threading.Thread.join"""
|
||||
for server in self.servers:
|
||||
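A usage sketch (assumed) of the dual-stack wrapper described above: it binds IPv6 first, reuses the discovered port for IPv4, and re-raises the last socket error only if neither family could be bound.

from acme.standalone import HTTP01DualNetworkedServers

servers = HTTP01DualNetworkedServers(('', 0), resources=set())
servers.serve_forever()                # one background thread per bound server
print(servers.getsocknames())          # e.g. an IPv6 and an IPv4 socket sharing a port
servers.shutdown_and_server_close()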
@@ -132,13 +149,16 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
|
||||
|
||||
ACME_TLS_1_PROTOCOL = b"acme-tls/1"
|
||||
|
||||
def __init__(self, server_address, certs, challenge_certs, ipv6=False):
|
||||
def __init__(self, server_address: Tuple[str, int],
|
||||
certs: List[Tuple[crypto.PKey, crypto.X509]],
|
||||
challenge_certs: Mapping[str, Tuple[crypto.PKey, crypto.X509]],
|
||||
ipv6: bool = False) -> None:
|
||||
TLSServer.__init__(
|
||||
self, server_address, _BaseRequestHandlerWithLogging, certs=certs,
|
||||
ipv6=ipv6)
|
||||
self.challenge_certs = challenge_certs
|
||||
|
||||
def _cert_selection(self, connection):
|
||||
def _cert_selection(self, connection: SSL.Connection) -> Tuple[crypto.PKey, crypto.X509]:
|
||||
# TODO: We would like to serve challenge cert only if asked for it via
|
||||
# ALPN. To do this, we need to retrieve the list of protos from client
|
||||
# hello, but this is currently impossible with openssl [0], and ALPN
|
||||
@@ -148,9 +168,9 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
|
||||
# [0] https://github.com/openssl/openssl/issues/4952
|
||||
server_name = connection.get_servername()
|
||||
logger.debug("Serving challenge cert for server name %s", server_name)
|
||||
return self.challenge_certs.get(server_name, None)
|
||||
return self.challenge_certs[server_name]
|
||||
|
||||
def _alpn_selection(self, _connection, alpn_protos):
|
||||
def _alpn_selection(self, _connection: SSL.Connection, alpn_protos: List[bytes]) -> bytes:
|
||||
"""Callback to select alpn protocol."""
|
||||
if len(alpn_protos) == 1 and alpn_protos[0] == self.ACME_TLS_1_PROTOCOL:
|
||||
logger.debug("Agreed on %s ALPN", self.ACME_TLS_1_PROTOCOL)
|
||||
@@ -164,7 +184,7 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
|
||||
class HTTPServer(BaseHTTPServer.HTTPServer):
|
||||
"""Generic HTTP Server."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
self.ipv6 = kwargs.pop("ipv6", False)
|
||||
if self.ipv6:
|
||||
self.address_family = socket.AF_INET6
|
||||
@@ -176,7 +196,8 @@ class HTTPServer(BaseHTTPServer.HTTPServer):
|
||||
class HTTP01Server(HTTPServer, ACMEServerMixin):
|
||||
"""HTTP01 Server."""
|
||||
|
||||
def __init__(self, server_address, resources, ipv6=False, timeout=30):
|
||||
def __init__(self, server_address: Tuple[str, int], resources: Set[challenges.HTTP01],
|
||||
ipv6: bool = False, timeout: int = 30) -> None:
|
||||
HTTPServer.__init__(
|
||||
self, server_address, HTTP01RequestHandler.partial_init(
|
||||
simple_http_resources=resources, timeout=timeout), ipv6=ipv6)
|
||||
@@ -186,7 +207,7 @@ class HTTP01DualNetworkedServers(BaseDualNetworkedServers):
|
||||
"""HTTP01Server Wrapper. Tries everything for both. Failures for one don't
|
||||
affect the other."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
BaseDualNetworkedServers.__init__(self, HTTP01Server, *args, **kwargs)
|
||||
|
||||
|
||||
@@ -202,21 +223,37 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
HTTP01Resource = collections.namedtuple(
|
||||
"HTTP01Resource", "chall response validation")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
self.simple_http_resources = kwargs.pop("simple_http_resources", set())
|
||||
self.timeout = kwargs.pop('timeout', 30)
|
||||
self._timeout = kwargs.pop('timeout', 30)
|
||||
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
|
||||
self.server: HTTP01Server
|
||||
|
||||
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
||||
# In parent class BaseHTTPRequestHandler, 'timeout' is a class-level property but we
|
||||
# need to define its value during the initialization phase in HTTP01RequestHandler.
|
||||
# However MyPy does not appreciate that we dynamically shadow a class-level property
|
||||
# with an instance-level property (eg. self.timeout = ... in __init__()). So to make
|
||||
# everyone happy, we statically redefine 'timeout' as a method property, and set the
|
||||
# timeout value in a new internal instance-level property _timeout.
|
||||
@property
|
||||
def timeout(self) -> int: # type: ignore[override]
|
||||
"""
|
||||
The default timeout this server should apply to requests.
|
||||
:return: timeout to apply
|
||||
:rtype: int
|
||||
"""
|
||||
return self._timeout
|
||||
|
||||
def log_message(self, format: str, *args: Any) -> None: # pylint: disable=redefined-builtin
|
||||
"""Log arbitrary message."""
|
||||
logger.debug("%s - - %s", self.client_address[0], format % args)
|
||||
|
||||
def handle(self):
|
||||
def handle(self) -> None:
|
||||
"""Handle request."""
|
||||
self.log_message("Incoming request")
|
||||
BaseHTTPServer.BaseHTTPRequestHandler.handle(self)
|
||||
|
||||
def do_GET(self): # pylint: disable=invalid-name,missing-function-docstring
|
||||
def do_GET(self) -> None: # pylint: disable=invalid-name,missing-function-docstring
|
||||
if self.path == "/":
|
||||
self.handle_index()
|
||||
elif self.path.startswith("/" + challenges.HTTP01.URI_ROOT_PATH):
|
||||
@@ -224,21 +261,21 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
else:
|
||||
self.handle_404()
|
||||
|
||||
def handle_index(self):
|
||||
def handle_index(self) -> None:
|
||||
"""Handle index page."""
|
||||
self.send_response(200)
|
||||
self.send_header("Content-Type", "text/html")
|
||||
self.end_headers()
|
||||
self.wfile.write(self.server.server_version.encode())
|
||||
|
||||
def handle_404(self):
|
||||
def handle_404(self) -> None:
|
||||
"""Handler 404 Not Found errors."""
|
||||
self.send_response(http_client.NOT_FOUND, message="Not Found")
|
||||
self.send_header("Content-type", "text/html")
|
||||
self.end_headers()
|
||||
self.wfile.write(b"404")
|
||||
|
||||
def handle_simple_http_resource(self):
|
||||
def handle_simple_http_resource(self) -> None:
|
||||
"""Handle HTTP01 provisioned resources."""
|
||||
for resource in self.simple_http_resources:
|
||||
if resource.chall.path == self.path:
|
||||
@@ -254,7 +291,8 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
self.path)
|
||||
|
||||
@classmethod
|
||||
def partial_init(cls, simple_http_resources, timeout):
|
||||
def partial_init(cls, simple_http_resources: Set[challenges.HTTP01],
|
||||
timeout: int) -> 'functools.partial[HTTP01RequestHandler]':
|
||||
"""Partially initialize this handler.
|
||||
|
||||
This is useful because `socketserver.BaseServer` takes
|
||||
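A hedged sketch of the partial_init factory introduced above (the resource set and timeout are placeholders): socketserver only passes (request, client_address, server) to the handler class, so per-run state is pre-bound with functools.partial.

import functools
from acme.standalone import HTTP01RequestHandler

handler_factory = HTTP01RequestHandler.partial_init(
    simple_http_resources=set(), timeout=30)
assert isinstance(handler_factory, functools.partial)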
@@ -270,11 +308,11 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
class _BaseRequestHandlerWithLogging(socketserver.BaseRequestHandler):
|
||||
"""BaseRequestHandler with logging."""
|
||||
|
||||
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
||||
def log_message(self, format: str, *args: Any) -> None: # pylint: disable=redefined-builtin
|
||||
"""Log arbitrary message."""
|
||||
logger.debug("%s - - %s", self.client_address[0], format % args)
|
||||
|
||||
def handle(self):
|
||||
def handle(self) -> None:
|
||||
"""Handle request."""
|
||||
self.log_message("Incoming request")
|
||||
socketserver.BaseRequestHandler.handle(self)
|
||||
|
||||
@@ -1,7 +1,10 @@
"""ACME utilities."""
import six
from typing import Any
from typing import Callable
from typing import Dict
from typing import Mapping


def map_keys(dikt, func):
def map_keys(dikt: Mapping[Any, Any], func: Callable[[Any], Any]) -> Dict[Any, Any]:
"""Map dictionary keys."""
return {func(key): value for key, value in six.iteritems(dikt)}
return {func(key): value for key, value in dikt.items()}
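A one-line usage sketch of map_keys above (the key and URL are placeholders):

from acme.util import map_keys

assert map_keys({'terms-of-service': 'https://acme.example/tos'},
                lambda key: key.replace('-', '_')) == \
    {'terms_of_service': 'https://acme.example/tos'}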
@@ -58,7 +58,7 @@ master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'acme-python'
|
||||
copyright = u'2015-2015, Let\'s Encrypt Project'
|
||||
copyright = u'2015, Let\'s Encrypt Project'
|
||||
author = u'Let\'s Encrypt Project'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
@@ -85,7 +85,9 @@ language = 'en'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ['_build']
|
||||
exclude_patterns = [
|
||||
'_build',
|
||||
]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
|
||||
@@ -1 +1,3 @@
|
||||
:orphan:
|
||||
|
||||
.. literalinclude:: ../jws-help.txt
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
python -m acme.standalone -p 1234
|
||||
curl -k https://localhost:1234
|
||||
@@ -1 +0,0 @@
|
||||
../../../acme/testdata/rsa2048_cert.pem
|
||||
@@ -1 +0,0 @@
|
||||
../../../acme/testdata/rsa2048_key.pem
|
||||
@@ -7,4 +7,7 @@
|
||||
# in --editable mode (-e), just "pip install acme[docs]" does not work as
|
||||
# expected and "pip install -e acme[docs]" must be used instead
|
||||
|
||||
# We also pin our dependencies for increased stability.
|
||||
|
||||
-c ../tools/requirements.txt
|
||||
-e acme[docs]
|
||||
|
||||
@@ -1,44 +1,19 @@
|
||||
from distutils.version import LooseVersion
|
||||
import sys
|
||||
|
||||
from setuptools import __version__ as setuptools_version
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '1.11.0.dev0'
|
||||
version = '1.22.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
# load_pem_private/public_key (>=0.6)
|
||||
# rsa_recover_prime_factors (>=0.8)
|
||||
'cryptography>=1.2.3',
|
||||
# formerly known as acme.jose:
|
||||
# 1.1.0+ is required to avoid the warnings described at
|
||||
# https://github.com/certbot/josepy/issues/13.
|
||||
'josepy>=1.1.0',
|
||||
# Connection.set_tlsext_host_name (>=0.13) + matching Xenial requirements (>=0.15.1)
|
||||
'PyOpenSSL>=0.15.1',
|
||||
'cryptography>=2.1.4',
|
||||
'josepy>=1.9.0',
|
||||
'PyOpenSSL>=17.3.0',
|
||||
'pyrfc3339',
|
||||
'pytz',
|
||||
'requests[security]>=2.6.0', # security extras added in 2.4.1
|
||||
'requests>=2.14.2',
|
||||
'requests-toolbelt>=0.3.0',
|
||||
'setuptools',
|
||||
'six>=1.9.0', # needed for python_2_unicode_compatible
|
||||
]
|
||||
|
||||
setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
|
||||
if setuptools_known_environment_markers:
|
||||
install_requires.append('mock ; python_version < "3.3"')
|
||||
elif 'bdist_wheel' in sys.argv[1:]:
|
||||
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
|
||||
'of setuptools. Version 36.2+ of setuptools is required.')
|
||||
elif sys.version_info < (3,3):
|
||||
install_requires.append('mock')
|
||||
|
||||
dev_extras = [
|
||||
'pytest',
|
||||
'pytest-xdist',
|
||||
'tox',
|
||||
'setuptools>=39.0.1',
|
||||
]
|
||||
|
||||
docs_extras = [
|
||||
@@ -46,27 +21,31 @@ docs_extras = [
|
||||
'sphinx_rtd_theme',
|
||||
]
|
||||
|
||||
test_extras = [
|
||||
'pytest',
|
||||
'pytest-xdist',
|
||||
]
|
||||
|
||||
setup(
|
||||
name='acme',
|
||||
version=version,
|
||||
description='ACME protocol implementation in Python',
|
||||
url='https://github.com/letsencrypt/letsencrypt',
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
author_email='certbot-dev@eff.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
|
||||
python_requires='>=3.6',
|
||||
classifiers=[
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
],
|
||||
@@ -75,7 +54,7 @@ setup(
|
||||
include_package_data=True,
|
||||
install_requires=install_requires,
|
||||
extras_require={
|
||||
'dev': dev_extras,
|
||||
'docs': docs_extras,
|
||||
'test': test_extras,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
"""Tests for acme.challenges."""
|
||||
import urllib.parse as urllib_parse
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
import requests
|
||||
from six.moves.urllib import parse as urllib_parse
|
||||
|
||||
from acme import errors
|
||||
|
||||
@@ -295,7 +292,7 @@ class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
|
||||
def test_gen_verify_cert_gen_key(self):
|
||||
cert, key = self.response.gen_cert(self.domain)
|
||||
self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
|
||||
self.assertIsInstance(key, OpenSSL.crypto.PKey)
|
||||
self.assertTrue(self.response.verify_cert(self.domain, cert))
|
||||
|
||||
def test_verify_bad_cert(self):
|
||||
@@ -329,12 +326,12 @@ class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
self.response.probe_cert('foo.com')
|
||||
mock_gethostbyname.assert_called_once_with('foo.com')
|
||||
mock_probe_sni.assert_called_once_with(
|
||||
host='127.0.0.1', port=self.response.PORT, name='foo.com',
|
||||
host=b'127.0.0.1', port=self.response.PORT, name=b'foo.com',
|
||||
alpn_protocols=['acme-tls/1'])
|
||||
|
||||
self.response.probe_cert('foo.com', host='8.8.8.8')
|
||||
mock_probe_sni.assert_called_with(
|
||||
host='8.8.8.8', port=mock.ANY, name='foo.com',
|
||||
host=b'8.8.8.8', port=mock.ANY, name=b'foo.com',
|
||||
alpn_protocols=['acme-tls/1'])
|
||||
|
||||
@mock.patch('acme.challenges.TLSALPN01Response.probe_cert')
|
||||
@@ -434,7 +431,7 @@ class DNSTest(unittest.TestCase):
|
||||
mock_gen.return_value = mock.sentinel.validation
|
||||
response = self.msg.gen_response(KEY)
|
||||
from acme.challenges import DNSResponse
|
||||
self.assertTrue(isinstance(response, DNSResponse))
|
||||
self.assertIsInstance(response, DNSResponse)
|
||||
self.assertEqual(response.validation, mock.sentinel.validation)
|
||||
|
||||
def test_validation_domain_name(self):
|
||||
|
||||
@@ -2,17 +2,16 @@
|
||||
# pylint: disable=too-many-lines
|
||||
import copy
|
||||
import datetime
|
||||
import http.client as http_client
|
||||
import ipaddress
|
||||
import json
|
||||
import unittest
|
||||
from typing import Dict
|
||||
from unittest import mock
|
||||
|
||||
import josepy as jose
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
import OpenSSL
|
||||
import requests
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
|
||||
from acme import challenges
|
||||
from acme import errors
|
||||
@@ -25,6 +24,7 @@ import test_util
|
||||
CERT_DER = test_util.load_vector('cert.der')
|
||||
CERT_SAN_PEM = test_util.load_vector('cert-san.pem')
|
||||
CSR_SAN_PEM = test_util.load_vector('csr-san.pem')
|
||||
CSR_MIXED_PEM = test_util.load_vector('csr-mixed.pem')
|
||||
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
|
||||
KEY2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
|
||||
|
||||
@@ -64,7 +64,7 @@ class ClientTestBase(unittest.TestCase):
|
||||
self.contact = ('mailto:cert-admin@example.com', 'tel:+12025551212')
|
||||
reg = messages.Registration(
|
||||
contact=self.contact, key=KEY.public_key())
|
||||
the_arg = dict(reg) # type: Dict
|
||||
the_arg: Dict = dict(reg)
|
||||
self.new_reg = messages.NewRegistration(**the_arg)
|
||||
self.regr = messages.RegistrationResource(
|
||||
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
|
||||
@@ -92,7 +92,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
|
||||
"""Tests for acme.client.BackwardsCompatibleClientV2."""
|
||||
|
||||
def setUp(self):
|
||||
super(BackwardsCompatibleClientV2Test, self).setUp()
|
||||
super().setUp()
|
||||
# contains a loaded cert
|
||||
self.certr = messages.CertificateResource(
|
||||
body=messages_test.CERT)
|
||||
@@ -321,7 +321,7 @@ class ClientTest(ClientTestBase):
|
||||
"""Tests for acme.client.Client."""
|
||||
|
||||
def setUp(self):
|
||||
super(ClientTest, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
self.directory = DIRECTORY_V1
|
||||
|
||||
@@ -606,8 +606,8 @@ class ClientTest(ClientTestBase):
|
||||
# make sure that max_attempts is per-authorization, rather
|
||||
# than global
|
||||
max_attempts=max(len(authzrs[0].retries), len(authzrs[1].retries)))
|
||||
self.assertTrue(cert[0] is csr)
|
||||
self.assertTrue(cert[1] is updated_authzrs)
|
||||
self.assertIs(cert[0], csr)
|
||||
self.assertIs(cert[1], updated_authzrs)
|
||||
self.assertEqual(updated_authzrs[0].uri, 'a...')
|
||||
self.assertEqual(updated_authzrs[1].uri, 'b.')
|
||||
self.assertEqual(updated_authzrs[0].times, [
|
||||
@@ -643,7 +643,7 @@ class ClientTest(ClientTestBase):
|
||||
authzr = self.client.deactivate_authorization(self.authzr)
|
||||
self.assertEqual(authzb, authzr.body)
|
||||
self.assertEqual(self.client.net.post.call_count, 1)
|
||||
self.assertTrue(self.authzr.uri in self.net.post.call_args_list[0][0])
|
||||
self.assertIn(self.authzr.uri, self.net.post.call_args_list[0][0])
|
||||
|
||||
def test_check_cert(self):
|
||||
self.response.headers['Location'] = self.certr.uri
|
||||
@@ -702,7 +702,7 @@ class ClientTest(ClientTestBase):
|
||||
|
||||
def test_revocation_payload(self):
|
||||
obj = messages.Revocation(certificate=self.certr.body, reason=self.rsn)
|
||||
self.assertTrue('reason' in obj.to_partial_json().keys())
|
||||
self.assertIn('reason', obj.to_partial_json().keys())
|
||||
self.assertEqual(self.rsn, obj.to_partial_json()['reason'])
|
||||
|
||||
def test_revoke_bad_status_raises_error(self):
|
||||
@@ -718,7 +718,7 @@ class ClientV2Test(ClientTestBase):
|
||||
"""Tests for acme.client.ClientV2."""
|
||||
|
||||
def setUp(self):
|
||||
super(ClientV2Test, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
self.directory = DIRECTORY_V2
|
||||
|
||||
@@ -742,7 +742,7 @@ class ClientV2Test(ClientTestBase):
|
||||
self.orderr = messages.OrderResource(
|
||||
body=self.order,
|
||||
uri='https://www.letsencrypt-demo.org/acme/acct/1/order/1',
|
||||
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_SAN_PEM)
|
||||
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_MIXED_PEM)
|
||||
|
||||
def test_new_account(self):
|
||||
self.response.status_code = http_client.CREATED
|
||||
@@ -772,7 +772,7 @@ class ClientV2Test(ClientTestBase):
|
||||
|
||||
with mock.patch('acme.client.ClientV2._post_as_get') as mock_post_as_get:
|
||||
mock_post_as_get.side_effect = (authz_response, authz_response2)
|
||||
self.assertEqual(self.client.new_order(CSR_SAN_PEM), self.orderr)
|
||||
self.assertEqual(self.client.new_order(CSR_MIXED_PEM), self.orderr)
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_poll_and_finalize(self, mock_datetime):
|
||||
@@ -879,9 +879,9 @@ class ClientV2Test(ClientTestBase):
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.assertEqual(self.regr, self.client.update_registration(self.regr))
|
||||
self.assertNotEqual(self.client.net.account, None)
|
||||
self.assertIsNotNone(self.client.net.account)
|
||||
self.assertEqual(self.client.net.post.call_count, 2)
|
||||
self.assertTrue(DIRECTORY_V2.newAccount in self.net.post.call_args_list[0][0])
|
||||
self.assertIn(DIRECTORY_V2.newAccount, self.net.post.call_args_list[0][0])
|
||||
|
||||
self.response.json.return_value = self.regr.body.update(
|
||||
contact=()).to_json()
|
||||
@@ -945,7 +945,7 @@ class ClientNetworkTest(unittest.TestCase):
|
||||
self.response.links = {}
|
||||
|
||||
def test_init(self):
|
||||
self.assertTrue(self.net.verify_ssl is self.verify_ssl)
|
||||
self.assertIs(self.net.verify_ssl, self.verify_ssl)
|
||||
|
||||
def test_wrap_in_jws(self):
|
||||
# pylint: disable=protected-access
|
||||
@@ -1187,7 +1187,7 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
|
||||
|
||||
def send_request(*args, **kwargs):
|
||||
# pylint: disable=unused-argument,missing-docstring
|
||||
self.assertFalse("new_nonce_url" in kwargs)
|
||||
self.assertNotIn("new_nonce_url", kwargs)
|
||||
method = args[0]
|
||||
uri = args[1]
|
||||
if method == 'HEAD' and uri != "new_nonce_uri":
|
||||
@@ -1332,7 +1332,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
|
||||
from acme.client import ClientNetwork
|
||||
net = ClientNetwork(key=None, alg=None, source_address=self.source_address)
|
||||
for adapter in net.session.adapters.values():
|
||||
self.assertTrue(self.source_address in adapter.source_address)
|
||||
self.assertIn(self.source_address, adapter.source_address)
|
||||
|
||||
def test_behavior_assumption(self):
|
||||
"""This is a test that guardrails the HTTPAdapter behavior so that if the default for
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
"""Tests for acme.crypto_util."""
|
||||
import itertools
|
||||
import ipaddress
|
||||
import socket
|
||||
import socketserver
|
||||
import threading
|
||||
import time
|
||||
import unittest
|
||||
from typing import List
|
||||
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
import six
|
||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||
|
||||
from acme import errors
|
||||
import test_util
|
||||
@@ -27,8 +28,6 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
|
||||
class _TestServer(socketserver.TCPServer):
|
||||
|
||||
# six.moves.* | pylint: disable=attribute-defined-outside-init,no-init
|
||||
|
||||
def server_bind(self): # pylint: disable=missing-docstring
|
||||
self.socket = SSLSocket(socket.socket(),
|
||||
certs)
|
||||
@@ -62,7 +61,6 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
self.assertRaises(errors.Error, self._probe, b'bar')
|
||||
|
||||
def test_probe_connection_error(self):
|
||||
# pylint has a hard time with six
|
||||
self.server.server_close()
|
||||
original_timeout = socket.getdefaulttimeout()
|
||||
try:
|
||||
@@ -111,7 +109,6 @@ class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
|
||||
class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
|
||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san."""
|
||||
|
||||
|
||||
@classmethod
|
||||
def _call(cls, loader, name):
|
||||
# pylint: disable=protected-access
|
||||
@@ -121,7 +118,7 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
|
||||
@classmethod
|
||||
def _get_idn_names(cls):
|
||||
"""Returns expected names from '{cert,csr}-idnsans.pem'."""
|
||||
chars = [six.unichr(i) for i in itertools.chain(range(0x3c3, 0x400),
|
||||
chars = [chr(i) for i in itertools.chain(range(0x3c3, 0x400),
|
||||
range(0x641, 0x6fc),
|
||||
range(0x1820, 0x1877))]
|
||||
return [''.join(chars[i: i + 45]) + '.invalid'
|
||||
@@ -177,24 +174,73 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
|
||||
['chicago-cubs.venafi.example', 'cubs.venafi.example'])
|
||||
|
||||
|
||||
class PyOpenSSLCertOrReqSANIPTest(unittest.TestCase):
|
||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san_ip."""
|
||||
|
||||
class RandomSnTest(unittest.TestCase):
|
||||
"""Test for random certificate serial numbers."""
|
||||
@classmethod
|
||||
def _call(cls, loader, name):
|
||||
# pylint: disable=protected-access
|
||||
from acme.crypto_util import _pyopenssl_cert_or_req_san_ip
|
||||
return _pyopenssl_cert_or_req_san_ip(loader(name))
|
||||
|
||||
def _call_cert(self, name):
|
||||
return self._call(test_util.load_cert, name)
|
||||
|
||||
def _call_csr(self, name):
|
||||
return self._call(test_util.load_csr, name)
|
||||
|
||||
def test_cert_no_sans(self):
|
||||
self.assertEqual(self._call_cert('cert.pem'), [])
|
||||
|
||||
def test_csr_no_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-nosans.pem'), [])
|
||||
|
||||
def test_cert_domain_sans(self):
|
||||
self.assertEqual(self._call_cert('cert-san.pem'), [])
|
||||
|
||||
def test_csr_domain_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-san.pem'), [])
|
||||
|
||||
def test_cert_ip_two_sans(self):
|
||||
self.assertEqual(self._call_cert('cert-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])
|
||||
|
||||
def test_csr_ip_two_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])
|
||||
|
||||
def test_csr_ipv6_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-ipv6sans.pem'),
|
||||
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])
|
||||
|
||||
def test_cert_ipv6_sans(self):
|
||||
self.assertEqual(self._call_cert('cert-ipv6sans.pem'),
|
||||
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])
|
||||
|
||||
|
||||
class GenSsCertTest(unittest.TestCase):
|
||||
"""Test for gen_ss_cert (generation of self-signed cert)."""
|
||||
|
||||
|
||||
def setUp(self):
|
||||
self.cert_count = 5
|
||||
self.serial_num = [] # type: List[int]
|
||||
self.serial_num: List[int] = []
|
||||
self.key = OpenSSL.crypto.PKey()
|
||||
self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
|
||||
|
||||
def test_sn_collisions(self):
|
||||
from acme.crypto_util import gen_ss_cert
|
||||
|
||||
for _ in range(self.cert_count):
|
||||
cert = gen_ss_cert(self.key, ['dummy'], force_san=True)
|
||||
cert = gen_ss_cert(self.key, ['dummy'], force_san=True,
|
||||
ips=[ipaddress.ip_address("10.10.10.10")])
|
||||
self.serial_num.append(cert.get_serial_number())
|
||||
self.assertTrue(len(set(self.serial_num)) > 1)
|
||||
self.assertGreaterEqual(len(set(self.serial_num)), self.cert_count)
|
||||
|
||||
|
||||
def test_no_name(self):
|
||||
from acme.crypto_util import gen_ss_cert
|
||||
with self.assertRaises(AssertionError):
|
||||
gen_ss_cert(self.key, ips=[ipaddress.ip_address("1.1.1.1")])
|
||||
gen_ss_cert(self.key)
|
||||
|
||||
|
||||
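A hedged sketch of the gen_ss_cert call exercised by the updated test above (the key size and names are placeholders):

import ipaddress
import OpenSSL
from acme.crypto_util import gen_ss_cert

key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
cert = gen_ss_cert(key, ['dummy'], force_san=True,
                   ips=[ipaddress.ip_address('10.10.10.10')])
print(cert.get_serial_number())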
class MakeCSRTest(unittest.TestCase):
|
||||
"""Test for standalone functions."""
|
||||
@@ -209,8 +255,8 @@ class MakeCSRTest(unittest.TestCase):
|
||||
|
||||
def test_make_csr(self):
|
||||
csr_pem = self._call_with_key(["a.example", "b.example"])
|
||||
self.assertTrue(b'--BEGIN CERTIFICATE REQUEST--' in csr_pem)
|
||||
self.assertTrue(b'--END CERTIFICATE REQUEST--' in csr_pem)
|
||||
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--', csr_pem)
|
||||
self.assertIn(b'--END CERTIFICATE REQUEST--', csr_pem)
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
|
||||
@@ -226,6 +272,27 @@ class MakeCSRTest(unittest.TestCase):
|
||||
).get_data(),
|
||||
)
|
||||
|
||||
def test_make_csr_ip(self):
|
||||
csr_pem = self._call_with_key(["a.example"], False, [ipaddress.ip_address('127.0.0.1'), ipaddress.ip_address('::1')])
|
||||
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--' , csr_pem)
|
||||
self.assertIn(b'--END CERTIFICATE REQUEST--' , csr_pem)
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
|
||||
# have a get_extensions() method, so we skip this test if the method
|
||||
# isn't available.
|
||||
if hasattr(csr, 'get_extensions'):
|
||||
self.assertEqual(len(csr.get_extensions()), 1)
|
||||
self.assertEqual(csr.get_extensions()[0].get_data(),
|
||||
OpenSSL.crypto.X509Extension(
|
||||
b'subjectAltName',
|
||||
critical=False,
|
||||
value=b'DNS:a.example, IP:127.0.0.1, IP:::1',
|
||||
).get_data(),
|
||||
)
|
||||
# for IP san it's actually need to be octet-string,
|
||||
# but somewhere downstream thankfully handle it for us
|
||||
|
||||
def test_make_csr_must_staple(self):
|
||||
csr_pem = self._call_with_key(["a.example"], must_staple=True)
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
@@ -244,6 +311,9 @@ class MakeCSRTest(unittest.TestCase):
|
||||
self.assertEqual(len(must_staple_exts), 1,
|
||||
"Expected exactly one Must Staple extension")
|
||||
|
||||
def test_make_csr_without_hostname(self):
|
||||
self.assertRaises(ValueError, self._call_with_key)
|
||||
|
||||
|
||||
class DumpPyopensslChainTest(unittest.TestCase):
|
||||
"""Test for dump_pyopenssl_chain."""
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
"""Tests for acme.errors."""
|
||||
import unittest
|
||||
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
from unittest import mock
|
||||
|
||||
|
||||
class BadNonceTest(unittest.TestCase):
|
||||
@@ -28,8 +24,8 @@ class MissingNonceTest(unittest.TestCase):
|
||||
self.error = MissingNonce(self.response)
|
||||
|
||||
def test_str(self):
|
||||
self.assertTrue("FOO" in str(self.error))
|
||||
self.assertTrue("{}" in str(self.error))
|
||||
self.assertIn("FOO", str(self.error))
|
||||
self.assertIn("{}", str(self.error))
|
||||
|
||||
|
||||
class PollErrorTest(unittest.TestCase):
|
||||
|
||||
@@ -48,7 +48,7 @@ class JWSTest(unittest.TestCase):
|
||||
self.assertEqual(jws.signature.combined.nonce, self.nonce)
|
||||
self.assertEqual(jws.signature.combined.url, self.url)
|
||||
self.assertEqual(jws.signature.combined.kid, self.kid)
|
||||
self.assertEqual(jws.signature.combined.jwk, None)
|
||||
self.assertIsNone(jws.signature.combined.jwk)
|
||||
# TODO: check that nonce is in protected header
|
||||
|
||||
self.assertEqual(jws, JWS.from_json(jws.to_json()))
|
||||
@@ -58,7 +58,7 @@ class JWSTest(unittest.TestCase):
|
||||
jws = JWS.sign(payload=b'foo', key=self.privkey,
|
||||
alg=jose.RS256, nonce=self.nonce,
|
||||
url=self.url)
|
||||
self.assertEqual(jws.signature.combined.kid, None)
|
||||
self.assertIsNone(jws.signature.combined.kid)
|
||||
self.assertEqual(jws.signature.combined.jwk, self.pubkey)
|
||||
|
||||
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
"""Tests for acme.magic_typing."""
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
import warnings
|
||||
from unittest import mock
|
||||
|
||||
|
||||
class MagicTypingTest(unittest.TestCase):
|
||||
@@ -21,24 +18,13 @@ class MagicTypingTest(unittest.TestCase):
|
||||
sys.modules['typing'] = typing_class_mock
|
||||
if 'acme.magic_typing' in sys.modules:
|
||||
del sys.modules['acme.magic_typing'] # pragma: no cover
|
||||
with warnings.catch_warnings():
|
||||
warnings.filterwarnings("ignore", category=DeprecationWarning)
|
||||
from acme.magic_typing import Text
|
||||
self.assertEqual(Text, text_mock)
|
||||
del sys.modules['acme.magic_typing']
|
||||
sys.modules['typing'] = temp_typing
|
||||
|
||||
def test_import_failure(self):
|
||||
try:
|
||||
import typing as temp_typing
|
||||
except ImportError: # pragma: no cover
|
||||
temp_typing = None # pragma: no cover
|
||||
sys.modules['typing'] = None
|
||||
if 'acme.magic_typing' in sys.modules:
|
||||
del sys.modules['acme.magic_typing'] # pragma: no cover
|
||||
from acme.magic_typing import Text
|
||||
self.assertTrue(Text is None)
|
||||
del sys.modules['acme.magic_typing']
|
||||
sys.modules['typing'] = temp_typing
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
"""Tests for acme.messages."""
|
||||
from typing import Dict
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
import josepy as jose
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
|
||||
from acme import challenges
|
||||
import test_util
|
||||
@@ -43,13 +41,13 @@ class ErrorTest(unittest.TestCase):
|
||||
|
||||
def test_description(self):
|
||||
self.assertEqual('The request message was malformed', self.error.description)
|
||||
self.assertTrue(self.error_custom.description is None)
|
||||
self.assertIsNone(self.error_custom.description)
|
||||
|
||||
def test_code(self):
|
||||
from acme.messages import Error
|
||||
self.assertEqual('malformed', self.error.code)
|
||||
self.assertEqual(None, self.error_custom.code)
|
||||
self.assertEqual(None, Error().code)
|
||||
self.assertIsNone(self.error_custom.code)
|
||||
self.assertIsNone(Error().code)
|
||||
|
||||
def test_is_acme_error(self):
|
||||
from acme.messages import is_acme_error, Error
|
||||
@@ -84,7 +82,7 @@ class ConstantTest(unittest.TestCase):
|
||||
from acme.messages import _Constant
|
||||
|
||||
class MockConstant(_Constant): # pylint: disable=missing-docstring
|
||||
POSSIBLE_NAMES = {} # type: Dict
|
||||
POSSIBLE_NAMES: Dict = {}
|
||||
|
||||
self.MockConstant = MockConstant # pylint: disable=invalid-name
|
||||
self.const_a = MockConstant('a')
|
||||
@@ -262,10 +260,10 @@ class RegistrationTest(unittest.TestCase):
|
||||
self.assertEqual(empty_new_reg.contact, ())
|
||||
self.assertEqual(new_reg_with_contact.contact, ())
|
||||
|
||||
self.assertTrue('contact' not in empty_new_reg.to_partial_json())
|
||||
self.assertTrue('contact' not in empty_new_reg.fields_to_partial_json())
|
||||
self.assertTrue('contact' in new_reg_with_contact.to_partial_json())
|
||||
self.assertTrue('contact' in new_reg_with_contact.fields_to_partial_json())
|
||||
self.assertNotIn('contact', empty_new_reg.to_partial_json())
|
||||
self.assertNotIn('contact', empty_new_reg.fields_to_partial_json())
|
||||
self.assertIn('contact', new_reg_with_contact.to_partial_json())
|
||||
self.assertIn('contact', new_reg_with_contact.fields_to_partial_json())
|
||||
|
||||
|
||||
class UpdateRegistrationTest(unittest.TestCase):
|
||||
@@ -408,7 +406,7 @@ class AuthorizationResourceTest(unittest.TestCase):
|
||||
authzr = AuthorizationResource(
|
||||
uri=mock.sentinel.uri,
|
||||
body=mock.sentinel.body)
|
||||
self.assertTrue(isinstance(authzr, jose.JSONDeSerializable))
|
||||
self.assertIsInstance(authzr, jose.JSONDeSerializable)
|
||||
|
||||
|
||||
class CertificateRequestTest(unittest.TestCase):
|
||||
@@ -419,7 +417,7 @@ class CertificateRequestTest(unittest.TestCase):
|
||||
self.req = CertificateRequest(csr=CSR)
|
||||
|
||||
def test_json_de_serializable(self):
|
||||
self.assertTrue(isinstance(self.req, jose.JSONDeSerializable))
|
||||
self.assertIsInstance(self.req, jose.JSONDeSerializable)
|
||||
from acme.messages import CertificateRequest
|
||||
self.assertEqual(
|
||||
self.req, CertificateRequest.from_json(self.req.to_json()))
|
||||
@@ -435,7 +433,7 @@ class CertificateResourceTest(unittest.TestCase):
|
||||
cert_chain_uri=mock.sentinel.cert_chain_uri)
|
||||
|
||||
def test_json_de_serializable(self):
|
||||
self.assertTrue(isinstance(self.certr, jose.JSONDeSerializable))
|
||||
self.assertIsInstance(self.certr, jose.JSONDeSerializable)
|
||||
from acme.messages import CertificateResource
|
||||
self.assertEqual(
|
||||
self.certr, CertificateResource.from_json(self.certr.to_json()))
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
"""Tests for acme.standalone."""
|
||||
import http.client as http_client
|
||||
import socket
|
||||
import socketserver
|
||||
import threading
|
||||
import unittest
|
||||
from typing import Set
|
||||
from unittest import mock
|
||||
|
||||
import josepy as jose
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
import requests
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
@@ -44,7 +42,7 @@ class HTTP01ServerTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.account_key = jose.JWK.load(
|
||||
test_util.load_vector('rsa1024_key.pem'))
|
||||
self.resources = set() # type: Set
|
||||
self.resources: Set = set()
|
||||
|
||||
from acme.standalone import HTTP01Server
|
||||
self.server = HTTP01Server(('', 0), resources=self.resources)
|
||||
@@ -192,12 +190,18 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
|
||||
@mock.patch("socket.socket.bind")
|
||||
def test_fail_to_bind(self, mock_bind):
|
||||
mock_bind.side_effect = socket.error
|
||||
from errno import EADDRINUSE
|
||||
from acme.standalone import BaseDualNetworkedServers
|
||||
self.assertRaises(socket.error, BaseDualNetworkedServers,
|
||||
|
||||
mock_bind.side_effect = socket.error(EADDRINUSE, "Fake addr in use error")
|
||||
|
||||
with self.assertRaises(socket.error) as em:
|
||||
BaseDualNetworkedServers(
|
||||
BaseDualNetworkedServersTest.SingleProtocolServer,
|
||||
('', 0),
|
||||
socketserver.BaseRequestHandler)
|
||||
('', 0), socketserver.BaseRequestHandler)
|
||||
|
||||
self.assertEqual(em.exception.errno, EADDRINUSE)
|
||||
|
||||
|
||||
def test_ports_equal(self):
|
||||
from acme.standalone import BaseDualNetworkedServers
|
||||
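Note: the bind-failure test above now raises a concrete `socket.error(EADDRINUSE, ...)` and uses `assertRaises` as a context manager so the caught exception's `errno` can be checked. A standalone sketch of the same pattern (the helper name is illustrative, not from the diff):

    import socket
    import unittest
    from errno import EADDRINUSE

    def bind_server():
        """Stand-in for code that fails to bind a socket."""
        raise socket.error(EADDRINUSE, "Fake addr in use error")

    class BindErrorTest(unittest.TestCase):
        def test_errno_is_preserved(self):
            # The context manager exposes the exception instance for inspection.
            with self.assertRaises(socket.error) as em:
                bind_server()
            self.assertEqual(em.exception.errno, EADDRINUSE)

    if __name__ == "__main__":
        unittest.main()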
@@ -221,7 +225,7 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.account_key = jose.JWK.load(
|
||||
test_util.load_vector('rsa1024_key.pem'))
|
||||
self.resources = set() # type: Set
|
||||
self.resources: Set = set()
|
||||
|
||||
from acme.standalone import HTTP01DualNetworkedServers
|
||||
self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources)
|
||||
|
||||
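Note: throughout these files, `# type:` comments are rewritten as PEP 526 variable annotations, which require Python 3.6+. A small illustrative sketch of the two equivalent spellings:

    from typing import Set

    # Old: type expressed as a comment, visible only to the type checker.
    resources = set()  # type: Set

    # New: inline annotation, also recorded in __annotations__ at runtime.
    resources: Set = set()

    print(__annotations__)  # {'resources': typing.Set}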
21
acme/tests/testdata/cert-ipsans.pem
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDizCCAnOgAwIBAgIIPNBLQXwhoUkwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
|
||||
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxNzNiMjYwHhcNMjAwNTI5MTkxODA5
|
||||
WhcNMjUwNTI5MTkxODA5WjAWMRQwEgYDVQQDEwsxOTIuMC4yLjE0NTCCASIwDQYJ
|
||||
KoZIhvcNAQEBBQADggEPADCCAQoCggEBALyChb+NDA26GF1AfC0nzEdfOTchKw0h
|
||||
q41xEjonvg5UXgZf/aH/ntvugIkYP0MaFifNAjebOVVsemEVEtyWcUKTfBHKZGbZ
|
||||
ukTDwFIjfTccCfo6U/B2H7ZLzJIywl8DcUw9DypadeQBm8PS0VVR2ncy73dvaqym
|
||||
crhAwlASyXU0mhLqRDMMxfg5Bn/FWpcsIcDpLmPn8Q/FvdRc2t5ryBNw/aWOlwqT
|
||||
Oy16nbfLj2T0zG1A3aPuD+eT/JFUe/o3K7R+FAx7wt+RziQO46wLVVF1SueZUrIU
|
||||
zqN04Gl8Kt1WM2SniZ0gq/rORUNcPtT0NAEsEslTQfA+Trq6j2peqyMCAwEAAaOB
|
||||
yjCBxzAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUF
|
||||
BwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHj1mwZzP//nMIH2i58NRUl/arHn
|
||||
MB8GA1UdIwQYMBaAFF5DVAKabvIUvKFHGouscA2Qdpe6MDEGCCsGAQUFBwEBBCUw
|
||||
IzAhBggrBgEFBQcwAYYVaHR0cDovLzEyNy4wLjAuMTo0MDAyMBUGA1UdEQQOMAyH
|
||||
BMAAApGHBMsAcQEwDQYJKoZIhvcNAQELBQADggEBAHjSgDg76/UCIMSYddyhj18r
|
||||
LdNKjA7p8ovnErSkebFT4lIZ9f3Sma9moNr0w64M33NamuFyHe/KTdk90mvoW8Uu
|
||||
26aDekiRIeeMakzbAtDKn67tt2tbedKIYRATcSYVwsV46uZKbM621dZKIjjxOWpo
|
||||
IY6rZYrku8LYhoXJXOqRduV3cTRVuTm5bBa9FfVNtt6N1T5JOtKKDEhuSaF4RSug
|
||||
PDy3hQIiHrVvhPfVrXU3j6owz/8UCS5549inES9ONTFrvM9o0H1R/MsmGNXR5hF5
|
||||
iJqHKC7n8LZujhVnoFIpHu2Dsiefbfr+yRYJS4I+ezy6Nq/Ok8rc8zp0eoX+uyY=
|
||||
-----END CERTIFICATE-----
|
||||
22
acme/tests/testdata/cert-ipv6sans.pem
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDmzCCAoOgAwIBAgIIFdxeZP+v2rgwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
|
||||
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSA0M2M5NTcwHhcNMjAwNTMwMDQwNzMw
|
||||
WhcNMjUwNTMwMDQwNzMwWjAOMQwwCgYDVQQDEwM6OjEwggEiMA0GCSqGSIb3DQEB
|
||||
AQUAA4IBDwAwggEKAoIBAQC7VidVduJvqKtrSH0fw6PjE0cqL4Kfzo7klexWUkHG
|
||||
KVAa0fRVZFZ462jxKOt417V2U4WJQ6WHHO9PJ+3gW62d/MhCw8FRtUQS4nYFjqB6
|
||||
32+RFU21VRN7cWoQEqSwnEPbh/v/zv/KS5JhQ+swWUo79AOLm1kjnZWCKtcqh1Lc
|
||||
Ug5Tkpot6luoxTKp52MkchvXDpj0q2B/XpLJ8/pw5cqjv7mH12EDOK2HXllA+WwX
|
||||
ZpstcEhaA4FqtaHOW/OHnwTX5MUbINXE5YYHVEDR6moVM31/W/3pe9NDUMTDE7Si
|
||||
lVQnZbXM9NYbzZqlh+WhemDWwnIfGI6rtsfNEiirVEOlAgMBAAGjgeIwgd8wDgYD
|
||||
VR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNV
|
||||
HRMBAf8EAjAAMB0GA1UdDgQWBBS8DL+MZfDIy6AKky69Tgry2Vxq5DAfBgNVHSME
|
||||
GDAWgBRAsFqVenRRKgB1YPzWKzb9bzZ/ozAxBggrBgEFBQcBAQQlMCMwIQYIKwYB
|
||||
BQUHMAGGFWh0dHA6Ly8xMjcuMC4wLjE6NDAwMjAtBgNVHREEJjAkhxAAAAAAAAAA
|
||||
AAAAAAAAAAABhxCjvjLzIG7HXQlWDO6YWF7FMA0GCSqGSIb3DQEBCwUAA4IBAQBY
|
||||
M9UTZ3uaKMQ+He9kWR3p9jh6hTSD0FNi79ZdfkG0lgSzhhduhN7OhzQH2ihUUfa6
|
||||
rtKTw74fGbszhizCd9UB8YPKlm3si1Xbg6ZUQlA1RtoQo7RUGEa6ZbR68PKGm9Go
|
||||
hTTFIl/JS8jzxBR8jywZdyqtprUx+nnNUDiNk0hJtFLhw7OJH0AHlAUNqHsfD08m
|
||||
HXRdaV6q14HXU5g31slBat9H4D6tCU/2uqBURwW0wVdnqh4QeRfAeqiatJS9EmSF
|
||||
ctbc7n894Idy2Xce7NFoIy5cht3m6Rd42o/LmBsJopBmQcDPZT70/XzRtc2qE0cS
|
||||
CzBIGQHUJ6BfmBjrCQnp
|
||||
-----END CERTIFICATE-----
|
||||
16
acme/tests/testdata/csr-ipsans.pem
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIICbTCCAVUCAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKT/
|
||||
CE7Y5EYBvI4p7Frt763upIKHDHO/R5/TWMjG8Jm9qTMui8sbMgyh2Yh+lR/j/5Xd
|
||||
tQrhgC6wx10MrW2+3JtYS88HP1p6si8zU1dbK34n3NyyklR2RivW0R7dXgnYNy7t
|
||||
5YcDYLCrbRMIPINV/uHrmzIHWYUDNcZVdAfIM2AHfKYuV6Mepcn///5GR+l4GcAh
|
||||
Nkf9CW8OdAIuKdbyLCxVr0mUW/vJz1b12uxPsgUdax9sjXgZdT4pfMXADsFd1NeF
|
||||
atpsXU073inqtHru+2F9ijHTQ75TC+u/rr6eYl3BnBntac0gp/ADtDBii7/Q1JOO
|
||||
Bhq7xJNqqxIEdiyM7zcCAwEAAaAoMCYGCSqGSIb3DQEJDjEZMBcwFQYDVR0RBA4w
|
||||
DIcEwAACkYcEywBxATANBgkqhkiG9w0BAQsFAAOCAQEADG5g3zdbSCaXpZhWHkzE
|
||||
Mek3f442TUE1pB+ITRpthmM4N3zZWETYmbLCIAO624uMrRnbCCMvAoLs/L/9ETg/
|
||||
XMMFtonQC8u9i9tV8B1ceBh8lpIfa+8b9TMWH3bqnrbWQ+YIl+Yd0gXiCZWJ9vK4
|
||||
eM1Gddu/2bR6s/k4h/XAWRgEexqk57EHr1z0N+T9OoX939n3mVcNI+u9kfd5VJ0z
|
||||
VyA3R8WR6T6KlEl5P5pcWe5Kuyhi7xMmLVImXqBtvKq4O1AMfM+gQr/yn9aE8IRq
|
||||
khP7JrMBLUIub1c/qu2TfvnynNPSM/ZcOX+6PHdHmRkR3nI0Ndpv7Ntv31FTplAm
|
||||
Dw==
|
||||
-----END CERTIFICATE REQUEST-----
|
||||
16
acme/tests/testdata/csr-ipv6sans.pem
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIIChTCCAW0CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOIc
|
||||
UAppcqJfTkSqqOFqGt1v7lIJZPOcF4bcKI3d5cHAGbOuVxbC7uMaDuObwYLzoiED
|
||||
qnvs1NaEq2phO6KsgGESB7IE2LUjJivO7OnSZjNRpL5si/9egvBiNCn/50lULaWG
|
||||
gLEuyMfk3awZy2mVAymy7Grhbx069A4TH8TqsHuq2RpKyuDL27e/jUt6yYecb3pu
|
||||
hWMiWy3segif4tI46pkOW0/I6DpxyYD2OqOvzxm/voS9RMqE2+7YJA327H7bEi3N
|
||||
lJZEZ1zy7clZ9ga5fBQaetzbg2RyxTrZ7F919NQXSFoXgxb10Eg64wIpz0L3ooCm
|
||||
GEHehsZZexa3J5ccIvMCAwEAAaBAMD4GCSqGSIb3DQEJDjExMC8wLQYDVR0RBCYw
|
||||
JIcQAAAAAAAAAAAAAAAAAAAAAYcQo74y8yBux10JVgzumFhexTANBgkqhkiG9w0B
|
||||
AQsFAAOCAQEALvwVn0A/JPTCiNzcozHFnp5M23C9PXCplWc5u4k34d4XXzpSeFDz
|
||||
fL4gy7NpYIueme2K2ppw2j3PNQUdR6vQ5a75sriegWYrosL+7Q6Joh51ZyEUZQoD
|
||||
mNl4M4S4oX85EaChR6NFGBywTfjFarYi32XBTbFE7rK8N8KM+DQkNdwL1MXqaHWz
|
||||
F1obQKpNXlLedbCBOteV5Eg4zG3565zu/Gw/NhwzzV3mQmgxUcd1sMJxAfHQz4Vl
|
||||
ImLL+xMcR03nDsH2bgtDbK2tJm7WszSxA9tC+Xp2lRewxrnQloRWPYDz177WGQ5Q
|
||||
SoGDzTTtA6uWZxG8h7CkNLOGvA8LtU2rNA==
|
||||
-----END CERTIFICATE REQUEST-----
|
||||
16
acme/tests/testdata/csr-mixed.pem
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIICdjCCAV4CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMXq
|
||||
v1y8EIcCbaUIzCtOcLkLS0MJ35oS+6DmV5WB1A0cIk6YrjsHIsY2lwMm13BWIvmw
|
||||
tY+Y6n0rr7eViNx5ZRGHpHEI/TL3Neb+VefTydL5CgvK3dd4ex2kSbTaed3fmpOx
|
||||
qMajEduwNcZPCcmoEXPkfrCP8w2vKQUkQ+JRPcdX1nTuzticeRP5B7YCmJsmxkEh
|
||||
Y0tzzZ+NIRDARoYNofefY86h3e5q66gtJxccNchmIM3YQahhg5n3Xoo8hGfM/TIc
|
||||
R7ncCBCLO6vtqo0QFva/NQODrgOmOsmgvqPkUWQFdZfWM8yIaU826dktx0CPB78t
|
||||
TudnJ1rBRvGsjHMsZikCAwEAAaAxMC8GCSqGSIb3DQEJDjEiMCAwHgYDVR0RBBcw
|
||||
FYINYS5leGVtcGxlLmNvbYcEwAACbzANBgkqhkiG9w0BAQsFAAOCAQEAdGMcRCxq
|
||||
1X09gn1TNdMt64XUv+wdJCKDaJ+AgyIJj7QvVw8H5k7dOnxS4I+a/yo4jE+LDl2/
|
||||
AuHcBLFEI4ddewdJSMrTNZjuRYuOdr3KP7fL7MffICSBi45vw5EOXg0tnjJCEiKu
|
||||
6gcJgbLSP5JMMd7Haf33Q/VWsmHofR3VwOMdrnakwAU3Ff5WTuXTNVhL1kT/uLFX
|
||||
yW1ru6BF4unwNqSR2UeulljpNfRBsiN4zJK11W6n9KT0NkBr9zY5WCM4sW7i8k9V
|
||||
TeypWGo3jBKzYAGeuxZsB97U77jZ2lrGdBLZKfbcjnTeRVqCvCRrui4El7UGYFmj
|
||||
7s6OJyWx5DSV8w==
|
||||
-----END CERTIFICATE REQUEST-----
|
||||
@@ -9,7 +9,6 @@ import pkg_resources

from certbot import errors
from certbot import util

from certbot.compat import os

logger = logging.getLogger(__name__)
@@ -154,13 +153,10 @@ def parse_defines(apachectl):
return {}

for match in matches:
if match.count("=") > 1:
logger.error("Unexpected number of equal signs in "
"runtime config dump.")
raise errors.PluginError(
"Error parsing Apache runtime variables")
parts = match.partition("=")
variables[parts[0]] = parts[2]
# Value could also contain = so split only once
parts = match.split('=', 1)
value = parts[1] if len(parts) == 2 else ''
variables[parts[0]] = value

return variables
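Note: the parse_defines hunk stops treating a second "=" as a parse error and instead splits only on the first one, keeping the remainder as the value. A small sketch with an illustrative define:

    match = "MYVAR=foo=bar"

    # Old behaviour: more than one "=" was rejected as a parse error.
    assert match.count("=") > 1

    # New behaviour: split once, keep the rest (including any "=") as the value.
    name, value = match.split("=", 1)
    assert (name, value) == ("MYVAR", "foo=bar")

    # A define with no value still works: split returns a single element.
    parts = "JUSTAFLAG".split("=", 1)
    value = parts[1] if len(parts) == 2 else ""
    assert value == ""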
@@ -221,13 +217,14 @@ def _get_runtime_cfg(command):

"""
try:
proc = subprocess.Popen(
proc = subprocess.run(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
check=False,
env=util.env_no_snap_for_external_calls())
stdout, stderr = proc.communicate()
stdout, stderr = proc.stdout, proc.stderr

except (OSError, ValueError):
logger.error(
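Note: `_get_runtime_cfg` moves from `subprocess.Popen` plus `communicate()` to `subprocess.run`, which waits for the process and exposes the captured output as attributes of the returned CompletedProcess. A minimal sketch (the command here is illustrative, not the Apache one):

    import subprocess

    proc = subprocess.run(
        ["echo", "hello"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,  # decode bytes to str, as in the diff
        check=False,              # do not raise on a non-zero exit status
    )
    # run() returns a CompletedProcess; no communicate() call is needed.
    stdout, stderr = proc.stdout, proc.stderr
    assert stdout.strip() == "hello"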
@@ -1,4 +1,5 @@
|
||||
""" apacheconfig implementation of the ParserNode interfaces """
|
||||
from typing import Tuple
|
||||
|
||||
from certbot_apache._internal import assertions
|
||||
from certbot_apache._internal import interfaces
|
||||
@@ -14,7 +15,7 @@ class ApacheParserNode(interfaces.ParserNode):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs) # pylint: disable=unused-variable
|
||||
super(ApacheParserNode, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
self.ancestor = ancestor
|
||||
self.filepath = filepath
|
||||
self.dirty = dirty
|
||||
@@ -38,7 +39,7 @@ class ApacheCommentNode(ApacheParserNode):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
comment, kwargs = util.commentnode_kwargs(kwargs) # pylint: disable=unused-variable
|
||||
super(ApacheCommentNode, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
self.comment = comment
|
||||
|
||||
def __eq__(self, other): # pragma: no cover
|
||||
@@ -56,7 +57,7 @@ class ApacheDirectiveNode(ApacheParserNode):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
|
||||
super(ApacheDirectiveNode, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
self.name = name
|
||||
self.parameters = parameters
|
||||
self.enabled = enabled
|
||||
@@ -82,8 +83,8 @@ class ApacheBlockNode(ApacheDirectiveNode):
|
||||
""" apacheconfig implementation of BlockNode interface """
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(ApacheBlockNode, self).__init__(**kwargs)
|
||||
self.children = ()
|
||||
super().__init__(**kwargs)
|
||||
self.children: Tuple[ApacheParserNode, ...] = ()
|
||||
|
||||
def __eq__(self, other): # pragma: no cover
|
||||
if isinstance(other, self.__class__):
|
||||
|
||||
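Note: every `super(Class, self).__init__(...)` call in these parser-node classes is rewritten to the zero-argument `super()` form, which Python 3 resolves from the enclosing class automatically. A toy sketch (classes are illustrative):

    class Base:
        def __init__(self, **kwargs):
            self.meta = kwargs

    class Child(Base):
        def __init__(self, **kwargs):
            # Equivalent to super(Child, self).__init__(**kwargs) on Python 3,
            # without repeating the class name.
            super().__init__(**kwargs)
            self.extra = True

    c = Child(name="example")
    assert c.meta == {"name": "example"} and c.extra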
@@ -3,7 +3,6 @@ import fnmatch
|
||||
|
||||
from certbot_apache._internal import interfaces
|
||||
|
||||
|
||||
PASS = "CERTBOT_PASS_ASSERT"
|
||||
|
||||
|
||||
@@ -137,6 +136,6 @@ def assertEqualPathsList(first, second): # pragma: no cover
|
||||
if any(isPass(path) for path in second):
|
||||
return
|
||||
for fpath in first:
|
||||
assert any([fnmatch.fnmatch(fpath, spath) for spath in second])
|
||||
assert any(fnmatch.fnmatch(fpath, spath) for spath in second)
|
||||
for spath in second:
|
||||
assert any([fnmatch.fnmatch(fpath, spath) for fpath in first])
|
||||
assert any(fnmatch.fnmatch(fpath, spath) for fpath in first)
|
||||
|
||||
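Note: the assertion helpers above drop the list comprehension inside `any()` and pass a generator expression instead, so evaluation can short-circuit at the first match. A small sketch with illustrative data:

    import fnmatch

    first = ["/etc/apache2/sites-enabled/a.conf"]
    second = ["/etc/apache2/sites-enabled/*.conf", "/does/not/match"]

    # List form builds every comparison up front...
    assert any([fnmatch.fnmatch(first[0], pattern) for pattern in second])

    # ...while the generator form stops as soon as one pattern matches.
    assert any(fnmatch.fnmatch(first[0], pattern) for pattern in second)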
@@ -64,10 +64,10 @@ Translates over to:
|
||||
"/files/etc/apache2/apache2.conf/bLoCk[1]",
|
||||
]
|
||||
"""
|
||||
from acme.magic_typing import Set
|
||||
from typing import Set
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import assertions
|
||||
from certbot_apache._internal import interfaces
|
||||
@@ -80,7 +80,7 @@ class AugeasParserNode(interfaces.ParserNode):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs) # pylint: disable=unused-variable
|
||||
super(AugeasParserNode, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
self.ancestor = ancestor
|
||||
self.filepath = filepath
|
||||
self.dirty = dirty
|
||||
@@ -169,7 +169,7 @@ class AugeasCommentNode(AugeasParserNode):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
comment, kwargs = util.commentnode_kwargs(kwargs) # pylint: disable=unused-variable
|
||||
super(AugeasCommentNode, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
# self.comment = comment
|
||||
self.comment = comment
|
||||
|
||||
@@ -188,7 +188,7 @@ class AugeasDirectiveNode(AugeasParserNode):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
|
||||
super(AugeasDirectiveNode, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
self.name = name
|
||||
self.enabled = enabled
|
||||
if parameters:
|
||||
@@ -245,7 +245,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
""" Augeas implementation of BlockNode interface """
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(AugeasBlockNode, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
self.children = ()
|
||||
|
||||
def __eq__(self, other):
|
||||
@@ -355,7 +355,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
ownpath = self.metadata.get("augeaspath")
|
||||
|
||||
directives = self.parser.find_dir(name, start=ownpath, exclude=exclude)
|
||||
already_parsed = set() # type: Set[str]
|
||||
already_parsed: Set[str] = set()
|
||||
for directive in directives:
|
||||
# Remove the /arg part from the Augeas path
|
||||
directive = directive.partition("/arg")[0]
|
||||
|
||||
@@ -1,34 +1,27 @@
|
||||
"""Apache Configurator."""
|
||||
# pylint: disable=too-many-lines
|
||||
from collections import defaultdict
|
||||
from distutils.version import LooseVersion
|
||||
import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import re
|
||||
import socket
|
||||
import time
|
||||
|
||||
import zope.component
|
||||
import zope.interface
|
||||
try:
|
||||
import apacheconfig
|
||||
HAS_APACHECONFIG = True
|
||||
except ImportError: # pragma: no cover
|
||||
HAS_APACHECONFIG = False
|
||||
from typing import DefaultDict
|
||||
from typing import Dict
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Union
|
||||
|
||||
from acme import challenges
|
||||
from acme.magic_typing import DefaultDict
|
||||
from acme.magic_typing import Dict
|
||||
from acme.magic_typing import List
|
||||
from acme.magic_typing import Set
|
||||
from acme.magic_typing import Union
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge # pylint: disable=unused-import
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
from certbot.display import util as display_util
|
||||
from certbot.plugins import common
|
||||
from certbot.plugins.enhancements import AutoHSTSEnhancement
|
||||
from certbot.plugins.util import path_surgery
|
||||
@@ -40,10 +33,61 @@ from certbot_apache._internal import dualparser
|
||||
from certbot_apache._internal import http_01
|
||||
from certbot_apache._internal import obj
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.dualparser import DualBlockNode
|
||||
from certbot_apache._internal.obj import VirtualHost
|
||||
from certbot_apache._internal.parser import ApacheParser
|
||||
|
||||
try:
|
||||
import apacheconfig
|
||||
HAS_APACHECONFIG = True
|
||||
except ImportError: # pragma: no cover
|
||||
HAS_APACHECONFIG = False
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OsOptions:
|
||||
"""
|
||||
Dedicated class to describe the OS specificities (eg. paths, binary names)
|
||||
that the Apache configurator needs to be aware to operate properly.
|
||||
"""
|
||||
def __init__(self,
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/sites-available",
|
||||
vhost_files="*",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd: Optional[List[str]] = None,
|
||||
restart_cmd: Optional[List[str]] = None,
|
||||
restart_cmd_alt: Optional[List[str]] = None,
|
||||
conftest_cmd: Optional[List[str]] = None,
|
||||
enmod: Optional[str] = None,
|
||||
dismod: Optional[str] = None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2",
|
||||
apache_bin: Optional[str] = None,
|
||||
):
|
||||
self.server_root = server_root
|
||||
self.vhost_root = vhost_root
|
||||
self.vhost_files = vhost_files
|
||||
self.logs_root = logs_root
|
||||
self.ctl = ctl
|
||||
self.version_cmd = ['apache2ctl', '-v'] if not version_cmd else version_cmd
|
||||
self.restart_cmd = ['apache2ctl', 'graceful'] if not restart_cmd else restart_cmd
|
||||
self.restart_cmd_alt = restart_cmd_alt
|
||||
self.conftest_cmd = ['apache2ctl', 'configtest'] if not conftest_cmd else conftest_cmd
|
||||
self.enmod = enmod
|
||||
self.dismod = dismod
|
||||
self.le_vhost_ext = le_vhost_ext
|
||||
self.handle_modules = handle_modules
|
||||
self.handle_sites = handle_sites
|
||||
self.challenge_location = challenge_location
|
||||
self.bin = apache_bin
|
||||
|
||||
|
||||
# TODO: Augeas sections ie. <VirtualHost>, <IfModule> beginning and closing
|
||||
# tags need to be the same case, otherwise Augeas doesn't recognize them.
|
||||
# This is not able to be completely remedied by regular expressions because
|
||||
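Note: the module-level OS_DEFAULTS dict is replaced by the OsOptions class introduced above, so distro overrides and option lookups switch from string keys to attribute access (see the later hunks where `DEFAULTS["enmod"]` becomes `DEFAULTS.enmod`). A reduced sketch of the idea, not the full class:

    from typing import List, Optional

    class OsOptionsSketch:
        """Cut-down stand-in for the OsOptions class introduced above."""
        def __init__(self, ctl: str = "apache2ctl",
                     version_cmd: Optional[List[str]] = None):
            self.ctl = ctl
            # Mutable defaults are built inside __init__ to avoid shared state.
            self.version_cmd = ['apache2ctl', '-v'] if not version_cmd else version_cmd

    defaults = OsOptionsSketch()
    defaults.version_cmd[0] = defaults.ctl   # attribute access replaces dict keys
    assert defaults.version_cmd == ["apache2ctl", "-v"]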
@@ -72,14 +116,11 @@ logger = logging.getLogger(__name__)
|
||||
# TODO: Add directives to sites-enabled... not sites-available.
|
||||
# sites-available doesn't allow immediate find_dir search even with save()
|
||||
# and load()
|
||||
|
||||
@zope.interface.implementer(interfaces.IAuthenticator, interfaces.IInstaller)
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class ApacheConfigurator(common.Installer):
|
||||
class ApacheConfigurator(common.Installer, interfaces.Authenticator):
|
||||
"""Apache configurator.
|
||||
|
||||
:ivar config: Configuration.
|
||||
:type config: :class:`~certbot.interfaces.IConfig`
|
||||
:type config: certbot.configuration.NamespaceConfig
|
||||
|
||||
:ivar parser: Handles low level parsing
|
||||
:type parser: :class:`~certbot_apache._internal.parser`
|
||||
@@ -99,27 +140,7 @@ class ApacheConfigurator(common.Installer):
|
||||
" change depending on the operating system Certbot is run on.)"
|
||||
)
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/sites-available",
|
||||
vhost_files="*",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2",
|
||||
bin=None
|
||||
)
|
||||
|
||||
def option(self, key):
|
||||
"""Get a value from options"""
|
||||
return self.options.get(key)
|
||||
OS_DEFAULTS = OsOptions()
|
||||
|
||||
def pick_apache_config(self, warn_on_no_mod_ssl=True):
|
||||
"""
|
||||
@@ -132,9 +153,10 @@ class ApacheConfigurator(common.Installer):
|
||||
"""
|
||||
# Disabling TLS session tickets is supported by Apache 2.4.11+ and OpenSSL 1.0.2l+.
|
||||
# So for old versions of Apache we pick a configuration without this option.
|
||||
min_openssl_version = util.parse_loose_version('1.0.2l')
|
||||
openssl_version = self.openssl_version(warn_on_no_mod_ssl)
|
||||
if self.version < (2, 4, 11) or not openssl_version or\
|
||||
LooseVersion(openssl_version) < LooseVersion('1.0.2l'):
|
||||
util.parse_loose_version(openssl_version) < min_openssl_version:
|
||||
return apache_util.find_ssl_apache_conf("old")
|
||||
return apache_util.find_ssl_apache_conf("current")
|
||||
|
||||
@@ -149,14 +171,14 @@ class ApacheConfigurator(common.Installer):
|
||||
for o in opts:
|
||||
# Config options use dashes instead of underscores
|
||||
if self.conf(o.replace("_", "-")) is not None:
|
||||
self.options[o] = self.conf(o.replace("_", "-"))
|
||||
setattr(self.options, o, self.conf(o.replace("_", "-")))
|
||||
else:
|
||||
self.options[o] = self.OS_DEFAULTS[o]
|
||||
setattr(self.options, o, getattr(self.OS_DEFAULTS, o))
|
||||
|
||||
# Special cases
|
||||
self.options["version_cmd"][0] = self.option("ctl")
|
||||
self.options["restart_cmd"][0] = self.option("ctl")
|
||||
self.options["conftest_cmd"][0] = self.option("ctl")
|
||||
self.options.version_cmd[0] = self.options.ctl
|
||||
self.options.restart_cmd[0] = self.options.ctl
|
||||
self.options.conftest_cmd[0] = self.options.ctl
|
||||
|
||||
@classmethod
|
||||
def add_parser_arguments(cls, add):
|
||||
@@ -171,30 +193,30 @@ class ApacheConfigurator(common.Installer):
|
||||
else:
|
||||
# cls.OS_DEFAULTS can be distribution specific, see override classes
|
||||
DEFAULTS = cls.OS_DEFAULTS
|
||||
add("enmod", default=DEFAULTS["enmod"],
|
||||
add("enmod", default=DEFAULTS.enmod,
|
||||
help="Path to the Apache 'a2enmod' binary")
|
||||
add("dismod", default=DEFAULTS["dismod"],
|
||||
add("dismod", default=DEFAULTS.dismod,
|
||||
help="Path to the Apache 'a2dismod' binary")
|
||||
add("le-vhost-ext", default=DEFAULTS["le_vhost_ext"],
|
||||
add("le-vhost-ext", default=DEFAULTS.le_vhost_ext,
|
||||
help="SSL vhost configuration extension")
|
||||
add("server-root", default=DEFAULTS["server_root"],
|
||||
add("server-root", default=DEFAULTS.server_root,
|
||||
help="Apache server root directory")
|
||||
add("vhost-root", default=None,
|
||||
help="Apache server VirtualHost configuration root")
|
||||
add("logs-root", default=DEFAULTS["logs_root"],
|
||||
add("logs-root", default=DEFAULTS.logs_root,
|
||||
help="Apache server logs directory")
|
||||
add("challenge-location",
|
||||
default=DEFAULTS["challenge_location"],
|
||||
default=DEFAULTS.challenge_location,
|
||||
help="Directory path for challenge configuration")
|
||||
add("handle-modules", default=DEFAULTS["handle_modules"],
|
||||
add("handle-modules", default=DEFAULTS.handle_modules,
|
||||
help="Let installer handle enabling required modules for you " +
|
||||
"(Only Ubuntu/Debian currently)")
|
||||
add("handle-sites", default=DEFAULTS["handle_sites"],
|
||||
add("handle-sites", default=DEFAULTS.handle_sites,
|
||||
help="Let installer handle enabling sites for you " +
|
||||
"(Only Ubuntu/Debian currently)")
|
||||
add("ctl", default=DEFAULTS["ctl"],
|
||||
add("ctl", default=DEFAULTS.ctl,
|
||||
help="Full path to Apache control script")
|
||||
add("bin", default=DEFAULTS["bin"],
|
||||
add("bin", default=DEFAULTS.bin,
|
||||
help="Full path to apache2/httpd binary")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@@ -207,33 +229,33 @@ class ApacheConfigurator(common.Installer):
|
||||
version = kwargs.pop("version", None)
|
||||
use_parsernode = kwargs.pop("use_parsernode", False)
|
||||
openssl_version = kwargs.pop("openssl_version", None)
|
||||
super(ApacheConfigurator, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# Add name_server association dict
|
||||
self.assoc = {} # type: Dict[str, obj.VirtualHost]
|
||||
self.assoc: Dict[str, obj.VirtualHost] = {}
|
||||
# Outstanding challenges
|
||||
self._chall_out = set() # type: Set[KeyAuthorizationAnnotatedChallenge]
|
||||
self._chall_out: Set[KeyAuthorizationAnnotatedChallenge] = set()
|
||||
# List of vhosts configured per wildcard domain on this run.
|
||||
# used by deploy_cert() and enhance()
|
||||
self._wildcard_vhosts = {} # type: Dict[str, List[obj.VirtualHost]]
|
||||
self._wildcard_vhosts: Dict[str, List[obj.VirtualHost]] = {}
|
||||
# Maps enhancements to vhosts we've enabled the enhancement for
|
||||
self._enhanced_vhosts = defaultdict(set) # type: DefaultDict[str, Set[obj.VirtualHost]]
|
||||
self._enhanced_vhosts: DefaultDict[str, Set[obj.VirtualHost]] = defaultdict(set)
|
||||
# Temporary state for AutoHSTS enhancement
|
||||
self._autohsts = {} # type: Dict[str, Dict[str, Union[int, float]]]
|
||||
self._autohsts: Dict[str, Dict[str, Union[int, float]]] = {}
|
||||
# Reverter save notes
|
||||
self.save_notes = ""
|
||||
# Should we use ParserNode implementation instead of the old behavior
|
||||
self.USE_PARSERNODE = use_parsernode
|
||||
# Saves the list of file paths that were parsed initially, and
|
||||
# not added to parser tree by self.conf("vhost-root") for example.
|
||||
self.parsed_paths = [] # type: List[str]
|
||||
self.parsed_paths: List[str] = []
|
||||
# These will be set in the prepare function
|
||||
self._prepared = False
|
||||
self.parser = None
|
||||
self.parser_root = None
|
||||
self.parser: ApacheParser
|
||||
self.parser_root: Optional[DualBlockNode] = None
|
||||
self.version = version
|
||||
self._openssl_version = openssl_version
|
||||
self.vhosts = None
|
||||
self.vhosts: List[VirtualHost]
|
||||
self.options = copy.deepcopy(self.OS_DEFAULTS)
|
||||
self._enhance_func = {"redirect": self._enable_redirect,
|
||||
"ensure-http-header": self._set_http_header,
|
||||
@@ -283,8 +305,8 @@ class ApacheConfigurator(common.Installer):
|
||||
ssl_module_location = self.parser.standard_path_from_server_root(ssl_module_location)
|
||||
else:
|
||||
# Possibility B: ssl_module is statically linked into Apache
|
||||
if self.option("bin"):
|
||||
ssl_module_location = self.option("bin")
|
||||
if self.options.bin:
|
||||
ssl_module_location = self.options.bin
|
||||
else:
|
||||
logger.warning("ssl_module is statically linked but --apache-bin is "
|
||||
"missing; not disabling session tickets.")
|
||||
@@ -314,7 +336,7 @@ class ApacheConfigurator(common.Installer):
|
||||
self._prepare_options()
|
||||
|
||||
# Verify Apache is installed
|
||||
self._verify_exe_availability(self.option("ctl"))
|
||||
self._verify_exe_availability(self.options.ctl)
|
||||
|
||||
# Make sure configuration is valid
|
||||
self.config_test()
|
||||
@@ -342,8 +364,9 @@ class ApacheConfigurator(common.Installer):
|
||||
"augeaspath": self.parser.get_root_augpath(),
|
||||
"ac_ast": None}
|
||||
if self.USE_PARSERNODE:
|
||||
self.parser_root = self.get_parsernode_root(pn_meta)
|
||||
self.parsed_paths = self.parser_root.parsed_paths()
|
||||
parser_root = self.get_parsernode_root(pn_meta)
|
||||
self.parser_root = parser_root
|
||||
self.parsed_paths = parser_root.parsed_paths()
|
||||
|
||||
# Check for errors in parsing files with Augeas
|
||||
self.parser.check_parsing_errors("httpd.aug")
|
||||
@@ -353,20 +376,20 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
# We may try to enable mod_ssl later. If so, we shouldn't warn if we can't find it now.
|
||||
# This is currently only true for debian/ubuntu.
|
||||
warn_on_no_mod_ssl = not self.option("handle_modules")
|
||||
warn_on_no_mod_ssl = not self.options.handle_modules
|
||||
self.install_ssl_options_conf(self.mod_ssl_conf,
|
||||
self.updated_mod_ssl_conf_digest,
|
||||
warn_on_no_mod_ssl)
|
||||
|
||||
# Prevent two Apache plugins from modifying a config at once
|
||||
try:
|
||||
util.lock_dir_until_exit(self.option("server_root"))
|
||||
util.lock_dir_until_exit(self.options.server_root)
|
||||
except (OSError, errors.LockError):
|
||||
logger.debug("Encountered error:", exc_info=True)
|
||||
raise errors.PluginError(
|
||||
"Unable to create a lock file in {0}. Are you running"
|
||||
" Certbot with sufficient privileges to modify your"
|
||||
" Apache configuration?".format(self.option("server_root")))
|
||||
" Apache configuration?".format(self.options.server_root))
|
||||
self._prepared = True
|
||||
|
||||
def save(self, title=None, temporary=False):
|
||||
@@ -402,10 +425,10 @@ class ApacheConfigurator(common.Installer):
|
||||
:raises .errors.PluginError: If unable to recover the configuration
|
||||
|
||||
"""
|
||||
super(ApacheConfigurator, self).recovery_routine()
|
||||
super().recovery_routine()
|
||||
# Reload configuration after these changes take effect if needed
|
||||
# ie. ApacheParser has been initialized.
|
||||
if self.parser:
|
||||
if hasattr(self, "parser"):
|
||||
# TODO: wrap into non-implementation specific parser interface
|
||||
self.parser.aug.load()
|
||||
|
||||
@@ -427,7 +450,7 @@ class ApacheConfigurator(common.Installer):
|
||||
the function is unable to correctly revert the configuration
|
||||
|
||||
"""
|
||||
super(ApacheConfigurator, self).rollback_checkpoints(rollback)
|
||||
super().rollback_checkpoints(rollback)
|
||||
self.parser.aug.load()
|
||||
|
||||
def _verify_exe_availability(self, exe):
|
||||
@@ -441,7 +464,7 @@ class ApacheConfigurator(common.Installer):
|
||||
"""Initializes the ApacheParser"""
|
||||
# If user provided vhost_root value in command line, use it
|
||||
return parser.ApacheParser(
|
||||
self.option("server_root"), self.conf("vhost-root"),
|
||||
self.options.server_root, self.conf("vhost-root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def get_parsernode_root(self, metadata):
|
||||
@@ -449,9 +472,9 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
if HAS_APACHECONFIG:
|
||||
apache_vars = {}
|
||||
apache_vars["defines"] = apache_util.parse_defines(self.option("ctl"))
|
||||
apache_vars["includes"] = apache_util.parse_includes(self.option("ctl"))
|
||||
apache_vars["modules"] = apache_util.parse_modules(self.option("ctl"))
|
||||
apache_vars["defines"] = apache_util.parse_defines(self.options.ctl)
|
||||
apache_vars["includes"] = apache_util.parse_includes(self.options.ctl)
|
||||
apache_vars["modules"] = apache_util.parse_modules(self.options.ctl)
|
||||
metadata["apache_vars"] = apache_vars
|
||||
|
||||
with open(self.parser.loc["root"]) as f:
|
||||
@@ -487,6 +510,8 @@ class ApacheConfigurator(common.Installer):
|
||||
vhosts = self.choose_vhosts(domain)
|
||||
for vhost in vhosts:
|
||||
self._deploy_cert(vhost, cert_path, key_path, chain_path, fullchain_path)
|
||||
display_util.notify("Successfully deployed certificate for {} to {}"
|
||||
.format(domain, vhost.filep))
|
||||
|
||||
def choose_vhosts(self, domain, create_if_no_ssl=True):
|
||||
"""
|
||||
@@ -525,6 +550,19 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
return list(matched)
|
||||
|
||||
def _raise_no_suitable_vhost_error(self, target_name: str):
|
||||
"""
|
||||
Notifies the user that Certbot could not find a vhost to secure
|
||||
and raises an error.
|
||||
:param str target_name: The server name that could not be mapped
|
||||
:raises errors.PluginError: Raised unconditionally
|
||||
"""
|
||||
raise errors.PluginError(
|
||||
"Certbot could not find a VirtualHost for {0} in the Apache "
|
||||
"configuration. Please create a VirtualHost with a ServerName "
|
||||
"matching {0} and try again.".format(target_name)
|
||||
)
|
||||
|
||||
def _in_wildcard_scope(self, name, domain):
|
||||
"""
|
||||
Helper method for _vhosts_for_wildcard() that makes sure that the domain
|
||||
@@ -562,12 +600,7 @@ class ApacheConfigurator(common.Installer):
|
||||
dialog_output = display_ops.select_vhost_multiple(list(dialog_input))
|
||||
|
||||
if not dialog_output:
|
||||
logger.error(
|
||||
"No vhost exists with servername or alias for domain %s. "
|
||||
"No vhost was selected. Please specify ServerName or ServerAlias "
|
||||
"in the Apache config.",
|
||||
domain)
|
||||
raise errors.PluginError("No vhost selected")
|
||||
self._raise_no_suitable_vhost_error(domain)
|
||||
|
||||
# Make sure we create SSL vhosts for the ones that are HTTP only
|
||||
# if requested.
|
||||
@@ -691,12 +724,7 @@ class ApacheConfigurator(common.Installer):
|
||||
# Select a vhost from a list
|
||||
vhost = display_ops.select_vhost(target_name, self.vhosts)
|
||||
if vhost is None:
|
||||
logger.error(
|
||||
"No vhost exists with servername or alias of %s. "
|
||||
"No vhost was selected. Please specify ServerName or ServerAlias "
|
||||
"in the Apache config.",
|
||||
target_name)
|
||||
raise errors.PluginError("No vhost selected")
|
||||
self._raise_no_suitable_vhost_error(target_name)
|
||||
if temp:
|
||||
return vhost
|
||||
if not vhost.ssl:
|
||||
@@ -832,7 +860,7 @@ class ApacheConfigurator(common.Installer):
|
||||
:rtype: set
|
||||
|
||||
"""
|
||||
all_names = set() # type: Set[str]
|
||||
all_names: Set[str] = set()
|
||||
|
||||
vhost_macro = []
|
||||
|
||||
@@ -850,7 +878,7 @@ class ApacheConfigurator(common.Installer):
|
||||
all_names.add(name)
|
||||
|
||||
if vhost_macro:
|
||||
zope.component.getUtility(interfaces.IDisplay).notification(
|
||||
display_util.notification(
|
||||
"Apache mod_macro seems to be in use in file(s):\n{0}"
|
||||
"\n\nUnfortunately mod_macro is not yet supported".format(
|
||||
"\n ".join(vhost_macro)), force_interactive=True)
|
||||
@@ -996,8 +1024,8 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
"""
|
||||
# Search base config, and all included paths for VirtualHosts
|
||||
file_paths = {} # type: Dict[str, str]
|
||||
internal_paths = defaultdict(set) # type: DefaultDict[str, Set[str]]
|
||||
file_paths: Dict[str, str] = {}
|
||||
internal_paths: DefaultDict[str, Set[str]] = defaultdict(set)
|
||||
vhs = []
|
||||
# Make a list of parser paths because the parser_paths
|
||||
# dictionary may be modified during the loop.
|
||||
@@ -1048,6 +1076,9 @@ class ApacheConfigurator(common.Installer):
|
||||
:rtype: list
|
||||
"""
|
||||
|
||||
if not self.parser_root:
|
||||
raise errors.Error("This ApacheConfigurator instance is not" # pragma: no cover
|
||||
" configured to use a node parser.")
|
||||
vhs = []
|
||||
vhosts = self.parser_root.find_blocks("VirtualHost", exclude=False)
|
||||
for vhblock in vhosts:
|
||||
@@ -1300,7 +1331,7 @@ class ApacheConfigurator(common.Installer):
|
||||
:param boolean temp: If the change is temporary
|
||||
"""
|
||||
|
||||
if self.option("handle_modules"):
|
||||
if self.options.handle_modules:
|
||||
if self.version >= (2, 4) and ("socache_shmcb_module" not in
|
||||
self.parser.modules):
|
||||
self.enable_mod("socache_shmcb", temp=temp)
|
||||
@@ -1320,7 +1351,7 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
Duplicates vhost and adds default ssl options
|
||||
New vhost will reside as (nonssl_vhost.path) +
|
||||
``self.option("le_vhost_ext")``
|
||||
``self.options.le_vhost_ext``
|
||||
|
||||
.. note:: This function saves the configuration
|
||||
|
||||
@@ -1419,15 +1450,15 @@ class ApacheConfigurator(common.Installer):
|
||||
"""
|
||||
|
||||
if self.conf("vhost-root") and os.path.exists(self.conf("vhost-root")):
|
||||
fp = os.path.join(filesystem.realpath(self.option("vhost_root")),
|
||||
fp = os.path.join(filesystem.realpath(self.options.vhost_root),
|
||||
os.path.basename(non_ssl_vh_fp))
|
||||
else:
|
||||
# Use non-ssl filepath
|
||||
fp = filesystem.realpath(non_ssl_vh_fp)
|
||||
|
||||
if fp.endswith(".conf"):
|
||||
return fp[:-(len(".conf"))] + self.option("le_vhost_ext")
|
||||
return fp + self.option("le_vhost_ext")
|
||||
return fp[:-(len(".conf"))] + self.options.le_vhost_ext
|
||||
return fp + self.options.le_vhost_ext
|
||||
|
||||
def _sift_rewrite_rule(self, line):
|
||||
"""Decides whether a line should be copied to a SSL vhost.
|
||||
@@ -1501,12 +1532,11 @@ class ApacheConfigurator(common.Installer):
|
||||
raise errors.PluginError("Unable to write/read in make_vhost_ssl")
|
||||
|
||||
if sift:
|
||||
reporter = zope.component.getUtility(interfaces.IReporter)
|
||||
reporter.add_message(
|
||||
"Some rewrite rules copied from {0} were disabled in the "
|
||||
"vhost for your HTTPS site located at {1} because they have "
|
||||
"the potential to create redirection loops.".format(
|
||||
vhost.filep, ssl_fp), reporter.MEDIUM_PRIORITY)
|
||||
display_util.notify(
|
||||
f"Some rewrite rules copied from {vhost.filep} were disabled in the "
|
||||
f"vhost for your HTTPS site located at {ssl_fp} because they have "
|
||||
"the potential to create redirection loops."
|
||||
)
|
||||
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(ssl_fp)), "0")
|
||||
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(vhost.filep)), "0")
|
||||
|
||||
@@ -1835,13 +1865,13 @@ class ApacheConfigurator(common.Installer):
|
||||
if options:
|
||||
msg_enhancement += ": " + options
|
||||
msg = msg_tmpl.format(domain, msg_enhancement)
|
||||
logger.warning(msg)
|
||||
logger.error(msg)
|
||||
raise errors.PluginError(msg)
|
||||
try:
|
||||
for vhost in vhosts:
|
||||
func(vhost, options)
|
||||
except errors.PluginError:
|
||||
logger.warning("Failed %s for %s", enhancement, domain)
|
||||
logger.error("Failed %s for %s", enhancement, domain)
|
||||
raise
|
||||
|
||||
def _autohsts_increase(self, vhost, id_str, nextstep):
|
||||
@@ -2156,7 +2186,7 @@ class ApacheConfigurator(common.Installer):
|
||||
# There can be other RewriteRule directive lines in vhost config.
|
||||
# rewrite_args_dict keys are directive ids and the corresponding value
|
||||
# for each is a list of arguments to that directive.
|
||||
rewrite_args_dict = defaultdict(list) # type: DefaultDict[str, List[str]]
|
||||
rewrite_args_dict: DefaultDict[str, List[str]] = defaultdict(list)
|
||||
pat = r'(.*directive\[\d+\]).*'
|
||||
for match in rewrite_path:
|
||||
m = re.match(pat, match)
|
||||
@@ -2250,7 +2280,7 @@ class ApacheConfigurator(common.Installer):
|
||||
if ssl_vhost.aliases:
|
||||
serveralias = "ServerAlias " + " ".join(ssl_vhost.aliases)
|
||||
|
||||
rewrite_rule_args = [] # type: List[str]
|
||||
rewrite_rule_args: List[str] = []
|
||||
if self.get_version() >= (2, 3, 9):
|
||||
rewrite_rule_args = constants.REWRITE_HTTPS_ARGS_WITH_END
|
||||
else:
|
||||
@@ -2271,7 +2301,7 @@ class ApacheConfigurator(common.Installer):
|
||||
addr in self._get_proposed_addrs(ssl_vhost)),
|
||||
servername, serveralias,
|
||||
" ".join(rewrite_rule_args),
|
||||
self.option("logs_root")))
|
||||
self.options.logs_root))
|
||||
|
||||
def _write_out_redirect(self, ssl_vhost, text):
|
||||
# This is the default name
|
||||
@@ -2283,7 +2313,7 @@ class ApacheConfigurator(common.Installer):
|
||||
if len(ssl_vhost.name) < (255 - (len(redirect_filename) + 1)):
|
||||
redirect_filename = "le-redirect-%s.conf" % ssl_vhost.name
|
||||
|
||||
redirect_filepath = os.path.join(self.option("vhost_root"),
|
||||
redirect_filepath = os.path.join(self.options.vhost_root,
|
||||
redirect_filename)
|
||||
|
||||
# Register the new file that will be created
|
||||
@@ -2365,7 +2395,7 @@ class ApacheConfigurator(common.Installer):
|
||||
vhost.enabled = True
|
||||
return
|
||||
|
||||
def enable_mod(self, mod_name, temp=False):
|
||||
def enable_mod(self, mod_name, temp=False): # pylint: disable=unused-argument
|
||||
"""Enables module in Apache.
|
||||
|
||||
Both enables and reloads Apache so module is active.
|
||||
@@ -2403,19 +2433,17 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
"""
|
||||
try:
|
||||
util.run_script(self.option("restart_cmd"))
|
||||
util.run_script(self.options.restart_cmd)
|
||||
except errors.SubprocessError as err:
|
||||
logger.info("Unable to restart apache using %s",
|
||||
self.option("restart_cmd"))
|
||||
alt_restart = self.option("restart_cmd_alt")
|
||||
if alt_restart:
|
||||
logger.warning("Unable to restart apache using %s",
|
||||
self.options.restart_cmd)
|
||||
if self.options.restart_cmd_alt:
|
||||
logger.debug("Trying alternative restart command: %s",
|
||||
alt_restart)
|
||||
self.options.restart_cmd_alt)
|
||||
# There is an alternative restart command available
|
||||
# This usually is "restart" verb while original is "graceful"
|
||||
try:
|
||||
util.run_script(self.option(
|
||||
"restart_cmd_alt"))
|
||||
util.run_script(self.options.restart_cmd_alt)
|
||||
return
|
||||
except errors.SubprocessError as secerr:
|
||||
error = str(secerr)
|
||||
@@ -2430,7 +2458,7 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
"""
|
||||
try:
|
||||
util.run_script(self.option("conftest_cmd"))
|
||||
util.run_script(self.options.conftest_cmd)
|
||||
except errors.SubprocessError as err:
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
@@ -2446,11 +2474,11 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
"""
|
||||
try:
|
||||
stdout, _ = util.run_script(self.option("version_cmd"))
|
||||
stdout, _ = util.run_script(self.options.version_cmd)
|
||||
except errors.SubprocessError:
|
||||
raise errors.PluginError(
|
||||
"Unable to run %s -v" %
|
||||
self.option("version_cmd"))
|
||||
self.options.version_cmd)
|
||||
|
||||
regex = re.compile(r"Apache/([0-9\.]*)", re.IGNORECASE)
|
||||
matches = regex.findall(stdout)
|
||||
@@ -2470,6 +2498,11 @@ class ApacheConfigurator(common.Installer):
|
||||
version=".".join(str(i) for i in self.version))
|
||||
)
|
||||
|
||||
def auth_hint(self, failed_achalls): # pragma: no cover
|
||||
return ("The Certificate Authority failed to verify the temporary Apache configuration "
|
||||
"changes made by Certbot. Ensure that the listed domains point to this Apache "
|
||||
"server and that it is accessible from the internet.")
|
||||
|
||||
###########################################################################
|
||||
# Challenges Section
|
||||
###########################################################################
|
||||
@@ -2563,7 +2596,7 @@ class ApacheConfigurator(common.Installer):
|
||||
msg_tmpl = ("Certbot was not able to find SSL VirtualHost for a "
|
||||
"domain {0} for enabling AutoHSTS enhancement.")
|
||||
msg = msg_tmpl.format(d)
|
||||
logger.warning(msg)
|
||||
logger.error(msg)
|
||||
raise errors.PluginError(msg)
|
||||
for vh in vhosts:
|
||||
try:
|
||||
@@ -2649,7 +2682,7 @@ class ApacheConfigurator(common.Installer):
|
||||
except errors.PluginError:
|
||||
msg = ("Could not find VirtualHost with ID {0}, disabling "
|
||||
"AutoHSTS for this VirtualHost").format(id_str)
|
||||
logger.warning(msg)
|
||||
logger.error(msg)
|
||||
# Remove the orphaned AutoHSTS entry from pluginstorage
|
||||
self._autohsts.pop(id_str)
|
||||
continue
|
||||
@@ -2689,7 +2722,7 @@ class ApacheConfigurator(common.Installer):
|
||||
except errors.PluginError:
|
||||
msg = ("VirtualHost with id {} was not found, unable to "
|
||||
"make HSTS max-age permanent.").format(id_str)
|
||||
logger.warning(msg)
|
||||
logger.error(msg)
|
||||
self._autohsts.pop(id_str)
|
||||
continue
|
||||
if self._autohsts_vhost_in_lineage(vhost, lineage):
|
||||
|
||||
@@ -4,11 +4,13 @@ import pkg_resources
|
||||
from certbot.compat import os
|
||||
|
||||
MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
|
||||
"""Name of the mod_ssl config file as saved in `IConfig.config_dir`."""
|
||||
"""Name of the mod_ssl config file as saved
|
||||
in `certbot.configuration.NamespaceConfig.config_dir`."""
|
||||
|
||||
|
||||
UPDATED_MOD_SSL_CONF_DIGEST = ".updated-options-ssl-apache-conf-digest.txt"
|
||||
"""Name of the hash of the updated or informed mod_ssl_conf as saved in `IConfig.config_dir`."""
|
||||
"""Name of the hash of the updated or informed mod_ssl_conf as saved
|
||||
in `certbot.configuration.NamespaceConfig.config_dir`."""
|
||||
|
||||
# NEVER REMOVE A SINGLE HASH FROM THIS LIST UNLESS YOU KNOW EXACTLY WHAT YOU ARE DOING!
|
||||
ALL_SSL_OPTIONS_HASHES = [
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
"""Contains UI methods for Apache operations."""
|
||||
import logging
|
||||
|
||||
import zope.component
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot.compat import os
|
||||
import certbot.display.util as display_util
|
||||
from certbot.display import util as display_util
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -26,7 +23,7 @@ def select_vhost_multiple(vhosts):
|
||||
# Remove the extra newline from the last entry
|
||||
if tags_list:
|
||||
tags_list[-1] = tags_list[-1][:-1]
|
||||
code, names = zope.component.getUtility(interfaces.IDisplay).checklist(
|
||||
code, names = display_util.checklist(
|
||||
"Which VirtualHosts would you like to install the wildcard certificate for?",
|
||||
tags=tags_list, force_interactive=True)
|
||||
if code == display_util.OK:
|
||||
@@ -34,6 +31,7 @@ def select_vhost_multiple(vhosts):
|
||||
return return_vhosts
|
||||
return []
|
||||
|
||||
|
||||
def _reversemap_vhosts(names, vhosts):
|
||||
"""Helper function for select_vhost_multiple for mapping string
|
||||
representations back to actual vhost objects"""
|
||||
@@ -45,6 +43,7 @@ def _reversemap_vhosts(names, vhosts):
|
||||
return_vhosts.append(vhost)
|
||||
return return_vhosts
|
||||
|
||||
|
||||
def select_vhost(domain, vhosts):
|
||||
"""Select an appropriate Apache Vhost.
|
||||
|
||||
@@ -62,6 +61,7 @@ def select_vhost(domain, vhosts):
|
||||
return vhosts[tag]
|
||||
return None
|
||||
|
||||
|
||||
def _vhost_menu(domain, vhosts):
|
||||
"""Select an appropriate Apache Vhost.
|
||||
|
||||
@@ -107,7 +107,7 @@ def _vhost_menu(domain, vhosts):
|
||||
)
|
||||
|
||||
try:
|
||||
code, tag = zope.component.getUtility(interfaces.IDisplay).menu(
|
||||
code, tag = display_util.menu(
|
||||
"We were unable to find a vhost with a ServerName "
|
||||
"or Address of {0}.{1}Which virtual host would you "
|
||||
"like to choose?".format(domain, os.linesep),
|
||||
@@ -119,7 +119,7 @@ def _vhost_menu(domain, vhosts):
|
||||
"guidance in non-interactive mode. Certbot may need "
|
||||
"vhosts to be explicitly labelled with ServerName or "
|
||||
"ServerAlias directives.".format(domain))
|
||||
logger.warning(msg)
|
||||
logger.error(msg)
|
||||
raise errors.MissingCommandlineFlag(msg)
|
||||
|
||||
return code, tag
|
||||
|
||||
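Note: the display_ops hunks swap `zope.component.getUtility(interfaces.IDisplay)` lookups for direct calls into `certbot.display.util`, keeping the checklist/menu arguments shown in the diff. Since the real helpers need certbot and an interactive display, the sketch below uses a stubbed stand-in to illustrate the lookup-to-direct-call refactor only:

    # Illustrative stand-in only; the real function lives in certbot.display.util.
    def checklist(prompt, tags, force_interactive=True):
        """Pretend display helper that simply selects everything."""
        return "ok", list(tags)

    # Old style: zope.component.getUtility(interfaces.IDisplay).checklist(...)
    # New style: call the module-level helper directly.
    code, names = checklist("Which VirtualHosts?", tags=["vhost-a", "vhost-b"])
    assert code == "ok" and names == ["vhost-a", "vhost-b"]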
@@ -1,10 +1,10 @@
""" Dual ParserNode implementation """
from certbot_apache._internal import apacheparser
from certbot_apache._internal import assertions
from certbot_apache._internal import augeasparser
from certbot_apache._internal import apacheparser


class DualNodeBase(object):
class DualNodeBase:
""" Dual parser interface for in development testing. This is used as the
base class for dual parser interface classes. This class handles runtime
attribute value assertions."""
@@ -1,6 +1,4 @@
|
||||
""" Entry point for Apache Plugin """
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
from certbot import util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import override_arch
|
||||
@@ -10,6 +8,7 @@ from certbot_apache._internal import override_debian
|
||||
from certbot_apache._internal import override_fedora
|
||||
from certbot_apache._internal import override_gentoo
|
||||
from certbot_apache._internal import override_suse
|
||||
from certbot_apache._internal import override_void
|
||||
|
||||
OVERRIDE_CLASSES = {
|
||||
"arch": override_arch.ArchConfigurator,
|
||||
@@ -35,6 +34,7 @@ OVERRIDE_CLASSES = {
|
||||
"sles": override_suse.OpenSUSEConfigurator,
|
||||
"scientific": override_centos.CentOSConfigurator,
|
||||
"scientific linux": override_centos.CentOSConfigurator,
|
||||
"void": override_void.VoidConfigurator,
|
||||
}
|
||||
|
||||
|
||||
@@ -45,7 +45,8 @@ def get_configurator():
override_class = None

# Special case for older Fedora versions
if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):
min_version = util.parse_loose_version('29')
if os_name == 'fedora' and util.parse_loose_version(os_version) < min_version:
os_name = 'fedora_old'

try:
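Note: `distutils.version.LooseVersion` comparisons are replaced with `certbot.util.parse_loose_version`, since distutils is deprecated (and removed in Python 3.12). The sketch below uses a very rough stand-in for the certbot helper, because that helper is not available here:

    def parse_loose_version_sketch(version: str):
        """Very rough stand-in for certbot.util.parse_loose_version."""
        return tuple(int(part) for part in version.split(".") if part.isdigit())

    os_name = "fedora"
    os_version = "28"
    # Same comparison as the diff: treat Fedora < 29 as the "old" configurator.
    if parse_loose_version_sketch(os_version) < parse_loose_version_sketch("29"):
        os_name = "fedora_old"
    assert os_name == "fedora_old"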
@@ -1,9 +1,9 @@
|
||||
"""A class that performs HTTP-01 challenges for Apache"""
|
||||
import logging
|
||||
import errno
|
||||
import logging
|
||||
from typing import List
|
||||
from typing import Set
|
||||
|
||||
from acme.magic_typing import List
|
||||
from acme.magic_typing import Set
|
||||
from certbot import errors
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
@@ -47,7 +47,7 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ApacheHttp01, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
self.challenge_conf_pre = os.path.join(
|
||||
self.configurator.conf("challenge-location"),
|
||||
"le_http_01_challenge_pre.conf")
|
||||
@@ -57,7 +57,7 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
self.challenge_dir = os.path.join(
|
||||
self.configurator.config.work_dir,
|
||||
"http_challenges")
|
||||
self.moded_vhosts = set() # type: Set[VirtualHost]
|
||||
self.moded_vhosts: Set[VirtualHost] = set()
|
||||
|
||||
def perform(self):
|
||||
"""Perform all HTTP-01 challenges."""
|
||||
@@ -93,12 +93,12 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
self.configurator.enable_mod(mod, temp=True)
|
||||
|
||||
def _mod_config(self):
|
||||
selected_vhosts = [] # type: List[VirtualHost]
|
||||
selected_vhosts: List[VirtualHost] = []
|
||||
http_port = str(self.configurator.config.http01_port)
|
||||
|
||||
# Search for VirtualHosts matching by name
|
||||
for chall in self.achalls:
|
||||
# Search for matching VirtualHosts
|
||||
for vh in self._matching_vhosts(chall.domain):
|
||||
selected_vhosts.append(vh)
|
||||
selected_vhosts += self._matching_vhosts(chall.domain)
|
||||
|
||||
# Ensure that we have one or more VirtualHosts that we can continue
|
||||
# with. (one that listens to port configured with --http-01-port)
|
||||
@@ -107,9 +107,13 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
if any(a.is_wildcard() or a.get_port() == http_port for a in vhost.addrs):
|
||||
found = True
|
||||
|
||||
if not found:
|
||||
for vh in self._relevant_vhosts():
|
||||
selected_vhosts.append(vh)
|
||||
# If there's at least one eligible VirtualHost, also add all unnamed VirtualHosts
|
||||
# because they might match at runtime (#8890)
|
||||
if found:
|
||||
selected_vhosts += self._unnamed_vhosts()
|
||||
# Otherwise, add every Virtualhost which listens on the right port
|
||||
else:
|
||||
selected_vhosts += self._relevant_vhosts()
|
||||
|
||||
# Add the challenge configuration
|
||||
for vh in selected_vhosts:
|
||||
@@ -167,6 +171,10 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
|
||||
return relevant_vhosts
|
||||
|
||||
def _unnamed_vhosts(self) -> List[VirtualHost]:
|
||||
"""Return all VirtualHost objects with no ServerName"""
|
||||
return [vh for vh in self.configurator.vhosts if vh.name is None]
|
||||
|
||||
def _set_up_challenges(self):
|
||||
if not os.path.isdir(self.challenge_dir):
|
||||
old_umask = filesystem.umask(0o022)
|
||||
|
||||
@@ -100,12 +100,9 @@ For this reason the internal representation of data should not ignore the case.
|
||||
"""
|
||||
|
||||
import abc
|
||||
import six
|
||||
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ParserNode(object):
|
||||
class ParserNode(object, metaclass=abc.ABCMeta):
|
||||
"""
|
||||
ParserNode is the basic building block of the tree of such nodes,
|
||||
representing the structure of the configuration. It is largely meant to keep
|
||||
@@ -204,9 +201,7 @@ class ParserNode(object):
|
||||
"""
|
||||
|
||||
|
||||
# Linter rule exclusion done because of https://github.com/PyCQA/pylint/issues/179
|
||||
@six.add_metaclass(abc.ABCMeta) # pylint: disable=abstract-method
|
||||
class CommentNode(ParserNode):
|
||||
class CommentNode(ParserNode, metaclass=abc.ABCMeta):
|
||||
"""
|
||||
CommentNode class is used for representation of comments within the parsed
|
||||
configuration structure. Because of the nature of comments, it is not able
|
||||
@@ -243,14 +238,13 @@ class CommentNode(ParserNode):
|
||||
created or changed after the last save. Default: False.
|
||||
:type dirty: bool
|
||||
"""
|
||||
super(CommentNode, self).__init__(ancestor=kwargs['ancestor'],
|
||||
super().__init__(ancestor=kwargs['ancestor'],
|
||||
dirty=kwargs.get('dirty', False),
|
||||
filepath=kwargs['filepath'],
|
||||
metadata=kwargs.get('metadata', {})) # pragma: no cover
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DirectiveNode(ParserNode):
|
||||
class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
|
||||
"""
|
||||
DirectiveNode class represents a configuration directive within the configuration.
|
||||
It can have zero or more parameters attached to it. Because of the nature of
|
||||
@@ -308,7 +302,7 @@ class DirectiveNode(ParserNode):
|
||||
:type enabled: bool
|
||||
|
||||
"""
|
||||
super(DirectiveNode, self).__init__(ancestor=kwargs['ancestor'],
|
||||
super().__init__(ancestor=kwargs['ancestor'],
|
||||
dirty=kwargs.get('dirty', False),
|
||||
filepath=kwargs['filepath'],
|
||||
metadata=kwargs.get('metadata', {})) # pragma: no cover
|
||||
@@ -318,15 +312,14 @@ class DirectiveNode(ParserNode):
|
||||
"""
|
||||
Sets the sequence of parameters for this ParserNode object without
|
||||
whitespaces. While the whitespaces for parameters are discarded when using
|
||||
this method, the whitespacing preceeding the ParserNode itself should be
|
||||
this method, the whitespacing preceding the ParserNode itself should be
|
||||
kept intact.
|
||||
|
||||
:param list parameters: sequence of parameters
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class BlockNode(DirectiveNode):
|
||||
class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
|
||||
"""
|
||||
BlockNode class represents a block of nested configuration directives, comments
|
||||
and other blocks as its children. A BlockNode can have zero or more parameters
|
||||
@@ -371,7 +364,7 @@ class BlockNode(DirectiveNode):
|
||||
def add_child_block(self, name, parameters=None, position=None):
|
||||
"""
|
||||
Adds a new BlockNode child node with provided values and marks the callee
|
||||
BlockNode dirty. This is used to add new children to the AST. The preceeding
|
||||
BlockNode dirty. This is used to add new children to the AST. The preceding
|
||||
whitespaces should not be added based on the ancestor or siblings for the
|
||||
newly created object. This is to match the current behavior of the legacy
|
||||
parser implementation.
|
||||
@@ -392,7 +385,7 @@ class BlockNode(DirectiveNode):
|
||||
"""
|
||||
Adds a new DirectiveNode child node with provided values and marks the
|
||||
callee BlockNode dirty. This is used to add new children to the AST. The
|
||||
preceeding whitespaces should not be added based on the ancestor or siblings
|
||||
preceding whitespaces should not be added based on the ancestor or siblings
|
||||
for the newly created object. This is to match the current behavior of the
|
||||
legacy parser implementation.
|
||||
|
||||
@@ -413,7 +406,7 @@ class BlockNode(DirectiveNode):
|
||||
"""
|
||||
Adds a new CommentNode child node with provided value and marks the
|
||||
callee BlockNode dirty. This is used to add new children to the AST. The
|
||||
preceeding whitespaces should not be added based on the ancestor or siblings
|
||||
preceding whitespaces should not be added based on the ancestor or siblings
|
||||
for the newly created object. This is to match the current behavior of the
|
||||
legacy parser implementation.
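Several hunks in this file swap @six.add_metaclass(abc.ABCMeta) and super(Class, self) for the Python 3-only spellings. The pairs are equivalent; a small sketch (not taken from the certbot code) showing that the new forms still produce an abstract base class and still reach the parent initializer:

import abc

class Node(metaclass=abc.ABCMeta):        # replaces @six.add_metaclass(abc.ABCMeta)
    def __init__(self, **kwargs):
        self.dirty = kwargs.get("dirty", False)

    @abc.abstractmethod
    def describe(self) -> str:
        ...

class Comment(Node):
    def __init__(self, text: str, **kwargs):
        super().__init__(**kwargs)          # replaces super(Comment, self).__init__(...)
        self.text = text

    def describe(self) -> str:
        return "# " + self.text

print(Comment("managed by Certbot", dirty=True).describe())
# Instantiating Node() directly would still raise TypeError (abstract method).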
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Module contains classes used by the Apache Configurator."""
|
||||
import re
|
||||
from typing import Set
|
||||
|
||||
from acme.magic_typing import Set
|
||||
from certbot.plugins import common
|
||||
|
||||
|
||||
@@ -20,16 +20,13 @@ class Addr(common.Addr):
|
||||
self.is_wildcard() and other.is_wildcard()))
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __repr__(self):
|
||||
return "certbot_apache._internal.obj.Addr(" + repr(self.tup) + ")"
|
||||
|
||||
def __hash__(self): # pylint: disable=useless-super-delegation
|
||||
# Python 3 requires explicit overridden for __hash__ if __eq__ or
|
||||
# __cmp__ is overridden. See https://bugs.python.org/issue2235
|
||||
return super(Addr, self).__hash__()
|
||||
return super().__hash__()
|
||||
|
||||
def _addr_less_specific(self, addr):
|
||||
"""Returns if addr.get_addr() is more specific than self.get_addr()."""
|
||||
@@ -98,7 +95,7 @@ class Addr(common.Addr):
|
||||
return self.get_addr_obj(port)
|
||||
|
||||
|
||||
class VirtualHost(object):
|
||||
class VirtualHost:
|
||||
"""Represents an Apache Virtualhost.
|
||||
|
||||
:ivar str filep: file path of VH
|
||||
@@ -140,7 +137,7 @@ class VirtualHost(object):
|
||||
|
||||
def get_names(self):
|
||||
"""Return a set of all names."""
|
||||
all_names = set() # type: Set[str]
|
||||
all_names: Set[str] = set()
|
||||
all_names.update(self.aliases)
|
||||
# Strip out any scheme:// and <port> field from servername
|
||||
if self.name is not None:
|
||||
@@ -191,9 +188,6 @@ class VirtualHost(object):
|
||||
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.filep, self.path,
|
||||
tuple(self.addrs), tuple(self.get_names()),
|
||||
@@ -251,7 +245,7 @@ class VirtualHost(object):
|
||||
|
||||
# already_found acts to keep everything very conservative.
|
||||
# Don't allow multiple ip:ports in same set.
|
||||
already_found = set() # type: Set[str]
|
||||
already_found: Set[str] = set()
|
||||
|
||||
for addr in vhost.addrs:
|
||||
for local_addr in self.addrs:
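These obj.py hunks drop the hand-written __ne__ methods and turn "# type:" comments into variable annotations. Python 3 derives != from __eq__ automatically, so removing __ne__ is behavior-preserving as long as __hash__ stays defined; a quick illustration with simplified classes:

from typing import Set

class Addr:
    def __init__(self, host: str, port: str):
        self.tup = (host, port)

    def __eq__(self, other):
        return isinstance(other, Addr) and self.tup == other.tup

    def __hash__(self):
        # Still needed explicitly: defining __eq__ alone would set __hash__ to None.
        return hash(self.tup)

a, b = Addr("127.0.0.1", "80"), Addr("127.0.0.1", "443")
assert a != b                # != is synthesized from __eq__ in Python 3
all_names: Set[str] = set()  # annotation form of "all_names = set()  # type: Set[str]"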
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
""" Distribution specific override class for Arch Linux """
|
||||
import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class ArchConfigurator(configurator.ApacheConfigurator):
|
||||
"""Arch Linux specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
OS_DEFAULTS = OsOptions(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf",
|
||||
vhost_files="*.conf",
|
||||
@@ -18,11 +15,5 @@ class ArchConfigurator(configurator.ApacheConfigurator):
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf",
|
||||
bin=None,
|
||||
)
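The distribution overrides now build their defaults as an OsOptions instance instead of a dict, which is what enables the attribute access (self.options.ctl) used throughout these diffs. The real class is defined in certbot_apache._internal.configurator and is not shown here; a rough dataclass sketch with assumed field names and defaults:

from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class OsOptions:
    """Illustrative subset only; the real class defines more fields."""
    server_root: str = "/etc/apache2"
    vhost_root: str = "/etc/apache2/sites-available"
    vhost_files: str = "*"
    ctl: str = "apache2ctl"
    restart_cmd: List[str] = field(default_factory=lambda: ["apache2ctl", "graceful"])
    restart_cmd_alt: Optional[List[str]] = None
    challenge_location: str = "/etc/apache2"
    bin: Optional[str] = None

arch = OsOptions(server_root="/etc/httpd", vhost_root="/etc/httpd/conf",
                 vhost_files="*.conf", ctl="apachectl",
                 challenge_location="/etc/httpd/conf")
print(arch.ctl)   # attribute access replaces the old self.option("ctl") lookups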
|
||||
|
||||
@@ -1,25 +1,23 @@
|
||||
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
|
||||
import logging
|
||||
from typing import cast
|
||||
from typing import List
|
||||
|
||||
import zope.interface
|
||||
|
||||
from acme.magic_typing import List
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.errors import MisconfigurationError
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
"""CentOS specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
OS_DEFAULTS = OsOptions(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf.d",
|
||||
vhost_files="*.conf",
|
||||
@@ -29,13 +27,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf.d",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
def config_test(self):
|
||||
@@ -50,7 +42,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
fedora = os_info[0].lower() == "fedora"
|
||||
|
||||
try:
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
super().config_test()
|
||||
except errors.MisconfigurationError:
|
||||
if fedora:
|
||||
self._try_restart_fedora()
|
||||
@@ -68,20 +60,22 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
super().config_test()
|
||||
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization in order to support
|
||||
alternative restart cmd used in CentOS.
|
||||
"""
|
||||
super(CentOSConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd_alt"][0] = self.option("ctl")
|
||||
super()._prepare_options()
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for CentOS.")
|
||||
self.options.restart_cmd_alt[0] = self.options.ctl
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return CentOSParser(
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.options.server_root, self.options.vhost_root,
|
||||
self.version, configurator=self)
|
||||
|
||||
def _deploy_cert(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
@@ -90,7 +84,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
|
||||
that was created by Certbot
|
||||
"""
|
||||
super(CentOSConfigurator, self)._deploy_cert(*args, **kwargs)
|
||||
super()._deploy_cert(*args, **kwargs)
|
||||
if self.version < (2, 4, 0):
|
||||
self._deploy_loadmodule_ssl_if_needed()
|
||||
|
||||
@@ -102,9 +96,9 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
|
||||
loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
correct_ifmods = [] # type: List[str]
|
||||
loadmod_args = [] # type: List[str]
|
||||
loadmod_paths = [] # type: List[str]
|
||||
correct_ifmods: List[str] = []
|
||||
loadmod_args: List[str] = []
|
||||
loadmod_paths: List[str] = []
|
||||
for m in loadmods:
|
||||
noarg_path = m.rpartition("/")[0]
|
||||
path_args = self.parser.get_all_args(noarg_path)
|
||||
@@ -118,8 +112,9 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
else:
|
||||
loadmod_args = path_args
|
||||
|
||||
if self.parser.not_modssl_ifmodule(noarg_path): # pylint: disable=no-member
|
||||
if self.parser.loc["default"] in noarg_path:
|
||||
centos_parser: CentOSParser = cast(CentOSParser, self.parser)
|
||||
if centos_parser.not_modssl_ifmodule(noarg_path):
|
||||
if centos_parser.loc["default"] in noarg_path:
|
||||
# LoadModule already in the main configuration file
|
||||
if ("ifmodule/" in noarg_path.lower() or
|
||||
"ifmodule[1]" in noarg_path.lower()):
|
||||
@@ -167,19 +162,19 @@ class CentOSParser(parser.ApacheParser):
|
||||
def __init__(self, *args, **kwargs):
|
||||
# CentOS specific configuration file for Apache
|
||||
self.sysconfig_filep = "/etc/sysconfig/httpd"
|
||||
super(CentOSParser, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
# Opportunistic, works if SELinux not enforced
|
||||
super(CentOSParser, self).update_runtime_variables()
|
||||
super().update_runtime_variables()
|
||||
self.parse_sysconfig_var()
|
||||
|
||||
def parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from CentOS configuration file """
|
||||
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
for k, v in defines.items():
|
||||
self.variables[k] = v
|
||||
|
||||
def not_modssl_ifmodule(self, path):
|
||||
"""Checks if the provided Augeas path has argument !mod_ssl"""
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
""" Distribution specific override class for macOS """
|
||||
import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class DarwinConfigurator(configurator.ApacheConfigurator):
|
||||
"""macOS specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
OS_DEFAULTS = OsOptions(
|
||||
vhost_root="/etc/apache2/other",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apachectl",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2/other",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
@@ -1,39 +1,25 @@
|
||||
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
|
||||
import logging
|
||||
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
"""Debian specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/sites-available",
|
||||
vhost_files="*",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
OS_DEFAULTS = OsOptions(
|
||||
enmod="a2enmod",
|
||||
dismod="a2dismod",
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=True,
|
||||
handle_sites=True,
|
||||
challenge_location="/etc/apache2",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
def enable_site(self, vhost):
|
||||
@@ -58,7 +44,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
if not os.path.isdir(os.path.dirname(enabled_path)):
|
||||
# For some reason, sites-enabled / sites-available do not exist
|
||||
# Call the parent method
|
||||
return super(DebianConfigurator, self).enable_site(vhost)
|
||||
return super().enable_site(vhost)
|
||||
self.reverter.register_file_creation(False, enabled_path)
|
||||
try:
|
||||
os.symlink(vhost.filep, enabled_path)
|
||||
@@ -68,7 +54,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
# Already in shape
|
||||
vhost.enabled = True
|
||||
return None
|
||||
logger.warning(
|
||||
logger.error(
|
||||
"Could not symlink %s to %s, got error: %s", enabled_path,
|
||||
vhost.filep, err.strerror)
|
||||
errstring = ("Encountered error while trying to enable a " +
|
||||
@@ -132,11 +118,12 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
# Generate reversal command.
|
||||
# Try to be safe here... check that we can probably reverse before
|
||||
# applying enmod command
|
||||
if not util.exe_exists(self.option("dismod")):
|
||||
if (self.options.dismod is None or self.options.enmod is None
|
||||
or not util.exe_exists(self.options.dismod)):
|
||||
raise errors.MisconfigurationError(
|
||||
"Unable to find a2dismod, please make sure a2enmod and "
|
||||
"a2dismod are configured correctly for certbot.")
|
||||
|
||||
self.reverter.register_undo_command(
|
||||
temp, [self.option("dismod"), "-f", mod_name])
|
||||
util.run_script([self.option("enmod"), mod_name])
|
||||
temp, [self.options.dismod, "-f", mod_name])
|
||||
util.run_script([self.options.enmod, mod_name])
|
||||
|
||||
@@ -1,19 +1,16 @@
|
||||
""" Distribution specific override class for Fedora 29+ """
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
"""Fedora 29+ specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
OS_DEFAULTS = OsOptions(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf.d",
|
||||
vhost_files="*.conf",
|
||||
@@ -23,13 +20,7 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf.d",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
def config_test(self):
|
||||
@@ -40,14 +31,14 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
during the first (re)start of httpd.
|
||||
"""
|
||||
try:
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
super().config_test()
|
||||
except errors.MisconfigurationError:
|
||||
self._try_restart_fedora()
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return FedoraParser(
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.options.server_root, self.options.vhost_root,
|
||||
self.version, configurator=self)
|
||||
|
||||
def _try_restart_fedora(self):
|
||||
@@ -60,7 +51,7 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
super().config_test()
|
||||
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
@@ -68,10 +59,12 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
instead of httpd and so take advantages of this new bash script in newer versions
|
||||
of Fedora to restart httpd.
|
||||
"""
|
||||
super(FedoraConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd"][0] = 'apachectl'
|
||||
self.options["restart_cmd_alt"][0] = 'apachectl'
|
||||
self.options["conftest_cmd"][0] = 'apachectl'
|
||||
super()._prepare_options()
|
||||
self.options.restart_cmd[0] = 'apachectl'
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for Fedora.")
|
||||
self.options.restart_cmd_alt[0] = 'apachectl'
|
||||
self.options.conftest_cmd[0] = 'apachectl'
|
||||
|
||||
|
||||
class FedoraParser(parser.ApacheParser):
|
||||
@@ -79,16 +72,16 @@ class FedoraParser(parser.ApacheParser):
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Fedora 29+ specific configuration file for Apache
|
||||
self.sysconfig_filep = "/etc/sysconfig/httpd"
|
||||
super(FedoraParser, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
# Opportunistic, works if SELinux not enforced
|
||||
super(FedoraParser, self).update_runtime_variables()
|
||||
super().update_runtime_variables()
|
||||
self._parse_sysconfig_var()
|
||||
|
||||
def _parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from Fedora configuration file """
|
||||
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
for k, v in defines.items():
|
||||
self.variables[k] = v
|
||||
|
||||
@@ -1,33 +1,19 @@
|
||||
""" Distribution specific override class for Gentoo Linux """
|
||||
import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class GentooConfigurator(configurator.ApacheConfigurator):
|
||||
"""Gentoo specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
OS_DEFAULTS = OsOptions(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/vhosts.d",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
restart_cmd_alt=['apache2ctl', 'restart'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2/vhosts.d",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
def _prepare_options(self):
|
||||
@@ -35,13 +21,15 @@ class GentooConfigurator(configurator.ApacheConfigurator):
|
||||
Override the options dictionary initialization in order to support
|
||||
alternative restart cmd used in Gentoo.
|
||||
"""
|
||||
super(GentooConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd_alt"][0] = self.option("ctl")
|
||||
super()._prepare_options()
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for Gentoo.")
|
||||
self.options.restart_cmd_alt[0] = self.options.ctl
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return GentooParser(
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.options.server_root, self.options.vhost_root,
|
||||
self.version, configurator=self)
|
||||
|
||||
|
||||
@@ -50,7 +38,7 @@ class GentooParser(parser.ApacheParser):
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Gentoo specific configuration file for Apache2
|
||||
self.apacheconfig_filep = "/etc/conf.d/apache2"
|
||||
super(GentooParser, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
@@ -61,12 +49,12 @@ class GentooParser(parser.ApacheParser):
|
||||
""" Parses Apache CLI options from Gentoo configuration file """
|
||||
defines = apache_util.parse_define_file(self.apacheconfig_filep,
|
||||
"APACHE2_OPTS")
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
for k, v in defines.items():
|
||||
self.variables[k] = v
|
||||
|
||||
def update_modules(self):
|
||||
"""Get loaded modules from httpd process, and add them to DOM"""
|
||||
mod_cmd = [self.configurator.option("ctl"), "modules"]
|
||||
mod_cmd = [self.configurator.options.ctl, "modules"]
|
||||
matches = apache_util.parse_from_subprocess(mod_cmd, r"(.*)_module")
|
||||
for mod in matches:
|
||||
self.add_mod(mod.strip())
|
||||
|
||||
@@ -1,28 +1,19 @@
|
||||
""" Distribution specific override class for OpenSUSE """
|
||||
import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class OpenSUSEConfigurator(configurator.ApacheConfigurator):
|
||||
"""OpenSUSE specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
OS_DEFAULTS = OsOptions(
|
||||
vhost_root="/etc/apache2/vhosts.d",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
ctl="apachectl",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod="a2enmod",
|
||||
dismod="a2dismod",
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2/vhosts.d",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
19
certbot-apache/certbot_apache/_internal/override_void.py
Normal file
@@ -0,0 +1,19 @@
|
||||
""" Distribution specific override class for Void Linux """
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
class VoidConfigurator(configurator.ApacheConfigurator):
|
||||
"""Void Linux specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
server_root="/etc/apache",
|
||||
vhost_root="/etc/apache/extra",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/httpd",
|
||||
ctl="apachectl",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
challenge_location="/etc/apache/extra",
|
||||
)
|
||||
@@ -3,21 +3,24 @@ import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
from typing import Dict
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
|
||||
import six
|
||||
|
||||
from acme.magic_typing import Dict
|
||||
from acme.magic_typing import List
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import constants
|
||||
|
||||
try:
|
||||
from augeas import Augeas
|
||||
except ImportError: # pragma: no cover
|
||||
Augeas = None
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApacheParser(object):
|
||||
class ApacheParser:
|
||||
"""Class handles the fine details of parsing the Apache Configuration.
|
||||
|
||||
.. todo:: Make parsing general... remove sites-available etc...
|
||||
@@ -42,8 +45,7 @@ class ApacheParser(object):
|
||||
self.configurator = configurator
|
||||
|
||||
# Initialize augeas
|
||||
self.aug = None
|
||||
self.init_augeas()
|
||||
self.aug = init_augeas()
|
||||
|
||||
if not self.check_aug_version():
|
||||
raise errors.NotSupportedError(
|
||||
@@ -51,9 +53,9 @@ class ApacheParser(object):
|
||||
"version 1.2.0 or higher, please make sure you have you have "
|
||||
"those installed.")
|
||||
|
||||
self.modules = {} # type: Dict[str, str]
|
||||
self.parser_paths = {} # type: Dict[str, List[str]]
|
||||
self.variables = {} # type: Dict[str, str]
|
||||
self.modules: Dict[str, Optional[str]] = {}
|
||||
self.parser_paths: Dict[str, List[str]] = {}
|
||||
self.variables: Dict[str, str] = {}
|
||||
|
||||
# Find configuration root and make sure augeas can parse it.
|
||||
self.root = os.path.abspath(root)
|
||||
@@ -79,30 +81,13 @@ class ApacheParser(object):
|
||||
# Must also attempt to parse additional virtual host root
|
||||
if vhostroot:
|
||||
self.parse_file(os.path.abspath(vhostroot) + "/" +
|
||||
self.configurator.option("vhost_files"))
|
||||
self.configurator.options.vhost_files)
|
||||
|
||||
# check to see if there were unparsed define statements
|
||||
if version < (2, 4):
|
||||
if self.find_dir("Define", exclude=False):
|
||||
raise errors.PluginError("Error parsing runtime variables")
|
||||
|
||||
def init_augeas(self):
|
||||
""" Initialize the actual Augeas instance """
|
||||
|
||||
try:
|
||||
import augeas
|
||||
except ImportError: # pragma: no cover
|
||||
raise errors.NoInstallationError("Problem in Augeas installation")
|
||||
|
||||
self.aug = augeas.Augeas(
|
||||
# specify a directory to load our preferred lens from
|
||||
loadpath=constants.AUGEAS_LENS_DIR,
|
||||
# Do not save backup (we do it ourselves), do not load
|
||||
# anything by default
|
||||
flags=(augeas.Augeas.NONE |
|
||||
augeas.Augeas.NO_MODL_AUTOLOAD |
|
||||
augeas.Augeas.ENABLE_SPAN))
|
||||
|
||||
def check_parsing_errors(self, lens):
|
||||
"""Verify Augeas can parse all of the lens files.
|
||||
|
||||
@@ -266,7 +251,7 @@ class ApacheParser(object):
|
||||
the iteration issue. Else... parse and enable mods at same time.
|
||||
|
||||
"""
|
||||
mods = {} # type: Dict[str, str]
|
||||
mods: Dict[str, str] = {}
|
||||
matches = self.find_dir("LoadModule")
|
||||
iterator = iter(matches)
|
||||
# Make sure prev_size != cur_size for do: while: iteration
|
||||
@@ -275,7 +260,7 @@ class ApacheParser(object):
|
||||
while len(mods) != prev_size:
|
||||
prev_size = len(mods)
|
||||
|
||||
for match_name, match_filename in six.moves.zip(
|
||||
for match_name, match_filename in zip(
|
||||
iterator, iterator):
|
||||
mod_name = self.get_arg(match_name)
|
||||
mod_filename = self.get_arg(match_filename)
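zip(iterator, iterator) over a single iterator consumes it pairwise, which is how the LoadModule matches (name, path, name, path, ...) are grouped here; six.moves.zip was only needed for lazy behavior on Python 2. A quick illustration:

matches = ["ssl_module", "modules/mod_ssl.so",
           "rewrite_module", "modules/mod_rewrite.so"]
it = iter(matches)
for name, path in zip(it, it):   # both positions pull from the same iterator
    print(name, "->", path)
# ssl_module -> modules/mod_ssl.so
# rewrite_module -> modules/mod_rewrite.so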
|
||||
@@ -297,7 +282,7 @@ class ApacheParser(object):
|
||||
def update_defines(self):
|
||||
"""Updates the dictionary of known variables in the configuration"""
|
||||
|
||||
self.variables = apache_util.parse_defines(self.configurator.option("ctl"))
|
||||
self.variables = apache_util.parse_defines(self.configurator.options.ctl)
|
||||
|
||||
def update_includes(self):
|
||||
"""Get includes from httpd process, and add them to DOM if needed"""
|
||||
@@ -307,7 +292,7 @@ class ApacheParser(object):
|
||||
# configuration files
|
||||
_ = self.find_dir("Include")
|
||||
|
||||
matches = apache_util.parse_includes(self.configurator.option("ctl"))
|
||||
matches = apache_util.parse_includes(self.configurator.options.ctl)
|
||||
if matches:
|
||||
for i in matches:
|
||||
if not self.parsed_in_current(i):
|
||||
@@ -316,7 +301,7 @@ class ApacheParser(object):
|
||||
def update_modules(self):
|
||||
"""Get loaded modules from httpd process, and add them to DOM"""
|
||||
|
||||
matches = apache_util.parse_modules(self.configurator.option("ctl"))
|
||||
matches = apache_util.parse_modules(self.configurator.options.ctl)
|
||||
for mod in matches:
|
||||
self.add_mod(mod.strip())
|
||||
|
||||
@@ -455,7 +440,11 @@ class ApacheParser(object):
|
||||
:type args: list or str
|
||||
"""
|
||||
first_dir = aug_conf_path + "/directive[1]"
|
||||
if self.aug.get(first_dir):
|
||||
self.aug.insert(first_dir, "directive", True)
|
||||
else:
|
||||
self.aug.set(first_dir, "directive")
|
||||
|
||||
self.aug.set(first_dir, dirname)
|
||||
if isinstance(args, list):
|
||||
for i, value in enumerate(args, 1):
|
||||
@@ -553,7 +542,7 @@ class ApacheParser(object):
|
||||
else:
|
||||
arg_suffix = "/*[self::arg=~regexp('%s')]" % case_i(arg)
|
||||
|
||||
ordered_matches = [] # type: List[str]
|
||||
ordered_matches: List[str] = []
|
||||
|
||||
# TODO: Wildcards should be included in alphabetical order
|
||||
# https://httpd.apache.org/docs/2.4/mod/core.html#include
|
||||
@@ -738,9 +727,6 @@ class ApacheParser(object):
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
if sys.version_info < (3, 6):
|
||||
# This strips off final /Z(?ms)
|
||||
return fnmatch.translate(clean_fn_match)[:-7] # pragma: no cover
|
||||
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
|
||||
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
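The branch for Python < 3.6 is removed because fnmatch.translate now always wraps its result as (?s:...)\Z, so slicing off the first four and last three characters leaves the bare expression. A quick check of that assumption:

import fnmatch

pattern = fnmatch.translate("*.load")
print(pattern)         # (?s:.*\.load)\Z on Python 3.6+
print(pattern[4:-3])   # .*\.load -- the bare expression the parser wants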
|
||||
|
||||
@@ -955,3 +941,19 @@ def get_aug_path(file_path):
|
||||
|
||||
"""
|
||||
return "/files%s" % file_path
|
||||
|
||||
|
||||
def init_augeas() -> Augeas:
|
||||
""" Initialize the actual Augeas instance """
|
||||
|
||||
if not Augeas: # pragma: no cover
|
||||
raise errors.NoInstallationError("Problem in Augeas installation")
|
||||
|
||||
return Augeas(
|
||||
# specify a directory to load our preferred lens from
|
||||
loadpath=constants.AUGEAS_LENS_DIR,
|
||||
# Do not save backup (we do it ourselves), do not load
|
||||
# anything by default
|
||||
flags=(Augeas.NONE |
|
||||
Augeas.NO_MODL_AUTOLOAD |
|
||||
Augeas.ENABLE_SPAN))
|
||||
|
||||
0
certbot-apache/certbot_apache/py.typed
Normal file
@@ -1,3 +0,0 @@
|
||||
# Remember to update setup.py to match the package versions below.
|
||||
acme[dev]==0.29.0
|
||||
certbot[dev]==1.6.0
|
||||
@@ -1,32 +1,18 @@
|
||||
from distutils.version import LooseVersion
|
||||
import sys
|
||||
|
||||
from setuptools import __version__ as setuptools_version
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '1.11.0.dev0'
|
||||
version = '1.22.0.dev0'
|
||||
|
||||
# Remember to update local-oldest-requirements.txt when changing the minimum
|
||||
# acme/certbot version.
|
||||
install_requires = [
|
||||
'acme>=0.29.0',
|
||||
'certbot>=1.6.0',
|
||||
# We specify the minimum acme and certbot version as the current plugin
|
||||
# version for simplicity. See
|
||||
# https://github.com/certbot/certbot/issues/8761 for more info.
|
||||
f'acme>={version}',
|
||||
f'certbot>={version}',
|
||||
'python-augeas',
|
||||
'setuptools',
|
||||
'zope.component',
|
||||
'zope.interface',
|
||||
'setuptools>=39.0.1',
|
||||
]
|
||||
|
||||
setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
|
||||
if setuptools_known_environment_markers:
|
||||
install_requires.append('mock ; python_version < "3.3"')
|
||||
elif 'bdist_wheel' in sys.argv[1:]:
|
||||
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
|
||||
'of setuptools. Version 36.2+ of setuptools is required.')
|
||||
elif sys.version_info < (3,3):
|
||||
install_requires.append('mock')
|
||||
|
||||
dev_extras = [
|
||||
'apacheconfig>=0.3.2',
|
||||
]
|
||||
@@ -37,9 +23,9 @@ setup(
|
||||
description="Apache plugin for Certbot",
|
||||
url='https://github.com/letsencrypt/letsencrypt',
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
author_email='certbot-dev@eff.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
|
||||
python_requires='>=3.6',
|
||||
classifiers=[
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Environment :: Plugins',
|
||||
@@ -47,13 +33,12 @@ setup(
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
"""Tests for AugeasParserNode classes"""
|
||||
from typing import List
|
||||
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
@@ -27,7 +29,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
|
||||
"""Test AugeasParserNode using available test configurations"""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(AugeasParserNodeTest, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
with mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.get_parsernode_root") as mock_parsernode:
|
||||
mock_parsernode.side_effect = _get_augeasnode_mock(
|
||||
@@ -107,7 +109,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
|
||||
|
||||
def test_set_parameters(self):
|
||||
servernames = self.config.parser_root.find_directives("servername")
|
||||
names = [] # type: List[str]
|
||||
names: List[str] = []
|
||||
for servername in servernames:
|
||||
names += servername.parameters
|
||||
self.assertFalse("going_to_set_this" in names)
|
||||
|
||||
@@ -7,7 +7,6 @@ try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
import six # pylint: disable=unused-import # six is used in mock.patch()
|
||||
|
||||
from certbot import errors
|
||||
from certbot_apache._internal import constants
|
||||
@@ -19,7 +18,7 @@ class AutoHSTSTest(util.ApacheTest):
|
||||
# pylint: disable=protected-access
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(AutoHSTSTest, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
@@ -147,7 +146,7 @@ class AutoHSTSTest(util.ApacheTest):
|
||||
@mock.patch("certbot_apache._internal.display_ops.select_vhost")
|
||||
def test_autohsts_no_ssl_vhost(self, mock_select):
|
||||
mock_select.return_value = self.vh_truth[0]
|
||||
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
|
||||
with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
|
||||
self.assertRaises(errors.PluginError,
|
||||
self.config.enable_autohsts,
|
||||
mock.MagicMock(), "invalid.example.com")
|
||||
@@ -180,7 +179,7 @@ class AutoHSTSTest(util.ApacheTest):
|
||||
self.config._autohsts_fetch_state()
|
||||
self.config._autohsts["orphan_id"] = {"laststep": 999, "timestamp": 0}
|
||||
self.config._autohsts_save_state()
|
||||
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
|
||||
with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
|
||||
self.config.deploy_autohsts(mock.MagicMock())
|
||||
self.assertTrue(mock_log.called)
|
||||
self.assertTrue(
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Test for certbot_apache._internal.configurator for CentOS 6 overrides"""
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
from certbot.compat import os
|
||||
from certbot.errors import MisconfigurationError
|
||||
@@ -36,7 +37,7 @@ class CentOS6Tests(util.ApacheTest):
|
||||
test_dir = "centos6_apache/apache"
|
||||
config_root = "centos6_apache/apache/httpd"
|
||||
vhost_root = "centos6_apache/apache/httpd/conf.d"
|
||||
super(CentOS6Tests, self).setUp(test_dir=test_dir,
|
||||
super().setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
@@ -65,7 +66,8 @@ class CentOS6Tests(util.ApacheTest):
|
||||
raise Exception("Missed: %s" % vhost) # pragma: no cover
|
||||
self.assertEqual(found, 2)
|
||||
|
||||
def test_loadmod_default(self):
|
||||
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
|
||||
def test_loadmod_default(self, unused_mock_notify):
|
||||
ssl_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
self.assertEqual(len(ssl_loadmods), 1)
|
||||
@@ -95,7 +97,8 @@ class CentOS6Tests(util.ApacheTest):
|
||||
ifmod_args = self.config.parser.get_all_args(lm[:-17])
|
||||
self.assertTrue("!mod_ssl.c" in ifmod_args)
|
||||
|
||||
def test_loadmod_multiple(self):
|
||||
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
|
||||
def test_loadmod_multiple(self, unused_mock_notify):
|
||||
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
# Adds another LoadModule to main httpd.conf in addtition to ssl.conf
|
||||
self.config.parser.add_dir(self.config.parser.loc["default"], "LoadModule",
|
||||
@@ -115,7 +118,8 @@ class CentOS6Tests(util.ApacheTest):
|
||||
for mod in post_loadmods:
|
||||
self.assertTrue(self.config.parser.not_modssl_ifmodule(mod)) #pylint: disable=no-member
|
||||
|
||||
def test_loadmod_rootconf_exists(self):
|
||||
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
|
||||
def test_loadmod_rootconf_exists(self, unused_mock_notify):
|
||||
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
rootconf_ifmod = self.config.parser.get_ifmod(
|
||||
parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
@@ -142,7 +146,8 @@ class CentOS6Tests(util.ApacheTest):
|
||||
self.config.parser.get_all_args(mods[0][:-7]),
|
||||
sslmod_args)
|
||||
|
||||
def test_neg_loadmod_already_on_path(self):
|
||||
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
|
||||
def test_neg_loadmod_already_on_path(self, unused_mock_notify):
|
||||
loadmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
ifmod = self.config.parser.get_ifmod(
|
||||
self.vh_truth[1].path, "!mod_ssl.c", beginning=True)
|
||||
@@ -185,7 +190,8 @@ class CentOS6Tests(util.ApacheTest):
|
||||
# Make sure that none was changed
|
||||
self.assertEqual(pre_matches, post_matches)
|
||||
|
||||
def test_loadmod_not_found(self):
|
||||
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
|
||||
def test_loadmod_not_found(self, unused_mock_notify):
|
||||
# Remove all existing LoadModule ssl_module... directives
|
||||
orig_loadmods = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module",
|
||||
|
||||
@@ -41,7 +41,7 @@ class FedoraRestartTest(util.ApacheTest):
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
|
||||
super().setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
self.config = util.get_apache_configurator(
|
||||
@@ -96,7 +96,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super(MultipleVhostsTestCentOS, self).setUp(test_dir=test_dir,
|
||||
super().setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ class ComplexParserTest(util.ParserTest):
|
||||
"""Apache Parser Test."""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(ComplexParserTest, self).setUp(
|
||||
super().setUp(
|
||||
"complex_parsing", "complex_parsing")
|
||||
|
||||
self.setup_variables()
|
||||
|
||||
@@ -16,7 +16,7 @@ class ConfiguratorReverterTest(util.ApacheTest):
|
||||
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(ConfiguratorReverterTest, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
|
||||
@@ -10,7 +10,6 @@ try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
import six # pylint: disable=unused-import # six is used in mock.patch()
|
||||
|
||||
from acme import challenges
|
||||
from certbot import achallenges
|
||||
@@ -31,7 +30,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"""Test two standard well-configured HTTP vhosts."""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(MultipleVhostsTest, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
@@ -104,9 +103,9 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"handle_modules", "handle_sites", "ctl"]
|
||||
exp = {}
|
||||
|
||||
for k in ApacheConfigurator.OS_DEFAULTS:
|
||||
for k in ApacheConfigurator.OS_DEFAULTS.__dict__.keys():
|
||||
if k in parserargs:
|
||||
exp[k.replace("_", "-")] = ApacheConfigurator.OS_DEFAULTS[k]
|
||||
exp[k.replace("_", "-")] = getattr(ApacheConfigurator.OS_DEFAULTS, k)
|
||||
# Special cases
|
||||
exp["vhost-root"] = None
|
||||
|
||||
@@ -129,16 +128,15 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
def test_all_configurators_defaults_defined(self):
|
||||
from certbot_apache._internal.entrypoint import OVERRIDE_CLASSES
|
||||
from certbot_apache._internal.configurator import ApacheConfigurator
|
||||
parameters = set(ApacheConfigurator.OS_DEFAULTS.keys())
|
||||
parameters = set(ApacheConfigurator.OS_DEFAULTS.__dict__.keys())
|
||||
for cls in OVERRIDE_CLASSES.values():
|
||||
self.assertTrue(parameters.issubset(set(cls.OS_DEFAULTS.keys())))
|
||||
self.assertTrue(parameters.issubset(set(cls.OS_DEFAULTS.__dict__.keys())))
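With OS_DEFAULTS now an object, the tests enumerate its fields via __dict__ and read values with getattr, which for a plain options instance behaves like iterating the old dict. A tiny illustration with a made-up options object:

class Opts:
    def __init__(self):
        self.server_root = "/etc/httpd"
        self.ctl = "apachectl"

defaults = Opts()
for key in defaults.__dict__:               # field names, like the old dict keys
    print(key, "=", getattr(defaults, key))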
|
||||
|
||||
def test_constant(self):
|
||||
self.assertTrue("debian_apache_2_4/multiple_vhosts/apache" in
|
||||
self.config.option("server_root"))
|
||||
self.assertEqual(self.config.option("nonexistent"), None)
|
||||
self.config.options.server_root)
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
@certbot_util.patch_display_util()
|
||||
def test_get_all_names(self, mock_getutility):
|
||||
mock_utility = mock_getutility()
|
||||
mock_utility.notification = mock.MagicMock(return_value=True)
|
||||
@@ -147,7 +145,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"nonsym.link", "vhost.in.rootconf", "www.certbot.demo",
|
||||
"duplicate.example.com"})
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
@certbot_util.patch_display_util()
|
||||
@mock.patch("certbot_apache._internal.configurator.socket.gethostbyaddr")
|
||||
def test_get_all_names_addrs(self, mock_gethost, mock_getutility):
|
||||
mock_gethost.side_effect = [("google.com", "", ""), socket.error]
|
||||
@@ -339,7 +337,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
vhosts = self.config._non_default_vhosts(self.config.vhosts)
|
||||
self.assertEqual(len(vhosts), 10)
|
||||
|
||||
def test_deploy_cert_enable_new_vhost(self):
|
||||
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
|
||||
def test_deploy_cert_enable_new_vhost(self, unused_mock_notify):
|
||||
# Create
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
|
||||
self.config.parser.modules["ssl_module"] = None
|
||||
@@ -377,7 +376,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.fail("Include shouldn't be added, as patched find_dir 'finds' existing one") \
|
||||
# pragma: no cover
|
||||
|
||||
def test_deploy_cert(self):
|
||||
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
|
||||
def test_deploy_cert(self, unused_mock_notify):
|
||||
self.config.parser.modules["ssl_module"] = None
|
||||
self.config.parser.modules["mod_ssl.c"] = None
|
||||
self.config.parser.modules["socache_shmcb_module"] = None
|
||||
@@ -726,7 +726,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# This calls open
|
||||
self.config.reverter.register_file_creation = mock.Mock()
|
||||
mock_open.side_effect = IOError
|
||||
with mock.patch("six.moves.builtins.open", mock_open):
|
||||
with mock.patch("builtins.open", mock_open):
|
||||
self.assertRaises(
|
||||
errors.PluginError,
|
||||
self.config.make_vhost_ssl, self.vh_truth[0])
|
||||
@@ -893,7 +893,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.enhance, "certbot.demo", "unknown_enhancement")
|
||||
|
||||
def test_enhance_no_ssl_vhost(self):
|
||||
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
|
||||
with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
|
||||
self.assertRaises(errors.PluginError, self.config.enhance,
|
||||
"certbot.demo", "redirect")
|
||||
# Check that correct logger.warning was printed
|
||||
@@ -1292,7 +1292,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
os.path.basename(inc_path) in self.config.parser.existing_paths[
|
||||
os.path.dirname(inc_path)])
|
||||
|
||||
def test_deploy_cert_not_parsed_path(self):
|
||||
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
|
||||
def test_deploy_cert_not_parsed_path(self, unused_mock_notify):
|
||||
# Make sure that we add include to root config for vhosts when
|
||||
# handle-sites is false
|
||||
self.config.parser.modules["ssl_module"] = None
|
||||
@@ -1388,7 +1389,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.assertEqual(vhs[0], self.vh_truth[7])
|
||||
|
||||
|
||||
def test_deploy_cert_wildcard(self):
|
||||
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
|
||||
def test_deploy_cert_wildcard(self, unused_mock_notify):
|
||||
# pylint: disable=protected-access
|
||||
mock_choose_vhosts = mock.MagicMock()
|
||||
mock_choose_vhosts.return_value = [self.vh_truth[7]]
|
||||
@@ -1478,7 +1480,7 @@ class AugeasVhostsTest(util.ApacheTest):
|
||||
td = "debian_apache_2_4/augeas_vhosts"
|
||||
cr = "debian_apache_2_4/augeas_vhosts/apache2"
|
||||
vr = "debian_apache_2_4/augeas_vhosts/apache2/sites-available"
|
||||
super(AugeasVhostsTest, self).setUp(test_dir=td,
|
||||
super().setUp(test_dir=td,
|
||||
config_root=cr,
|
||||
vhost_root=vr)
|
||||
|
||||
@@ -1557,7 +1559,7 @@ class MultiVhostsTest(util.ApacheTest):
|
||||
td = "debian_apache_2_4/multi_vhosts"
|
||||
cr = "debian_apache_2_4/multi_vhosts/apache2"
|
||||
vr = "debian_apache_2_4/multi_vhosts/apache2/sites-available"
|
||||
super(MultiVhostsTest, self).setUp(test_dir=td,
|
||||
super().setUp(test_dir=td,
|
||||
config_root=cr,
|
||||
vhost_root=vr)
|
||||
|
||||
@@ -1608,8 +1610,8 @@ class MultiVhostsTest(util.ApacheTest):
|
||||
self.assertEqual(self.config._get_new_vh_path(without_index, both),
|
||||
with_index_2[0])
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
def test_make_vhost_ssl_with_existing_rewrite_rule(self, mock_get_utility):
|
||||
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
|
||||
def test_make_vhost_ssl_with_existing_rewrite_rule(self, mock_notify):
|
||||
self.config.parser.modules["rewrite_module"] = None
|
||||
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[4])
|
||||
@@ -1625,11 +1627,11 @@ class MultiVhostsTest(util.ApacheTest):
|
||||
"\"http://new.example.com/docs/$1\" [R,L]")
|
||||
self.assertTrue(commented_rewrite_rule in conf_text)
|
||||
self.assertTrue(uncommented_rewrite_rule in conf_text)
|
||||
mock_get_utility().add_message.assert_called_once_with(mock.ANY,
|
||||
mock.ANY)
|
||||
self.assertEqual(mock_notify.call_count, 1)
|
||||
self.assertIn("Some rewrite rules", mock_notify.call_args[0][0])
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
def test_make_vhost_ssl_with_existing_rewrite_conds(self, mock_get_utility):
|
||||
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
|
||||
def test_make_vhost_ssl_with_existing_rewrite_conds(self, mock_notify):
|
||||
self.config.parser.modules["rewrite_module"] = None
|
||||
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[3])
|
||||
@@ -1654,15 +1656,15 @@ class MultiVhostsTest(util.ApacheTest):
|
||||
self.assertTrue(commented_cond1 in conf_line_set)
|
||||
self.assertTrue(commented_cond2 in conf_line_set)
|
||||
self.assertTrue(commented_rewrite_rule in conf_line_set)
|
||||
mock_get_utility().add_message.assert_called_once_with(mock.ANY,
|
||||
mock.ANY)
|
||||
self.assertEqual(mock_notify.call_count, 1)
|
||||
self.assertIn("Some rewrite rules", mock_notify.call_args[0][0])
|
||||
|
||||
|
||||
class InstallSslOptionsConfTest(util.ApacheTest):
|
||||
"""Test that the options-ssl-nginx.conf file is installed and updated properly."""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(InstallSslOptionsConfTest, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
@@ -1775,7 +1777,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
|
||||
# ssl_module statically linked
|
||||
self.config._openssl_version = None
|
||||
self.config.parser.modules['ssl_module'] = None
|
||||
self.config.options['bin'] = '/fake/path/to/httpd'
|
||||
self.config.options.bin = '/fake/path/to/httpd'
|
||||
with mock.patch("certbot_apache._internal.configurator."
|
||||
"ApacheConfigurator._open_module_file") as mock_omf:
|
||||
mock_omf.return_value = some_string_contents
|
||||
@@ -1811,7 +1813,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
|
||||
|
||||
# When ssl_module is statically linked but --apache-bin not provided
|
||||
self.config._openssl_version = None
|
||||
self.config.options['bin'] = None
|
||||
self.config.options.bin = None
|
||||
self.config.parser.modules['ssl_module'] = None
|
||||
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
|
||||
self.assertEqual(self.config.openssl_version(), None)
|
||||
@@ -1834,7 +1836,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
|
||||
|
||||
def test_open_module_file(self):
|
||||
mock_open = mock.mock_open(read_data="testing 12 3")
|
||||
with mock.patch("six.moves.builtins.open", mock_open):
|
||||
with mock.patch("builtins.open", mock_open):
|
||||
self.assertEqual(self.config._open_module_file("/nonsense/"), "testing 12 3")
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -9,6 +9,7 @@ except ImportError: # pragma: no cover
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
from certbot.tests import util as certbot_util
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import obj
|
||||
import util
|
||||
@@ -20,7 +21,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
_multiprocess_can_split_ = True
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(MultipleVhostsTestDebian, self).setUp()
|
||||
super().setUp()
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="debian")
|
||||
@@ -49,10 +50,11 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
|
||||
@mock.patch("certbot.util.run_script")
|
||||
@mock.patch("certbot.util.exe_exists")
|
||||
@mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
|
||||
def test_enable_mod(self, mock_popen, mock_exe_exists, mock_run_script):
|
||||
mock_popen().communicate.return_value = ("Define: DUMP_RUN_CFG", "")
|
||||
mock_popen().returncode = 0
|
||||
@mock.patch("certbot_apache._internal.apache_util.subprocess.run")
|
||||
def test_enable_mod(self, mock_run, mock_exe_exists, mock_run_script):
|
||||
mock_run.return_value.stdout = "Define: DUMP_RUN_CFG"
|
||||
mock_run.return_value.stderr = ""
|
||||
mock_run.return_value.returncode = 0
|
||||
mock_exe_exists.return_value = True
|
||||
|
||||
self.config.enable_mod("ssl")
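The test now patches subprocess.run instead of subprocess.Popen, matching apache_util's move to run(); stdout, stderr and returncode are plain attributes of the returned CompletedProcess, so the mock only needs to set attributes on return_value. A minimal sketch with a hypothetical helper:

import subprocess
from unittest import mock

def dump_run_cfg(ctl: str = "apachectl") -> str:
    # Hypothetical helper in the spirit of apache_util's subprocess callers.
    proc = subprocess.run([ctl, "-t", "-D", "DUMP_RUN_CFG"],
                          capture_output=True, text=True, check=False)
    return proc.stdout

with mock.patch("subprocess.run") as mock_run:
    mock_run.return_value.stdout = "Define: DUMP_RUN_CFG"
    mock_run.return_value.stderr = ""
    mock_run.return_value.returncode = 0
    assert "DUMP_RUN_CFG" in dump_run_cfg()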
|
||||
@@ -67,6 +69,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
self.config.parser.modules["ssl_module"] = None
|
||||
self.config.parser.modules["mod_ssl.c"] = None
|
||||
self.assertFalse(ssl_vhost.enabled)
|
||||
with certbot_util.patch_display_util():
|
||||
self.config.deploy_cert(
|
||||
"encryption-example.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
@@ -100,6 +103,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
|
||||
# Get the default 443 vhost
|
||||
self.config.assoc["random.demo"] = self.vh_truth[1]
|
||||
with certbot_util.patch_display_util():
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
|
||||
@@ -25,7 +25,7 @@ class SelectVhostMultiTest(unittest.TestCase):
|
||||
def test_select_no_input(self):
|
||||
self.assertFalse(select_vhost_multiple([]))
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
@certbot_util.patch_display_util()
|
||||
def test_select_correct(self, mock_util):
|
||||
mock_util().checklist.return_value = (
|
||||
display_util.OK, [self.vhosts[3].display_repr(),
|
||||
@@ -37,12 +37,13 @@ class SelectVhostMultiTest(unittest.TestCase):
|
||||
self.assertTrue(self.vhosts[3] in vhs)
|
||||
self.assertFalse(self.vhosts[1] in vhs)
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
@certbot_util.patch_display_util()
|
||||
def test_select_cancel(self, mock_util):
|
||||
mock_util().checklist.return_value = (display_util.CANCEL, "whatever")
|
||||
vhs = select_vhost_multiple([self.vhosts[2], self.vhosts[3]])
|
||||
self.assertFalse(vhs)

class SelectVhostTest(unittest.TestCase):
"""Tests for certbot_apache._internal.display_ops.select_vhost."""

@@ -56,12 +57,12 @@ class SelectVhostTest(unittest.TestCase):
from certbot_apache._internal.display_ops import select_vhost
return select_vhost("example.com", vhosts)

@certbot_util.patch_get_utility()
@certbot_util.patch_display_util()
def test_successful_choice(self, mock_util):
mock_util().menu.return_value = (display_util.OK, 3)
self.assertEqual(self.vhosts[3], self._call(self.vhosts))

@certbot_util.patch_get_utility()
@certbot_util.patch_display_util()
def test_noninteractive(self, mock_util):
mock_util().menu.side_effect = errors.MissingCommandlineFlag("no vhost default")
try:
@@ -69,7 +70,7 @@ class SelectVhostTest(unittest.TestCase):
except errors.MissingCommandlineFlag as e:
self.assertTrue("vhost ambiguity" in str(e))

@certbot_util.patch_get_utility()
@certbot_util.patch_display_util()
def test_more_info_cancel(self, mock_util):
mock_util().menu.side_effect = [
(display_util.CANCEL, -1),
@@ -81,16 +82,15 @@ class SelectVhostTest(unittest.TestCase):
self.assertEqual(self._call([]), None)

@mock.patch("certbot_apache._internal.display_ops.display_util")
@certbot_util.patch_get_utility()
@mock.patch("certbot_apache._internal.display_ops.logger")
def test_small_display(self, mock_logger, mock_util, mock_display_util):
def test_small_display(self, mock_logger, mock_display_util):
mock_display_util.WIDTH = 20
mock_util().menu.return_value = (display_util.OK, 0)
mock_display_util.menu.return_value = (display_util.OK, 0)
self._call(self.vhosts)

self.assertEqual(mock_logger.debug.call_count, 1)

@certbot_util.patch_get_utility()
@certbot_util.patch_display_util()
def test_multiple_names(self, mock_util):
mock_util().menu.return_value = (display_util.OK, 5)


@@ -41,7 +41,7 @@ class EntryPointTest(unittest.TestCase):
with mock.patch("certbot.util.get_os_info") as mock_info:
mock_info.return_value = ("nonexistent", "irrelevant")
with mock.patch("certbot.util.get_systemd_os_like") as mock_like:
mock_like.return_value = ["unknonwn"]
mock_like.return_value = ["unknown"]
self.assertEqual(entrypoint.get_configurator(),
configurator.ApacheConfigurator)


@@ -46,7 +46,7 @@ class FedoraRestartTest(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
@@ -90,7 +90,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super(MultipleVhostsTestFedora, self).setUp(test_dir=test_dir,
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)


@@ -50,7 +50,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
test_dir = "gentoo_apache/apache"
config_root = "gentoo_apache/apache/apache2"
vhost_root = "gentoo_apache/apache/apache2/vhosts.d"
super(MultipleVhostsTestGentoo, self).setUp(test_dir=test_dir,
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)


@@ -1,6 +1,7 @@
"""Test for certbot_apache._internal.http_01."""
import unittest
import errno
from typing import List

try:
import mock
@@ -23,10 +24,10 @@ class ApacheHttp01Test(util.ApacheTest):
"""Test for certbot_apache._internal.http_01.ApacheHttp01."""

def setUp(self, *args, **kwargs): # pylint: disable=arguments-differ
super(ApacheHttp01Test, self).setUp(*args, **kwargs)
super().setUp(*args, **kwargs)

self.account_key = self.rsa512jwk
self.achalls = [] # type: List[achallenges.KeyAuthorizationAnnotatedChallenge]
self.achalls: List[achallenges.KeyAuthorizationAnnotatedChallenge] = []
vh_truth = util.get_vh_truth(
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
# Takes the vhosts for encryption-example.demo, certbot.demo
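Note: the setUp hunk above also replaces a '# type:' comment with an inline variable annotation (PEP 526). Both spell the same type for mypy; the annotation is simply the Python 3 syntax:

from typing import List

names = []  # type: List[str]    # older type-comment style
names: List[str] = []            # equivalent inline annotation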
@@ -124,6 +125,18 @@ class ApacheHttp01Test(util.ApacheTest):
domain="duplicate.example.com", account_key=self.account_key)]
self.common_perform_test(achalls, vhosts)

def test_configure_name_and_blank(self):
domain = "certbot.demo"
vhosts = [v for v in self.config.vhosts if v.name == domain or v.name is None]
achalls = [
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(token=((b'a' * 16))),
"pending"),
domain=domain, account_key=self.account_key),
]
self.common_perform_test(achalls, vhosts)

def test_no_vhost(self):
for achall in self.achalls:
self.http.add_chall(achall)

@@ -16,7 +16,7 @@ class BasicParserTest(util.ParserTest):
"""Apache Parser Test."""

def setUp(self): # pylint: disable=arguments-differ
super(BasicParserTest, self).setUp()
super().setUp()

def tearDown(self):
shutil.rmtree(self.temp_dir)
@@ -105,6 +105,11 @@ class BasicParserTest(util.ParserTest):
for i, match in enumerate(matches):
self.assertEqual(self.parser.aug.get(match), str(i + 1))

for name in ("empty.conf", "no-directives.conf"):
conf = "/files" + os.path.join(self.parser.root, "sites-available", name)
self.parser.add_dir_beginning(conf, "AddDirectiveBeginning", "testBegin")
self.assertTrue(self.parser.find_dir("AddDirectiveBeginning", "testBegin", conf))

def test_empty_arg(self):
self.assertEqual(None,
self.parser.get_arg("/files/whatever/nonexistent"))
@@ -183,6 +188,8 @@ class BasicParserTest(util.ParserTest):
'Define: DUMP_RUN_CFG\n'
'Define: U_MICH\n'
'Define: TLS=443\n'
'Define: WITH_ASSIGNMENT=URL=http://example.com\n'
'Define: EMPTY=\n'
'Define: example_path=Documents/path\n'
'User: name="www-data" id=33 not_used\n'
'Group: name="www-data" id=33 not_used\n'
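Note: the new 'Define: WITH_ASSIGNMENT=URL=http://example.com' and 'Define: EMPTY=' fixture lines exercise variables whose values contain '=' or are empty, so the parser presumably has to split each Define on the first '=' only. A minimal sketch of that splitting logic (illustrative, not the plugin's actual implementation):

def parse_defines(output):
    """Map Define names to values, splitting on the first '=' only."""
    variables = {}
    for line in output.splitlines():
        if not line.startswith("Define: ") or line == "Define: DUMP_RUN_CFG":
            continue
        name, sep, value = line[len("Define: "):].partition("=")
        variables[name] = value if sep else ""
    return variables

assert parse_defines(
    "Define: DUMP_RUN_CFG\n"
    "Define: TLS=443\n"
    "Define: WITH_ASSIGNMENT=URL=http://example.com\n"
    "Define: EMPTY=\n"
)["WITH_ASSIGNMENT"] == "URL=http://example.com"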
@@ -261,7 +268,10 @@ class BasicParserTest(util.ParserTest):
mock_cfg.side_effect = mock_get_vars

expected_vars = {"TEST": "", "U_MICH": "", "TLS": "443",
"example_path": "Documents/path"}
"example_path": "Documents/path",
"WITH_ASSIGNMENT": "URL=http://example.com",
"EMPTY": "",
}

self.parser.modules = {}
with mock.patch(
@@ -296,28 +306,19 @@ class BasicParserTest(util.ParserTest):
# path derived from root configuration Include statements
self.assertEqual(mock_parse.call_count, 1)

@mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
def test_update_runtime_vars_bad_output(self, mock_cfg):
mock_cfg.return_value = "Define: TLS=443=24"
self.parser.update_runtime_variables()

mock_cfg.return_value = "Define: DUMP_RUN_CFG\nDefine: TLS=443=24"
self.assertRaises(
errors.PluginError, self.parser.update_runtime_variables)

@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.option")
@mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
def test_update_runtime_vars_bad_ctl(self, mock_popen, mock_opt):
mock_popen.side_effect = OSError
mock_opt.return_value = "nonexistent"
@mock.patch("certbot_apache._internal.apache_util.subprocess.run")
def test_update_runtime_vars_bad_ctl(self, mock_run):
mock_run.side_effect = OSError
self.assertRaises(
errors.MisconfigurationError,
self.parser.update_runtime_variables)

@mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
def test_update_runtime_vars_bad_exit(self, mock_popen):
mock_popen().communicate.return_value = ("", "")
mock_popen.returncode = -1
@mock.patch("certbot_apache._internal.apache_util.subprocess.run")
def test_update_runtime_vars_bad_exit(self, mock_run):
mock_proc = mock_run.return_value
mock_proc.stdout = ""
mock_proc.stderr = ""
mock_proc.returncode = -1
self.assertRaises(
errors.MisconfigurationError,
self.parser.update_runtime_variables)
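Note: with subprocess.run, the two failure modes covered above (a missing binary raising OSError, and a non-zero exit status) can be handled in one small wrapper. A hedged sketch of what the migrated runtime-config helper could look like; names and messages are illustrative rather than the plugin's exact code:

import subprocess


class MisconfigurationError(Exception):
    """Stand-in for certbot.errors.MisconfigurationError."""


def get_runtime_cfg(ctl="apache2ctl"):
    """Return the server's runtime configuration dump or raise on failure."""
    try:
        proc = subprocess.run([ctl, "-t", "-D", "DUMP_RUN_CFG"],
                              capture_output=True, text=True, check=False)
    except OSError as err:
        raise MisconfigurationError("Could not run {0}: {1}".format(ctl, err))
    if proc.returncode != 0:
        raise MisconfigurationError("{0} exited with code {1}: {2}".format(
            ctl, proc.returncode, proc.stderr))
    return proc.stdout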
@@ -332,14 +333,14 @@ class BasicParserTest(util.ParserTest):

class ParserInitTest(util.ApacheTest):
def setUp(self): # pylint: disable=arguments-differ
super(ParserInitTest, self).setUp()
super().setUp()

def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)

@mock.patch("certbot_apache._internal.parser.ApacheParser.init_augeas")
@mock.patch("certbot_apache._internal.parser.init_augeas")
def test_prepare_no_augeas(self, mock_init_augeas):
from certbot_apache._internal.parser import ApacheParser
mock_init_augeas.side_effect = errors.NoInstallationError

@@ -20,7 +20,7 @@ class ConfiguratorParserNodeTest(util.ApacheTest): # pylint: disable=too-many-p
"""Test AugeasParserNode using available test configurations"""

def setUp(self): # pylint: disable=arguments-differ
super(ConfiguratorParserNodeTest, self).setUp()
super().setUp()

self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir,

@@ -18,7 +18,7 @@ class DummyParserNode(interfaces.ParserNode):
self.dirty = dirty
self.filepath = filepath
self.metadata = metadata
super(DummyParserNode, self).__init__(**kwargs)
super().__init__(**kwargs)

def save(self, msg): # pragma: no cover
"""Save"""
@@ -38,7 +38,7 @@ class DummyCommentNode(DummyParserNode):
"""
comment, kwargs = util.commentnode_kwargs(kwargs)
self.comment = comment
super(DummyCommentNode, self).__init__(**kwargs)
super().__init__(**kwargs)


class DummyDirectiveNode(DummyParserNode):
@@ -54,7 +54,7 @@ class DummyDirectiveNode(DummyParserNode):
self.parameters = parameters
self.enabled = enabled

super(DummyDirectiveNode, self).__init__(**kwargs)
super().__init__(**kwargs)

def set_parameters(self, parameters): # pragma: no cover
"""Set parameters"""

@@ -0,0 +1,5 @@
<VirtualHost *:80>
<Location />
Require all denied
</Location>
</VirtualHost>
@@ -5,16 +5,16 @@ import unittest

import augeas
import josepy as jose

try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
import zope.component

from certbot.compat import os
from certbot.display import util as display_util
from certbot.plugins import common
from certbot.tests import util as test_util
from certbot.display import util as display_util
from certbot_apache._internal import configurator
from certbot_apache._internal import entrypoint
from certbot_apache._internal import obj
@@ -67,10 +67,7 @@ class ParserTest(ApacheTest):
def setUp(self, test_dir="debian_apache_2_4/multiple_vhosts",
config_root="debian_apache_2_4/multiple_vhosts/apache2",
vhost_root="debian_apache_2_4/multiple_vhosts/apache2/sites-available"):
super(ParserTest, self).setUp(test_dir, config_root, vhost_root)

zope.component.provideUtility(display_util.FileDisplay(sys.stdout,
False))
super().setUp(test_dir, config_root, vhost_root)

from certbot_apache._internal.parser import ApacheParser
self.aug = augeas.Augeas(
@@ -123,11 +120,11 @@ def get_apache_configurator(
version=version, use_parsernode=use_parsernode,
openssl_version=openssl_version)
if not conf_vhost_path:
config_class.OS_DEFAULTS["vhost_root"] = vhost_path
config_class.OS_DEFAULTS.vhost_root = vhost_path
else:
# Custom virtualhost path was requested
config.config.apache_vhost_root = conf_vhost_path
config.config.apache_ctl = config_class.OS_DEFAULTS["ctl"]
config.config.apache_ctl = config_class.OS_DEFAULTS.ctl
config.prepare()
return config
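Note: the test helper now reads OS_DEFAULTS.vhost_root and OS_DEFAULTS.ctl as attributes rather than dictionary keys, which suggests OS_DEFAULTS became a structured options object in this refactor. A rough guess at such a container, for illustration only (the plugin's real class and defaults may differ):

from dataclasses import dataclass


@dataclass
class OsOptions:
    # Illustrative fields and defaults only.
    vhost_root: str = "/etc/apache2/sites-available"
    ctl: str = "apache2ctl"


OS_DEFAULTS = OsOptions()
OS_DEFAULTS.vhost_root = "/tmp/test/sites-available"  # what the test helper does
print(OS_DEFAULTS.ctl)  # attribute access replaces OS_DEFAULTS["ctl"]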
certbot-auto (1992 lines): file diff suppressed because it is too large.
@@ -1,5 +1,4 @@
#!/usr/bin/env python
from __future__ import print_function
import os
import sys


@@ -7,14 +7,14 @@ import tempfile
from certbot_integration_tests.utils import certbot_call


class IntegrationTestsContext(object):
class IntegrationTestsContext:
"""General fixture describing a certbot integration tests context"""
def __init__(self, request):
self.request = request

if hasattr(request.config, 'slaveinput'): # Worker node
self.worker_id = request.config.slaveinput['slaveid']
acme_xdist = request.config.slaveinput['acme_xdist']
if hasattr(request.config, 'workerinput'): # Worker node
self.worker_id = request.config.workerinput['workerid']
acme_xdist = request.config.workerinput['acme_xdist']
else: # Primary node
self.worker_id = 'primary'
acme_xdist = request.config.acme_xdist
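Note: pytest-xdist renamed its slave* attributes to worker*: a session is on a worker node when request.config has a workerinput dict, and the id key moved from 'slaveid' to 'workerid'. A small sketch of the detection logic used above:

from types import SimpleNamespace


def worker_id_and_xdist(config, fallback_xdist=None):
    """Return (worker_id, acme_xdist) for a pytest-xdist run (sketch only)."""
    if hasattr(config, "workerinput"):  # running on a worker node
        return config.workerinput["workerid"], config.workerinput["acme_xdist"]
    # primary node: the xdist metadata was stored directly on config
    return "primary", fallback_xdist


cfg = SimpleNamespace(workerinput={"workerid": "gw0", "acme_xdist": {"https_port": 5001}})
print(worker_id_and_xdist(cfg))  # ('gw0', {'https_port': 5001})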
@@ -61,7 +61,7 @@ class IntegrationTestsContext(object):
Execute certbot with given args, not renewing certificates by default.
:param args: args to pass to certbot
:param force_renew: set to False to not renew by default
:return: output of certbot execution
:return: stdout and stderr from certbot execution
"""
command = ['--authenticator', 'standalone', '--installer', 'null']
command.extend(args)
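Note: the docstring change reflects that context.certbot() now returns a (stdout, stderr) pair instead of a single output string, which is why later tests unpack it as stdout, _ = context.certbot([...]). A hedged sketch of a runner with that return shape (the real certbot_call signature and flags may differ):

import subprocess


def run_certbot(args):
    """Run certbot and return (stdout, stderr), mirroring the new contract."""
    command = ["certbot", "--authenticator", "standalone", "--installer", "null"]
    command.extend(args)
    proc = subprocess.run(command, capture_output=True, text=True, check=True)
    return proc.stdout, proc.stderr

# Callers then unpack the pair, e.g.:
# stdout, _ = run_certbot(["certificates"])
# assert "webroot" in stdout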
@@ -1,5 +1,4 @@
"""Module executing integration tests against certbot core."""
from __future__ import print_function

import os
from os.path import exists
@@ -9,19 +8,20 @@ import shutil
import subprocess
import time

from cryptography.hazmat.primitives.asymmetric.ec import SECP256R1, SECP384R1
from cryptography.hazmat.primitives.asymmetric.ec import SECP256R1
from cryptography.hazmat.primitives.asymmetric.ec import SECP384R1
from cryptography.hazmat.primitives.asymmetric.ec import SECP521R1
from cryptography.x509 import NameOID

import pytest

from certbot_integration_tests.certbot_tests import context as certbot_context
from certbot_integration_tests.certbot_tests.assertions import assert_cert_count_for_lineage
from certbot_integration_tests.certbot_tests.assertions import assert_elliptic_key
from certbot_integration_tests.certbot_tests.assertions import assert_rsa_key
from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_owner
from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_permissions
from certbot_integration_tests.certbot_tests.assertions import assert_equals_world_read_permissions
from certbot_integration_tests.certbot_tests.assertions import assert_hook_execution
from certbot_integration_tests.certbot_tests.assertions import assert_rsa_key
from certbot_integration_tests.certbot_tests.assertions import assert_saved_renew_hook
from certbot_integration_tests.certbot_tests.assertions import assert_world_no_permissions
from certbot_integration_tests.certbot_tests.assertions import assert_world_read_permissions
@@ -78,9 +78,9 @@ def test_registration_override(context):

def test_prepare_plugins(context):
"""Test that plugins are correctly instantiated and displayed."""
output = context.certbot(['plugins', '--init', '--prepare'])
stdout, _ = context.certbot(['plugins', '--init', '--prepare'])

assert 'webroot' in output
assert 'webroot' in stdout


def test_http_01(context):
@@ -148,6 +148,17 @@ def test_certonly(context):
"""Test the certonly verb on certbot."""
context.certbot(['certonly', '--cert-name', 'newname', '-d', context.get_domain('newname')])

assert_cert_count_for_lineage(context.config_dir, 'newname', 1)


def test_certonly_webroot(context):
"""Test the certonly verb with webroot plugin"""
with misc.create_http_server(context.http_01_port) as webroot:
certname = context.get_domain('webroot')
context.certbot(['certonly', '-a', 'webroot', '--webroot-path', webroot, '-d', certname])

assert_cert_count_for_lineage(context.config_dir, certname, 1)


def test_auth_and_install_with_csr(context):
"""Test certificate issuance and install using an existing CSR."""
@@ -335,7 +346,8 @@ def test_renew_empty_hook_scripts(context):
for hook_dir in misc.list_renewal_hooks_dirs(context.config_dir):
shutil.rmtree(hook_dir)
os.makedirs(join(hook_dir, 'dir'))
open(join(hook_dir, 'file'), 'w').close()
with open(join(hook_dir, 'file'), 'w'):
pass
context.certbot(['renew'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
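Note: replacing open(path, 'w').close() with a with-block is the change pylint's consider-using-with asks for. Both create or truncate the probe file, but the context manager closes the handle deterministically even if the block is interrupted:

import tempfile
from pathlib import Path

probe = Path(tempfile.mkdtemp()) / "file"

open(probe, "w").close()       # old style: relies on the immediate close

with open(probe, "w"):         # preferred: closed even on error
    pass

assert probe.exists() and probe.stat().st_size == 0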
@@ -357,7 +369,8 @@ def test_renew_hook_override(context):
assert_hook_execution(context.hook_probe, 'deploy')

# Now we override all previous hooks during next renew.
open(context.hook_probe, 'w').close()
with open(context.hook_probe, 'w'):
pass
context.certbot([
'renew', '--cert-name', certname,
'--pre-hook', misc.echo('pre_override', context.hook_probe),
@@ -376,7 +389,8 @@ def test_renew_hook_override(context):
assert_hook_execution(context.hook_probe, 'deploy')

# Expect that this renew will reuse new hooks registered in the previous renew.
open(context.hook_probe, 'w').close()
with open(context.hook_probe, 'w'):
pass
context.certbot(['renew', '--cert-name', certname])

assert_hook_execution(context.hook_probe, 'pre_override')
@@ -396,9 +410,9 @@ def test_invalid_domain_with_dns_challenge(context):
'--manual-cleanup-hook', context.manual_dns_cleanup_hook
])

output = context.certbot(['certificates'])
stdout, _ = context.certbot(['certificates'])

assert context.get_domain('fail-dns1') not in output
assert context.get_domain('fail-dns1') not in stdout


def test_reuse_key(context):
@@ -419,6 +433,21 @@ def test_reuse_key(context):
privkey3 = file.read()
assert privkey2 != privkey3

context.certbot(['--cert-name', certname, '--domains', certname,
'--reuse-key','--force-renewal'])
context.certbot(['renew', '--cert-name', certname, '--no-reuse-key', '--force-renewal'])
context.certbot(['renew', '--cert-name', certname, '--force-renewal'])

with open(join(context.config_dir, 'archive/{0}/privkey4.pem').format(certname), 'r') as file:
privkey4 = file.read()
with open(join(context.config_dir, 'archive/{0}/privkey5.pem').format(certname), 'r') as file:
privkey5 = file.read()
with open(join(context.config_dir, 'archive/{0}/privkey6.pem').format(certname), 'r') as file:
privkey6 = file.read()
assert privkey3 == privkey4
assert privkey4 != privkey5
assert privkey5 != privkey6

with open(join(context.config_dir, 'archive/{0}/cert1.pem').format(certname), 'r') as file:
cert1 = file.read()
with open(join(context.config_dir, 'archive/{0}/cert2.pem').format(certname), 'r') as file:
@@ -476,6 +505,28 @@ def test_default_curve_type(context):
assert_elliptic_key(key1, SECP256R1)


@pytest.mark.parametrize('curve,curve_cls,skip_servers', [
# Curve name, Curve class, ACME servers to skip
('secp256r1', SECP256R1, []),
('secp384r1', SECP384R1, []),
('secp521r1', SECP521R1, ['boulder-v1', 'boulder-v2'])]
)
def test_ecdsa_curves(context, curve, curve_cls, skip_servers):
"""Test issuance for each supported ECDSA curve"""
if context.acme_server in skip_servers:
pytest.skip('ACME server {} does not support ECDSA curve {}'
.format(context.acme_server, curve))

domain = context.get_domain('curve')
context.certbot([
'certonly',
'--key-type', 'ecdsa', '--elliptic-curve', curve,
'--force-renewal', '-d', domain,
])
key = join(context.config_dir, "live", domain, 'privkey.pem')
assert_elliptic_key(key, curve_cls)


def test_renew_with_ec_keys(context):
"""Test proper renew with updated private key complexity."""
certname = context.get_domain('renew')
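Note: assert_elliptic_key presumably loads the PEM key and checks its curve class; one plausible way to express that check with a recent cryptography release (a sketch, not necessarily the helper's real implementation):

from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey, SECP384R1
from cryptography.hazmat.primitives.serialization import load_pem_private_key


def assert_elliptic_key_sketch(path, curve_cls):
    """Assert that the PEM private key at path uses the expected curve."""
    with open(path, "rb") as handle:
        key = load_pem_private_key(handle.read(), password=None)
    assert isinstance(key, EllipticCurvePrivateKey), "not an ECDSA key"
    assert isinstance(key.curve, curve_cls), "unexpected curve {0}".format(key.curve.name)

# e.g. assert_elliptic_key_sketch("privkey.pem", SECP384R1)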
@@ -581,11 +632,11 @@ def test_revoke_and_unregister(context):

context.certbot(['unregister'])

output = context.certbot(['certificates'])
stdout, _ = context.certbot(['certificates'])

assert cert1 not in output
assert cert2 not in output
assert cert3 in output
assert cert1 not in stdout
assert cert2 not in stdout
assert cert3 in stdout


def test_revoke_mutual_exclusive_flags(context):
@@ -597,7 +648,7 @@ def test_revoke_mutual_exclusive_flags(context):
'revoke', '--cert-name', cert,
'--cert-path', join(context.config_dir, 'live', cert, 'fullchain.pem')
])
assert 'Exactly one of --cert-path or --cert-name must be specified' in error.out
assert 'Exactly one of --cert-path or --cert-name must be specified' in error.value.stderr


def test_revoke_multiple_lineages(context):
@@ -652,12 +703,12 @@ def test_wildcard_certificates(context):
def test_ocsp_status_stale(context):
"""Test retrieval of OCSP statuses for staled config"""
sample_data_path = misc.load_sample_data_path(context.workspace)
output = context.certbot(['certificates', '--config-dir', sample_data_path])
stdout, _ = context.certbot(['certificates', '--config-dir', sample_data_path])

assert output.count('TEST_CERT') == 2, ('Did not find two test certs as expected ({0})'
.format(output.count('TEST_CERT')))
assert output.count('EXPIRED') == 2, ('Did not find two expired certs as expected ({0})'
.format(output.count('EXPIRED')))
assert stdout.count('TEST_CERT') == 2, ('Did not find two test certs as expected ({0})'
.format(stdout.count('TEST_CERT')))
assert stdout.count('EXPIRED') == 2, ('Did not find two expired certs as expected ({0})'
.format(stdout.count('EXPIRED')))


def test_ocsp_status_live(context):
@@ -666,20 +717,20 @@ def test_ocsp_status_live(context):

# OSCP 1: Check live certificate OCSP status (VALID)
context.certbot(['--domains', cert])
output = context.certbot(['certificates'])
stdout, _ = context.certbot(['certificates'])

assert output.count('VALID') == 1, 'Expected {0} to be VALID'.format(cert)
assert output.count('EXPIRED') == 0, 'Did not expect {0} to be EXPIRED'.format(cert)
assert stdout.count('VALID') == 1, 'Expected {0} to be VALID'.format(cert)
assert stdout.count('EXPIRED') == 0, 'Did not expect {0} to be EXPIRED'.format(cert)

# OSCP 2: Check live certificate OCSP status (REVOKED)
context.certbot(['revoke', '--cert-name', cert, '--no-delete-after-revoke'])
# Sometimes in oldest tests (using openssl binary and not cryptography), the OCSP status is
# not seen immediately by Certbot as invalid. Waiting few seconds solves this transient issue.
time.sleep(5)
output = context.certbot(['certificates'])
stdout, _ = context.certbot(['certificates'])

assert output.count('INVALID') == 1, 'Expected {0} to be INVALID'.format(cert)
assert output.count('REVOKED') == 1, 'Expected {0} to be REVOKED'.format(cert)
assert stdout.count('INVALID') == 1, 'Expected {0} to be INVALID'.format(cert)
assert stdout.count('REVOKED') == 1, 'Expected {0} to be REVOKED'.format(cert)


def test_ocsp_renew(context):

@@ -6,7 +6,6 @@ for a directory a specific configuration using built-in pytest hooks.

See https://docs.pytest.org/en/latest/reference.html#hook-reference
"""
from __future__ import print_function
import contextlib
import subprocess
import sys
@@ -35,7 +34,7 @@ def pytest_configure(config):
Standard pytest hook used to add a configuration logic for each node of a pytest run.
:param config: the current pytest configuration
"""
if not hasattr(config, 'slaveinput'): # If true, this is the primary node
if not hasattr(config, 'workerinput'): # If true, this is the primary node
with _print_on_err():
_setup_primary_node(config)

@@ -45,8 +44,8 @@ def pytest_configure_node(node):
Standard pytest-xdist hook used to configure a worker node.
:param node: current worker node
"""
node.slaveinput['acme_xdist'] = node.config.acme_xdist
node.slaveinput['dns_xdist'] = node.config.dns_xdist
node.workerinput['acme_xdist'] = node.config.acme_xdist
node.workerinput['dns_xdist'] = node.config.dns_xdist


@contextlib.contextmanager

@@ -11,7 +11,7 @@ from certbot_integration_tests.utils import misc
class IntegrationTestsContext(certbot_context.IntegrationTestsContext):
"""General fixture describing a certbot-nginx integration tests context"""
def __init__(self, request):
super(IntegrationTestsContext, self).__init__(request)
super().__init__(request)

self.nginx_root = os.path.join(self.workspace, 'nginx')
os.mkdir(self.nginx_root)
@@ -29,7 +29,7 @@ class IntegrationTestsContext(certbot_context.IntegrationTestsContext):

def cleanup(self):
self._stop_nginx()
super(IntegrationTestsContext, self).cleanup()
super().cleanup()

def certbot_test_nginx(self, args):
"""
@@ -51,6 +51,7 @@ class IntegrationTestsContext(certbot_context.IntegrationTestsContext):
with open(self.nginx_config_path, 'w') as file:
file.write(self.nginx_config)

# pylint: disable=consider-using-with
process = subprocess.Popen(['nginx', '-c', self.nginx_config_path, '-g', 'daemon off;'])

assert process.poll() is None
Some files were not shown because too many files have changed in this diff.