Compare commits: update-mac...test-windo
169 commits (c52be1eeb5 ... 085967ad29)
@@ -3,6 +3,8 @@ jobs:
 variables:
 - name: IMAGE_NAME
 value: ubuntu-18.04
+- name: PYTHON_VERSION
+value: 3.9
 - group: certbot-common
 strategy:
 matrix:
@@ -12,6 +14,9 @@ jobs:
 linux-py37:
 PYTHON_VERSION: 3.7
 TOXENV: py37
+linux-py38:
+PYTHON_VERSION: 3.8
+TOXENV: py38
 linux-py37-nopin:
 PYTHON_VERSION: 3.7
 TOXENV: py37
@@ -60,16 +65,27 @@ jobs:
 PYTHON_VERSION: 3.8
 TOXENV: integration
 ACME_SERVER: boulder-v2
+linux-boulder-v1-py39-integration:
+PYTHON_VERSION: 3.9
+TOXENV: integration
+ACME_SERVER: boulder-v1
+linux-boulder-v2-py39-integration:
+PYTHON_VERSION: 3.9
+TOXENV: integration
+ACME_SERVER: boulder-v2
 nginx-compat:
 TOXENV: nginx_compat
-le-auto-centos6:
-TOXENV: le_auto_centos6
-le-auto-oraclelinux6:
-TOXENV: le_auto_oraclelinux6
+linux-integration-rfc2136:
+IMAGE_NAME: ubuntu-18.04
+PYTHON_VERSION: 3.8
+TOXENV: integration-dns-rfc2136
 docker-dev:
 TOXENV: docker_dev
-farmtest-apache2:
-PYTHON_VERSION: 3.7
+macos-farmtest-apache2:
+# We run one of these test farm tests on macOS to help ensure the
+# tests continue to work on the platform.
+IMAGE_NAME: macOS-10.15
+PYTHON_VERSION: 3.8
 TOXENV: test-farm-apache2
 farmtest-leauto-upgrades:
 PYTHON_VERSION: 3.7
|
|||||||
@@ -1,42 +1,62 @@
|
|||||||
jobs:
|
jobs:
|
||||||
- job: docker_build
|
# - job: docker_build
|
||||||
pool:
|
# pool:
|
||||||
vmImage: ubuntu-18.04
|
# vmImage: ubuntu-18.04
|
||||||
strategy:
|
# strategy:
|
||||||
matrix:
|
# matrix:
|
||||||
amd64:
|
# amd64:
|
||||||
DOCKER_ARCH: amd64
|
# DOCKER_ARCH: amd64
|
||||||
# Do not run the heavy non-amd64 builds for test branches
|
# # Do not run the heavy non-amd64 builds for test branches
|
||||||
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
# ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
||||||
arm32v6:
|
# arm32v6:
|
||||||
DOCKER_ARCH: arm32v6
|
# DOCKER_ARCH: arm32v6
|
||||||
arm64v8:
|
# arm64v8:
|
||||||
DOCKER_ARCH: arm64v8
|
# DOCKER_ARCH: arm64v8
|
||||||
steps:
|
# steps:
|
||||||
- bash: tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
|
# - bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
|
||||||
displayName: Build the Docker images
|
# displayName: Build the Docker images
|
||||||
# We don't filter for the Docker Hub organization to continue to allow
|
# # We don't filter for the Docker Hub organization to continue to allow
|
||||||
# easy testing of these scripts on forks.
|
# # easy testing of these scripts on forks.
|
||||||
- bash: |
|
# - bash: |
|
||||||
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
|
# set -e
|
||||||
docker save --output images.tar $DOCKER_IMAGES
|
# DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
|
||||||
displayName: Save the Docker images
|
# docker save --output images.tar $DOCKER_IMAGES
|
||||||
# If the name of the tar file or artifact changes, the deploy stage will
|
# displayName: Save the Docker images
|
||||||
# also need to be updated.
|
# # If the name of the tar file or artifact changes, the deploy stage will
|
||||||
- bash: mv images.tar $(Build.ArtifactStagingDirectory)
|
# # also need to be updated.
|
||||||
displayName: Prepare Docker artifact
|
# - bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
|
||||||
- task: PublishPipelineArtifact@1
|
# displayName: Prepare Docker artifact
|
||||||
inputs:
|
# - task: PublishPipelineArtifact@1
|
||||||
path: $(Build.ArtifactStagingDirectory)
|
# inputs:
|
||||||
artifact: docker_$(DOCKER_ARCH)
|
# path: $(Build.ArtifactStagingDirectory)
|
||||||
displayName: Store Docker artifact
|
# artifact: docker_$(DOCKER_ARCH)
|
||||||
|
# displayName: Store Docker artifact
|
||||||
|
# - job: docker_run
|
||||||
|
# dependsOn: docker_build
|
||||||
|
# pool:
|
||||||
|
# vmImage: ubuntu-18.04
|
||||||
|
# steps:
|
||||||
|
# - task: DownloadPipelineArtifact@2
|
||||||
|
# inputs:
|
||||||
|
# artifact: docker_amd64
|
||||||
|
# path: $(Build.SourcesDirectory)
|
||||||
|
# displayName: Retrieve Docker images
|
||||||
|
# - bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
|
||||||
|
# displayName: Load Docker images
|
||||||
|
# - bash: |
|
||||||
|
# set -ex
|
||||||
|
# DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
|
||||||
|
# for DOCKER_IMAGE in ${DOCKER_IMAGES}
|
||||||
|
# do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
|
||||||
|
# done
|
||||||
|
# displayName: Run integration tests for Docker images
|
||||||
- job: installer_build
|
- job: installer_build
|
||||||
pool:
|
pool:
|
||||||
vmImage: vs2017-win2016
|
vmImage: vs2017-win2016
|
||||||
steps:
|
steps:
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: 3.7
|
versionSpec: 3.8
|
||||||
architecture: x86
|
architecture: x86
|
||||||
addToPath: true
|
addToPath: true
|
||||||
- script: python windows-installer/construct.py
|
- script: python windows-installer/construct.py
|
||||||
@@ -79,8 +99,11 @@ jobs:
 path: $(Build.SourcesDirectory)/bin
 displayName: Retrieve Windows installer
 - script: |
-py -3 -m venv venv
+python -m venv venv
+venv\Scripts\python tools\pipstrap.py
 venv\Scripts\python tools\pip_install.py -e certbot-ci
+env:
+PIP_NO_BUILD_ISOLATION: no
 displayName: Prepare Certbot-CI
 - script: |
 set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
@@ -90,89 +113,105 @@ jobs:
|
|||||||
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
||||||
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
|
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
|
||||||
displayName: Run certbot integration tests
|
displayName: Run certbot integration tests
|
||||||
- job: snaps_build
|
# - job: snaps_build
|
||||||
pool:
|
# pool:
|
||||||
vmImage: ubuntu-18.04
|
# vmImage: ubuntu-18.04
|
||||||
timeoutInMinutes: 0
|
# timeoutInMinutes: 0
|
||||||
variables:
|
# variables:
|
||||||
# Do not run the heavy non-amd64 builds for test branches
|
# # Do not run the heavy non-amd64 builds for test branches
|
||||||
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
# ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
||||||
ARCHS: amd64 arm64 armhf
|
# ARCHS: amd64 arm64 armhf
|
||||||
${{ if startsWith(variables['Build.SourceBranchName'], 'test-') }}:
|
# ${{ if startsWith(variables['Build.SourceBranchName'], 'test-') }}:
|
||||||
ARCHS: amd64
|
# ARCHS: amd64
|
||||||
steps:
|
# steps:
|
||||||
- script: |
|
# - script: |
|
||||||
sudo apt-get update
|
# set -e
|
||||||
sudo apt-get install -y --no-install-recommends snapd
|
# sudo apt-get update
|
||||||
sudo snap install --classic snapcraft
|
# sudo apt-get install -y --no-install-recommends snapd
|
||||||
displayName: Install dependencies
|
# sudo snap install --classic snapcraft
|
||||||
- task: UsePythonVersion@0
|
# displayName: Install dependencies
|
||||||
inputs:
|
# - task: UsePythonVersion@0
|
||||||
versionSpec: 3.8
|
# inputs:
|
||||||
addToPath: true
|
# versionSpec: 3.8
|
||||||
- task: DownloadSecureFile@1
|
# addToPath: true
|
||||||
name: credentials
|
# - task: DownloadSecureFile@1
|
||||||
inputs:
|
# name: credentials
|
||||||
secureFile: launchpad-credentials
|
# inputs:
|
||||||
- script: |
|
# secureFile: launchpad-credentials
|
||||||
git config --global user.email "$(Build.RequestedForEmail)"
|
# - script: |
|
||||||
git config --global user.name "$(Build.RequestedFor)"
|
# set -e
|
||||||
mkdir -p ~/.local/share/snapcraft/provider/launchpad
|
# git config --global user.email "$(Build.RequestedForEmail)"
|
||||||
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
|
# git config --global user.name "$(Build.RequestedFor)"
|
||||||
python3 tools/snap/build_remote.py ALL --archs ${ARCHS}
|
# mkdir -p ~/.local/share/snapcraft/provider/launchpad
|
||||||
displayName: Build snaps
|
# cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
|
||||||
- script: |
|
# python3 tools/snap/build_remote.py ALL --archs ${ARCHS} --timeout 19800
|
||||||
mv *.snap $(Build.ArtifactStagingDirectory)
|
# displayName: Build snaps
|
||||||
mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
|
# - script: |
|
||||||
displayName: Prepare artifacts
|
# set -e
|
||||||
- task: PublishPipelineArtifact@1
|
# mv *.snap $(Build.ArtifactStagingDirectory)
|
||||||
inputs:
|
# mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
|
||||||
path: $(Build.ArtifactStagingDirectory)
|
# displayName: Prepare artifacts
|
||||||
artifact: snaps
|
# - task: PublishPipelineArtifact@1
|
||||||
displayName: Store snaps artifacts
|
# inputs:
|
||||||
- job: snap_run
|
# path: $(Build.ArtifactStagingDirectory)
|
||||||
dependsOn: snaps_build
|
# artifact: snaps
|
||||||
pool:
|
# displayName: Store snaps artifacts
|
||||||
vmImage: ubuntu-18.04
|
# - job: snap_run
|
||||||
steps:
|
# dependsOn: snaps_build
|
||||||
- script: |
|
# pool:
|
||||||
sudo apt-get update
|
# vmImage: ubuntu-18.04
|
||||||
sudo apt-get install -y --no-install-recommends nginx-light snapd
|
# steps:
|
||||||
python tools/pip_install.py -U tox
|
# - task: UsePythonVersion@0
|
||||||
displayName: Install dependencies
|
# inputs:
|
||||||
- task: DownloadPipelineArtifact@2
|
# versionSpec: 3.8
|
||||||
inputs:
|
# addToPath: true
|
||||||
artifact: snaps
|
# - script: |
|
||||||
path: $(Build.SourcesDirectory)/snap
|
# set -e
|
||||||
displayName: Retrieve Certbot snaps
|
# sudo apt-get update
|
||||||
- script: |
|
# sudo apt-get install -y --no-install-recommends nginx-light snapd
|
||||||
sudo snap install --dangerous --classic snap/certbot_*_amd64.snap
|
# python3 -m venv venv
|
||||||
displayName: Install Certbot snap
|
# venv/bin/python tools/pipstrap.py
|
||||||
- script: |
|
# venv/bin/python tools/pip_install.py -U tox
|
||||||
python -m tox -e integration-external,apacheconftest-external-with-pebble
|
# displayName: Install dependencies
|
||||||
displayName: Run tox
|
# - task: DownloadPipelineArtifact@2
|
||||||
- job: snap_dns_run
|
# inputs:
|
||||||
dependsOn: snaps_build
|
# artifact: snaps
|
||||||
pool:
|
# path: $(Build.SourcesDirectory)/snap
|
||||||
vmImage: ubuntu-18.04
|
# displayName: Retrieve Certbot snaps
|
||||||
steps:
|
# - script: |
|
||||||
- script: |
|
# set -e
|
||||||
sudo apt-get update
|
# sudo snap install --dangerous --classic snap/certbot_*_amd64.snap
|
||||||
sudo apt-get install -y --no-install-recommends snapd
|
# displayName: Install Certbot snap
|
||||||
displayName: Install dependencies
|
# - script: |
|
||||||
- task: UsePythonVersion@0
|
# set -e
|
||||||
inputs:
|
# venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
|
||||||
versionSpec: 3.8
|
# displayName: Run tox
|
||||||
addToPath: true
|
# - job: snap_dns_run
|
||||||
- task: DownloadPipelineArtifact@2
|
# dependsOn: snaps_build
|
||||||
inputs:
|
# pool:
|
||||||
artifact: snaps
|
# vmImage: ubuntu-18.04
|
||||||
path: $(Build.SourcesDirectory)/snap
|
# steps:
|
||||||
displayName: Retrieve Certbot snaps
|
# - script: |
|
||||||
- script: |
|
# set -e
|
||||||
python3 -m venv venv
|
# sudo apt-get update
|
||||||
venv/bin/python tools/pip_install.py -e certbot-ci
|
# sudo apt-get install -y --no-install-recommends snapd
|
||||||
displayName: Prepare Certbot-CI
|
# displayName: Install dependencies
|
||||||
- script: |
|
# - task: UsePythonVersion@0
|
||||||
sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
|
# inputs:
|
||||||
displayName: Test DNS plugins snaps
|
# versionSpec: 3.8
|
||||||
|
# addToPath: true
|
||||||
|
# - task: DownloadPipelineArtifact@2
|
||||||
|
# inputs:
|
||||||
|
# artifact: snaps
|
||||||
|
# path: $(Build.SourcesDirectory)/snap
|
||||||
|
# displayName: Retrieve Certbot snaps
|
||||||
|
# - script: |
|
||||||
|
# set -e
|
||||||
|
# python3 -m venv venv
|
||||||
|
# venv/bin/python tools/pipstrap.py
|
||||||
|
# venv/bin/python tools/pip_install.py -e certbot-ci
|
||||||
|
# displayName: Prepare Certbot-CI
|
||||||
|
# - script: |
|
||||||
|
# set -e
|
||||||
|
# sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
|
||||||
|
# displayName: Test DNS plugins snaps
|
||||||
|
|||||||
@@ -1,26 +1,28 @@
 jobs:
 - job: test
+variables:
+PYTHON_VERSION: 3.9
 strategy:
 matrix:
 macos-py27:
-IMAGE_NAME: macOS-10.14
+IMAGE_NAME: macOS-10.15
 PYTHON_VERSION: 2.7
 TOXENV: py27
-macos-py38:
-IMAGE_NAME: macOS-10.14
-PYTHON_VERSION: 3.8
-TOXENV: py38
+macos-py39:
+IMAGE_NAME: macOS-10.15
+PYTHON_VERSION: 3.9
+TOXENV: py39
 windows-py36:
 IMAGE_NAME: vs2017-win2016
 PYTHON_VERSION: 3.6
 TOXENV: py36
-windows-py37-cover:
+windows-py38-cover:
 IMAGE_NAME: vs2017-win2016
-PYTHON_VERSION: 3.7
-TOXENV: py37-cover
+PYTHON_VERSION: 3.8
+TOXENV: py38-cover
 windows-integration-certbot:
 IMAGE_NAME: vs2017-win2016
-PYTHON_VERSION: 3.7
+PYTHON_VERSION: 3.8
 TOXENV: integration-certbot
 linux-oldest-tests-1:
 IMAGE_NAME: ubuntu-18.04
@@ -36,10 +38,10 @@ jobs:
 IMAGE_NAME: ubuntu-18.04
 PYTHON_VERSION: 3.6
 TOXENV: py36
-linux-py38-cover:
+linux-py39-cover:
 IMAGE_NAME: ubuntu-18.04
-PYTHON_VERSION: 3.8
-TOXENV: py38-cover
+PYTHON_VERSION: 3.9
+TOXENV: py39-cover
 linux-py37-lint:
 IMAGE_NAME: ubuntu-18.04
 PYTHON_VERSION: 3.7
@@ -50,15 +52,15 @@ jobs:
 TOXENV: mypy
 linux-integration:
 IMAGE_NAME: ubuntu-18.04
-PYTHON_VERSION: 2.7
+PYTHON_VERSION: 3.8
 TOXENV: integration
 ACME_SERVER: pebble
 apache-compat:
 IMAGE_NAME: ubuntu-18.04
 TOXENV: apache_compat
-le-auto-xenial:
+le-modification:
 IMAGE_NAME: ubuntu-18.04
-TOXENV: le_auto_xenial
+TOXENV: modification
 apacheconftest:
 IMAGE_NAME: ubuntu-18.04
 PYTHON_VERSION: 2.7
|
|||||||
@@ -7,6 +7,7 @@ stages:
 steps:
 # If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
 - bash: |
+set -e
 CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
 "${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
 displayName: Prepare changelog
|
|||||||
@@ -39,6 +39,7 @@ stages:
 - group: certbot-common
 steps:
 - bash: |
+set -e
 sudo apt-get update
 sudo apt-get install -y --no-install-recommends snapd
 sudo snap install --classic snapcraft
@@ -53,10 +54,11 @@ stages:
 inputs:
 secureFile: snapcraft.cfg
 - bash: |
+set -e
 mkdir -p .snapcraft
 ln -s $(snapcraftCfg.secureFilePath) .snapcraft/snapcraft.cfg
 for SNAP_FILE in snap/*.snap; do
-snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
+tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
 done
 displayName: Publish to Snap store
 - job: publish_docker
@@ -76,7 +78,7 @@ stages:
 artifact: docker_$(DOCKER_ARCH)
 path: $(Build.SourcesDirectory)
 displayName: Retrieve Docker images
-- bash: docker load --input $(Build.SourcesDirectory)/images.tar
+- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
 displayName: Load Docker images
 - task: Docker@2
 inputs:
@@ -93,5 +95,5 @@ stages:
 # Certbot organization on Docker Hub.
 containerRegistry: docker-hub
 displayName: Login to Docker Hub
-- bash: tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
+- bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
 displayName: Deploy the Docker images
|
|||||||
@@ -8,6 +8,7 @@ stages:
 vmImage: ubuntu-latest
 steps:
 - bash: |
+set -e
 MESSAGE="\
 ---\n\
 ##### Azure Pipeline
|
|||||||
@@ -1,6 +1,6 @@
 stages:
 - stage: TestAndPackage
 jobs:
-- template: ../jobs/standard-tests-jobs.yml
-- template: ../jobs/extended-tests-jobs.yml
+# - template: ../jobs/standard-tests-jobs.yml
+# - template: ../jobs/extended-tests-jobs.yml
 - template: ../jobs/packaging-jobs.yml
|
|||||||
@@ -1,9 +1,11 @@
 steps:
 - bash: |
+set -e
 brew install augeas
 condition: startswith(variables['IMAGE_NAME'], 'macOS')
 displayName: Install MacOS dependencies
 - bash: |
+set -e
 sudo apt-get update
 sudo apt-get install -y --no-install-recommends \
 python-dev \
@@ -21,7 +23,6 @@ steps:
 inputs:
 versionSpec: $(PYTHON_VERSION)
 addToPath: true
-condition: ne(variables['PYTHON_VERSION'], '')
 # tools/pip_install.py is used to pin packages to a known working version
 # except in tests where the environment variable CERTBOT_NO_PIN is set.
 # virtualenv is listed here explicitly to make sure it is upgraded when
@@ -30,6 +31,8 @@ steps:
 # set, pip updates dependencies it thinks are already satisfied to avoid some
 # problems with its lack of real dependency resolution.
 - bash: |
+set -e
+python tools/pipstrap.py
 python tools/pip_install.py -I tox virtualenv
 displayName: Install runtime dependencies
 - task: DownloadSecureFile@1
@@ -38,6 +41,7 @@ steps:
 secureFile: azure-test-farm.pem
 condition: contains(variables['TOXENV'], 'test-farm')
 - bash: |
+set -e
 export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
 [ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
 env
|
|||||||
.editorconfig (new file, 18 lines)
@@ -0,0 +1,18 @@
+# https://editorconfig.org/
+
+root = true
+
+[*]
+insert_final_newline = true
+trim_trailing_whitespace = true
+end_of_line = lf
+
+[*.py]
+indent_style = space
+indent_size = 4
+charset = utf-8
+max_line_length = 100
+
+[*.yaml]
+indent_style = space
+indent_size = 2
.envrc (new file, 12 lines)
@@ -0,0 +1,12 @@
+# This file is just a nicety for developers who use direnv. When you cd under
+# the Certbot repo, Certbot's virtual environment will be automatically
+# activated and then deactivated when you cd elsewhere. Developers have to have
+# direnv set up and run `direnv allow` to allow this file to execute on their
+# system. You can find more information at https://direnv.net/.
+. venv3/bin/activate
+# direnv doesn't support modifying PS1 so we unset it to squelch the error
+# it'll otherwise print about this being done in the activate script. See
+# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
+# prompt to change like it normally does, see
+# https://github.com/direnv/direnv/wiki/Python#restoring-the-ps1.
+unset PS1
.gitignore
@@ -60,3 +60,8 @@ stage
 *.snap
 snap-constraints.txt
 qemu-*
+certbot-dns*/certbot-dns*_amd64*.txt
+certbot-dns*/certbot-dns*_arm*.txt
+/certbot_amd64*.txt
+/certbot_arm*.txt
+certbot-dns*/snap
|
|||||||
@@ -61,6 +61,7 @@ Authors
 * [Daniel Albers](https://github.com/AID)
 * [Daniel Aleksandersen](https://github.com/da2x)
 * [Daniel Convissor](https://github.com/convissor)
+* [Daniel "Drex" Drexler](https://github.com/aeturnum)
 * [Daniel Huang](https://github.com/dhuang)
 * [Dave Guarino](https://github.com/daguar)
 * [David cz](https://github.com/dave-cz)
@@ -148,11 +149,13 @@ Authors
 * [Lior Sabag](https://github.com/liorsbg)
 * [Lipis](https://github.com/lipis)
 * [lord63](https://github.com/lord63)
+* [Lorenzo Fundaró](https://github.com/lfundaro)
 * [Luca Beltrame](https://github.com/lbeltrame)
 * [Luca Ebach](https://github.com/lucebac)
 * [Luca Olivetti](https://github.com/olivluca)
 * [Luke Rogers](https://github.com/lukeroge)
 * [Maarten](https://github.com/mrtndwrd)
+* [Mads Jensen](https://github.com/atombrella)
 * [Maikel Martens](https://github.com/krukas)
 * [Malte Janduda](https://github.com/MalteJ)
 * [Mantas Mikulėnas](https://github.com/grawity)
@@ -212,6 +215,7 @@ Authors
 * [Richard Barnes](https://github.com/r-barnes)
 * [Richard Panek](https://github.com/kernelpanek)
 * [Robert Buchholz](https://github.com/rbu)
+* [Robert Dailey](https://github.com/pahrohfit)
 * [Robert Habermann](https://github.com/frennkie)
 * [Robert Xiao](https://github.com/nneonneo)
 * [Roland Shoemaker](https://github.com/rolandshoemaker)
|
|||||||
@@ -11,7 +11,7 @@ to the Sphinx generated docs is provided below.
 
 
 [1] https://github.com/blog/1184-contributing-guidelines
-[2] http://docutils.sourceforge.net/docs/user/rst/quickref.html#hyperlink-targets
+[2] https://docutils.sourceforge.io/docs/user/rst/quickref.html#hyperlink-targets
 
 -->
 
|
|||||||
@@ -20,3 +20,10 @@ for mod in list(sys.modules):
 # preserved (acme.jose.* is josepy.*)
 if mod == 'josepy' or mod.startswith('josepy.'):
 sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]
+
+if sys.version_info[0] == 2:
+warnings.warn(
+"Python 2 support will be dropped in the next release of acme. "
+"Please upgrade your Python version.",
+PendingDeprecationWarning,
+) # pragma: no cover
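The new warning in acme's package init is a PendingDeprecationWarning, which Python hides by default, so most users will not see it unless warning filters are adjusted. A minimal sketch of how a downstream project could surface it; the filter call below is illustrative and not part of this diff:

```python
import warnings

# Show PendingDeprecationWarning messages, which are hidden by default.
warnings.simplefilter("always", PendingDeprecationWarning)

import acme  # on Python 2 this import now emits the deprecation warning
```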
|
|||||||
@@ -201,7 +201,7 @@ class ClientBase(object):
 when = parsedate_tz(retry_after)
 if when is not None:
 try:
-tz_secs = datetime.timedelta(when[-1] if when[-1] else 0)
+tz_secs = datetime.timedelta(when[-1] if when[-1] is not None else 0)
 return datetime.datetime(*when[:7]) - tz_secs
 except (ValueError, OverflowError):
 pass
@@ -448,7 +448,7 @@ class Client(ClientBase):
 heapq.heapify(waiting)
 # mapping between original Authorization Resource and the most
 # recently updated one
-updated = dict((authzr, authzr) for authzr in authzrs)
+updated = {authzr: authzr for authzr in authzrs}
 
 while waiting:
 # find the smallest Retry-After, and sleep if necessary
@@ -801,7 +801,7 @@ class ClientV2(ClientBase):
 """
 # Can't use response.links directly because it drops multiple links
 # of the same relation type, which is possible in RFC8555 responses.
-if not 'Link' in response.headers:
+if 'Link' not in response.headers:
 return []
 links = parse_header_links(response.headers['Link'])
 return [l['url'] for l in links
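The Retry-After change makes the timezone guard explicit: email.utils.parsedate_tz returns a 10-tuple whose last element is the UTC offset in seconds, or None when the header carries no zone information, and only the None case should fall back to 0. A small standard-library illustration of the two cases, not taken from the diff itself:

```python
from email.utils import parsedate_tz

with_zone = parsedate_tz("Fri, 30 Oct 2020 07:28:00 GMT")
without_zone = parsedate_tz("Fri, 30 Oct 2020 07:28:00")

print(with_zone[-1])     # 0: an explicit UTC offset, falsy but not None
print(without_zone[-1])  # None: no zone given, so the code falls back to 0
```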
|
|||||||
@@ -186,6 +186,7 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
 raise errors.Error(error)
 return client_ssl.get_peer_certificate()
 
+
 def make_csr(private_key_pem, domains, must_staple=False):
 """Generate a CSR containing a list of domains as subjectAltNames.
 
@@ -217,6 +218,7 @@ def make_csr(private_key_pem, domains, must_staple=False):
 return crypto.dump_certificate_request(
 crypto.FILETYPE_PEM, csr)
 
+
 def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
 common_name = loaded_cert_or_req.get_subject().CN
 sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)
@@ -225,6 +227,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
 return sans
 return [common_name] + [d for d in sans if d != common_name]
 
+
 def _pyopenssl_cert_or_req_san(cert_or_req):
 """Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
 
@@ -317,6 +320,7 @@ def gen_ss_cert(key, domains, not_before=None,
 cert.sign(key, "sha256")
 return cert
 
+
 def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
 """Dump certificate chain into a bundle.
 
|
|||||||
@@ -12,4 +12,5 @@ try:
 from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
 from typing import Collection, IO # type: ignore
 except ImportError:
-sys.modules[__name__] = TypingClass()
+# mypy complains because TypingClass is not a module
+sys.modules[__name__] = TypingClass() # type: ignore
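The comment and type: ignore are needed because sys.modules maps names to module objects, and here an arbitrary instance is swapped in so later attribute lookups never fail on old Pythons without typing. A generic sketch of that module-replacement pattern, with illustrative names that are not from this diff:

```python
import sys
import types

class _FallbackModule(types.ModuleType):
    """Illustrative stand-in: any attribute access returns None."""
    def __getattr__(self, name):
        return None

# Replacing the sys.modules entry swaps what later imports of that name receive.
sys.modules["fake_typing_fallback"] = _FallbackModule("fake_typing_fallback")
```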
|
|||||||
@@ -206,7 +206,7 @@ class Directory(jose.JSONDeSerializable):
 external_account_required = jose.Field('externalAccountRequired', omitempty=True)
 
 def __init__(self, **kwargs):
-kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
+kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
 super(Directory.Meta, self).__init__(**kwargs)
 
 @property
@@ -315,6 +315,9 @@ class Registration(ResourceBody):
 # on new-reg key server ignores 'key' and populates it based on
 # JWS.signature.combined.jwk
 key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
+# Contact field implements special behavior to allow messages that clear existing
+# contacts while not expecting the `contact` field when loading from json.
+# This is implemented in the constructor and *_json methods.
 contact = jose.Field('contact', omitempty=True, default=())
 agreement = jose.Field('agreement', omitempty=True)
 status = jose.Field('status', omitempty=True)
@@ -327,24 +330,73 @@ class Registration(ResourceBody):
 
 @classmethod
 def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
-"""Create registration resource from contact details."""
+"""
+Create registration resource from contact details.
+
+The `contact` keyword being passed to a Registration object is meaningful, so
+this function represents empty iterables in its kwargs by passing on an empty
+`tuple`.
+"""
+
+# Note if `contact` was in kwargs.
+contact_provided = 'contact' in kwargs
+
+# Pop `contact` from kwargs and add formatted email or phone numbers
 details = list(kwargs.pop('contact', ()))
 if phone is not None:
 details.append(cls.phone_prefix + phone)
 if email is not None:
 details.extend([cls.email_prefix + mail for mail in email.split(',')])
-kwargs['contact'] = tuple(details)
+
+# Insert formatted contact information back into kwargs
+# or insert an empty tuple if `contact` provided.
+if details or contact_provided:
+kwargs['contact'] = tuple(details)
+
 if external_account_binding:
 kwargs['external_account_binding'] = external_account_binding
 
 return cls(**kwargs)
 
+def __init__(self, **kwargs):
+"""Note if the user provides a value for the `contact` member."""
+if 'contact' in kwargs:
+# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
+object.__setattr__(self, '_add_contact', True)
+super(Registration, self).__init__(**kwargs)
+
 def _filter_contact(self, prefix):
 return tuple(
 detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
 if detail.startswith(prefix))
 
+def _add_contact_if_appropriate(self, jobj):
+"""
+The `contact` member of Registration objects should not be required when
+de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
+it should be included in serializations if it was provided.
+
+:param jobj: Dictionary containing this Registrations' data
+:type jobj: dict
+
+:returns: Dictionary containing Registrations data to transmit to the server
+:rtype: dict
+"""
+if getattr(self, '_add_contact', False):
+jobj['contact'] = self.encode('contact')
+
+return jobj
+
+def to_partial_json(self):
+"""Modify josepy.JSONDeserializable.to_partial_json()"""
+jobj = super(Registration, self).to_partial_json()
+return self._add_contact_if_appropriate(jobj)
+
+def fields_to_partial_json(self):
+"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
+jobj = super(Registration, self).fields_to_partial_json()
+return self._add_contact_if_appropriate(jobj)
+
 @property
 def phones(self):
 """All phones found in the ``contact`` field."""
@@ -413,7 +465,7 @@ class ChallengeBody(ResourceBody):
 omitempty=True, default=None)
 
 def __init__(self, **kwargs):
-kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
+kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
 super(ChallengeBody, self).__init__(**kwargs)
 
 def encode(self, name):
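The net effect of the Registration changes is that `contact` is only serialized when the caller actually supplied it, while an explicitly supplied empty tuple is still transmitted so existing contacts can be cleared. A short sketch of the intended behavior, mirroring the new acme test added further down in this diff:

```python
from acme.messages import NewRegistration

no_contact = NewRegistration()
empty_contact = NewRegistration(contact=())

# The default () is not transmitted...
assert 'contact' not in no_contact.to_partial_json()
# ...but an explicit empty tuple is, which lets a client clear its contacts.
assert 'contact' in empty_contact.to_partial_json()
```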
|
|||||||
@@ -4,4 +4,4 @@ import six
 
 def map_keys(dikt, func):
 """Map dictionary keys."""
-return dict((func(key), value) for key, value in six.iteritems(dikt))
+return {func(key): value for key, value in six.iteritems(dikt)}
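Several changes in this comparison replace dict() over a generator expression with an equivalent dict comprehension; behavior is unchanged, only the idiom is newer. A simplified, Python 3-only restatement of this helper as a self-contained sketch (the real function uses six.iteritems, as shown above):

```python
def map_keys(dikt, func):
    """Map dictionary keys."""
    return {func(key): value for key, value in dikt.items()}

assert map_keys({'a': 1, 'b': 2}, str.upper) == {'A': 1, 'B': 2}
```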
|
|||||||
@@ -9,7 +9,7 @@ BUILDDIR = _build
 
 # User-friendly check for sphinx-build
 ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/)
 endif
 
 # Internal variables.
|
|||||||
@@ -120,7 +120,7 @@ todo_include_todos = False
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
 
-# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
+# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
 # on_rtd is whether we are on readthedocs.org
 on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
 if not on_rtd: # only import and set the theme if we're building docs locally
|
|||||||
@@ -65,7 +65,7 @@ if errorlevel 9009 (
 echo.may add the Sphinx directory to PATH.
 echo.
 echo.If you don't have Sphinx installed, grab it from
-echo.http://sphinx-doc.org/
+echo.https://www.sphinx-doc.org/
 exit /b 1
 )
 
|
|||||||
@@ -4,9 +4,8 @@ import sys
 from setuptools import __version__ as setuptools_version
 from setuptools import find_packages
 from setuptools import setup
-from setuptools.command.test import test as TestCommand
 
-version = '1.8.0.dev0'
+version = '1.11.0.dev0'
 
 # Please update tox.ini when modifying dependency version requirements
 install_requires = [
@@ -47,22 +46,6 @@ docs_extras = [
 'sphinx_rtd_theme',
 ]
 
-
-class PyTest(TestCommand):
-user_options = []
-
-def initialize_options(self):
-TestCommand.initialize_options(self)
-self.pytest_args = ''
-
-def run_tests(self):
-import shlex
-# import here, cause outside the eggs aren't loaded
-import pytest
-errno = pytest.main(shlex.split(self.pytest_args))
-sys.exit(errno)
-
-
 setup(
 name='acme',
 version=version,
@@ -83,6 +66,7 @@ setup(
 'Programming Language :: Python :: 3.6',
 'Programming Language :: Python :: 3.7',
 'Programming Language :: Python :: 3.8',
+'Programming Language :: Python :: 3.9',
 'Topic :: Internet :: WWW/HTTP',
 'Topic :: Security',
 ],
@@ -94,7 +78,4 @@ setup(
 'dev': dev_extras,
 'docs': docs_extras,
 },
-test_suite='acme',
-tests_require=["pytest"],
-cmdclass={"test": PyTest},
 )
|
|||||||
@@ -1342,7 +1342,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
 # test should fail if the default adapter type is changed by requests
 net = ClientNetwork(key=None, alg=None)
 session = requests.Session()
-for scheme in session.adapters.keys():
+for scheme in session.adapters:
 client_network_adapter = net.session.adapters.get(scheme)
 default_adapter = session.adapters.get(scheme)
 self.assertEqual(client_network_adapter.__class__, default_adapter.__class__)
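This test change, and the similar .keys() removals in the certbot-apache tests further down, rely on the fact that iterating over or testing membership in a dict already operates on its keys, so calling .keys() first is redundant. A minimal illustration in plain Python, independent of the test fixtures above:

```python
adapters = {"https://": "https-adapter", "http://": "http-adapter"}

# Iteration and membership use the keys either way; the .keys() call adds nothing.
assert list(adapters) == list(adapters.keys())
assert ("https://" in adapters) == ("https://" in adapters.keys())
```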
|
|||||||
@@ -108,11 +108,11 @@ class ConstantTest(unittest.TestCase):
 
 def test_equality(self):
 const_a_prime = self.MockConstant('a')
-self.assertFalse(self.const_a == self.const_b)
-self.assertTrue(self.const_a == const_a_prime)
+self.assertNotEqual(self.const_a, self.const_b)
+self.assertEqual(self.const_a, const_a_prime)
 
-self.assertTrue(self.const_a != self.const_b)
-self.assertFalse(self.const_a != const_a_prime)
+self.assertNotEqual(self.const_a, self.const_b)
+self.assertEqual(self.const_a, const_a_prime)
 
 
 class DirectoryTest(unittest.TestCase):
@@ -254,6 +254,19 @@ class RegistrationTest(unittest.TestCase):
 from acme.messages import Registration
 hash(Registration.from_json(self.jobj_from))
 
+def test_default_not_transmitted(self):
+from acme.messages import NewRegistration
+empty_new_reg = NewRegistration()
+new_reg_with_contact = NewRegistration(contact=())
+
+self.assertEqual(empty_new_reg.contact, ())
+self.assertEqual(new_reg_with_contact.contact, ())
+
+self.assertTrue('contact' not in empty_new_reg.to_partial_json())
+self.assertTrue('contact' not in empty_new_reg.fields_to_partial_json())
+self.assertTrue('contact' in new_reg_with_contact.to_partial_json())
+self.assertTrue('contact' in new_reg_with_contact.fields_to_partial_json())
+
 
 class UpdateRegistrationTest(unittest.TestCase):
 """Tests for acme.messages.UpdateRegistration."""
|
|||||||
@@ -6,7 +6,7 @@ Authors:
 Raphael Pinson <raphink@gmail.com>
 
 About: Reference
-Online Apache configuration manual: http://httpd.apache.org/docs/trunk/
+Online Apache configuration manual: https://httpd.apache.org/docs/trunk/
 
 About: License
 This file is licensed under the LGPL v2+.
|
|||||||
@@ -9,7 +9,6 @@ import re
 import socket
 import time
 
-import six
 import zope.component
 import zope.interface
 try:
@@ -328,6 +327,9 @@ class ApacheConfigurator(common.Installer):
 if self.version < (2, 2):
 raise errors.NotSupportedError(
 "Apache Version {0} not supported.".format(str(self.version)))
+elif self.version < (2, 4):
+logger.warning('Support for Apache 2.2 is deprecated and will be removed in a '
+'future release.')
 
 # Recover from previous crash before Augeas initialization to have the
 # correct parse tree from the get go.
@@ -464,21 +466,6 @@ class ApacheConfigurator(common.Installer):
 metadata=metadata
 )
 
-def _wildcard_domain(self, domain):
-"""
-Checks if domain is a wildcard domain
-
-:param str domain: Domain to check
-
-:returns: If the domain is wildcard domain
-:rtype: bool
-"""
-if isinstance(domain, six.text_type):
-wildcard_marker = u"*."
-else:
-wildcard_marker = b"*."
-return domain.startswith(wildcard_marker)
-
 def deploy_cert(self, domain, cert_path, key_path,
 chain_path=None, fullchain_path=None):
 """Deploys certificate to specified virtual host.
@@ -513,7 +500,7 @@ class ApacheConfigurator(common.Installer):
 :rtype: `list` of :class:`~certbot_apache._internal.obj.VirtualHost`
 """
 
-if self._wildcard_domain(domain):
+if util.is_wildcard_domain(domain):
 if domain in self._wildcard_vhosts:
 # Vhosts for a wildcard domain were already selected
 return self._wildcard_vhosts[domain]
@@ -1462,7 +1449,7 @@ class ApacheConfigurator(common.Installer):
 if not line.lower().lstrip().startswith("rewriterule"):
 return False
 
-# According to: http://httpd.apache.org/docs/2.4/rewrite/flags.html
+# According to: https://httpd.apache.org/docs/2.4/rewrite/flags.html
 # The syntax of a RewriteRule is:
 # RewriteRule pattern target [Flag1,Flag2,Flag3]
 # i.e. target is required, so it must exist.
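The private _wildcard_domain method is dropped in favor of a shared util.is_wildcard_domain helper whose implementation is not shown in this comparison. Judging from the removed method, a sketch of what such a helper plausibly looks like (the name and module placement are assumptions, not confirmed here):

```python
def is_wildcard_domain(domain):
    """Return True if domain is a wildcard domain (e.g. *.example.com)."""
    # Accept both text and bytes, mirroring the removed configurator method.
    wildcard_marker = u"*." if isinstance(domain, str) else b"*."
    return domain.startswith(wildcard_marker)
```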
|
|||||||
@@ -731,7 +731,6 @@ class ApacheParser(object):
 privileged users.
 
 https://apr.apache.org/docs/apr/2.0/apr__fnmatch_8h_source.html
-http://apache2.sourcearchive.com/documentation/2.2.16-6/apr__fnmatch_8h_source.html
 
 :param str clean_fn_match: Apache style filename match, like globs
 
@@ -799,7 +798,7 @@ class ApacheParser(object):
 def _parsed_by_parser_paths(self, filep, paths):
 """Helper function that searches through provided paths and returns
 True if file path is found in the set"""
-for directory in paths.keys():
+for directory in paths:
 for filename in paths[directory]:
 if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
 return True
|
|||||||
@@ -4,9 +4,8 @@ import sys
 from setuptools import __version__ as setuptools_version
 from setuptools import find_packages
 from setuptools import setup
-from setuptools.command.test import test as TestCommand
 
-version = '1.8.0.dev0'
+version = '1.11.0.dev0'
 
 # Remember to update local-oldest-requirements.txt when changing the minimum
 # acme/certbot version.
@@ -32,21 +31,6 @@ dev_extras = [
 'apacheconfig>=0.3.2',
 ]
 
-class PyTest(TestCommand):
-user_options = []
-
-def initialize_options(self):
-TestCommand.initialize_options(self)
-self.pytest_args = ''
-
-def run_tests(self):
-import shlex
-# import here, cause outside the eggs aren't loaded
-import pytest
-errno = pytest.main(shlex.split(self.pytest_args))
-sys.exit(errno)
-
-
 setup(
 name='certbot-apache',
 version=version,
@@ -69,6 +53,7 @@ setup(
 'Programming Language :: Python :: 3.6',
 'Programming Language :: Python :: 3.7',
 'Programming Language :: Python :: 3.8',
+'Programming Language :: Python :: 3.9',
 'Topic :: Internet :: WWW/HTTP',
 'Topic :: Security',
 'Topic :: System :: Installation/Setup',
@@ -88,7 +73,4 @@ setup(
 'apache = certbot_apache._internal.entrypoint:ENTRYPOINT',
 ],
 },
-test_suite='certbot_apache',
-tests_require=["pytest"],
-cmdclass={"test": PyTest},
 )
|
|||||||
@@ -52,7 +52,7 @@ function Cleanup() {
 # if our environment asks us to enable modules, do our best!
 if [ "$1" = --debian-modules ] ; then
 sudo apt-get install -y apache2
-sudo apt-get install -y libapache2-mod-wsgi
+sudo apt-get install -y libapache2-mod-wsgi-py3
 sudo apt-get install -y libapache2-mod-macro

 for mod in ssl rewrite macro wsgi deflate userdir version mime setenvif ; do
@@ -140,7 +140,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
 self.assertEqual(mock_get.call_count, 3)
 self.assertEqual(len(self.config.parser.modules), 4)
 self.assertEqual(len(self.config.parser.variables), 2)
-self.assertTrue("TEST2" in self.config.parser.variables.keys())
+self.assertTrue("TEST2" in self.config.parser.variables)
 self.assertTrue("mod_another.c" in self.config.parser.modules)

 def test_get_virtual_hosts(self):
@@ -172,11 +172,11 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
 mock_osi.return_value = ("centos", "7")
 self.config.parser.update_runtime_variables()

-self.assertTrue("mock_define" in self.config.parser.variables.keys())
+self.assertTrue("mock_define" in self.config.parser.variables)
-self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
+self.assertTrue("mock_define_too" in self.config.parser.variables)
-self.assertTrue("mock_value" in self.config.parser.variables.keys())
+self.assertTrue("mock_value" in self.config.parser.variables)
 self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
-self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
+self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
 self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])

 @mock.patch("certbot_apache._internal.configurator.util.run_script")
@@ -1337,13 +1337,6 @@ class MultipleVhostsTest(util.ApacheTest):
 self.config.enable_mod,
 "whatever")

-def test_wildcard_domain(self):
-# pylint: disable=protected-access
-cases = {u"*.example.org": True, b"*.x.example.org": True,
-u"a.example.org": False, b"a.x.example.org": False}
-for key in cases:
-self.assertEqual(self.config._wildcard_domain(key), cases[key])
-
 def test_choose_vhosts_wildcard(self):
 # pylint: disable=protected-access
 mock_path = "certbot_apache._internal.display_ops.select_vhost_multiple"
@@ -1357,10 +1350,10 @@ class MultipleVhostsTest(util.ApacheTest):

 # And the actual returned values
 self.assertEqual(len(vhs), 1)
-self.assertTrue(vhs[0].name == "certbot.demo")
+self.assertEqual(vhs[0].name, "certbot.demo")
 self.assertTrue(vhs[0].ssl)

-self.assertFalse(vhs[0] == self.vh_truth[3])
+self.assertNotEqual(vhs[0], self.vh_truth[3])

 @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.make_vhost_ssl")
 def test_choose_vhosts_wildcard_no_ssl(self, mock_makessl):
@@ -1471,10 +1464,10 @@ class MultipleVhostsTest(util.ApacheTest):
 self.config.parser.aug.match = mock_match
 vhs = self.config.get_virtual_hosts()
 self.assertEqual(len(vhs), 2)
-self.assertTrue(vhs[0] == self.vh_truth[1])
+self.assertEqual(vhs[0], self.vh_truth[1])
 # mock_vhost should have replaced the vh_truth[0], because its filepath
 # isn't a symlink
-self.assertTrue(vhs[1] == mock_vhost)
+self.assertEqual(vhs[1], mock_vhost)


 class AugeasVhostsTest(util.ApacheTest):
@@ -412,9 +412,9 @@ class DualParserNodeTest(unittest.TestCase):  # pylint: disable=too-many-public-
 ancestor=self.block,
 filepath="/path/to/whatever",
 metadata=self.metadata)
-self.assertFalse(self.block == ne_block)
+self.assertNotEqual(self.block, ne_block)
-self.assertFalse(self.directive == ne_directive)
+self.assertNotEqual(self.directive, ne_directive)
-self.assertFalse(self.comment == ne_comment)
+self.assertNotEqual(self.comment, ne_comment)

 def test_parsed_paths(self):
 mock_p = mock.MagicMock(return_value=['/path/file.conf',
@@ -134,7 +134,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
 self.assertEqual(mock_get.call_count, 3)
 self.assertEqual(len(self.config.parser.modules), 4)
 self.assertEqual(len(self.config.parser.variables), 2)
-self.assertTrue("TEST2" in self.config.parser.variables.keys())
+self.assertTrue("TEST2" in self.config.parser.variables)
 self.assertTrue("mod_another.c" in self.config.parser.modules)

 @mock.patch("certbot_apache._internal.configurator.util.run_script")
@@ -172,11 +172,11 @@ class MultipleVhostsTestFedora(util.ApacheTest):
 mock_osi.return_value = ("fedora", "29")
 self.config.parser.update_runtime_variables()

-self.assertTrue("mock_define" in self.config.parser.variables.keys())
+self.assertTrue("mock_define" in self.config.parser.variables)
-self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
+self.assertTrue("mock_define_too" in self.config.parser.variables)
-self.assertTrue("mock_value" in self.config.parser.variables.keys())
+self.assertTrue("mock_value" in self.config.parser.variables)
 self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
-self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
+self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
 self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])

 @mock.patch("certbot_apache._internal.configurator.util.run_script")
@@ -91,7 +91,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
 with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_modules"):
 self.config.parser.update_runtime_variables()
 for define in defines:
-self.assertTrue(define in self.config.parser.variables.keys())
+self.assertTrue(define in self.config.parser.variables)

 @mock.patch("certbot_apache._internal.apache_util.parse_from_subprocess")
 def test_no_binary_configdump(self, mock_subprocess):
@@ -27,14 +27,14 @@ class VirtualHostTest(unittest.TestCase):
 "certbot_apache._internal.obj.Addr(('127.0.0.1', '443'))")

 def test_eq(self):
-self.assertTrue(self.vhost1b == self.vhost1)
+self.assertEqual(self.vhost1b, self.vhost1)
-self.assertFalse(self.vhost1 == self.vhost2)
+self.assertNotEqual(self.vhost1, self.vhost2)
 self.assertEqual(str(self.vhost1b), str(self.vhost1))
-self.assertFalse(self.vhost1b == 1234)
+self.assertNotEqual(self.vhost1b, 1234)

 def test_ne(self):
-self.assertTrue(self.vhost1 != self.vhost2)
+self.assertNotEqual(self.vhost1, self.vhost2)
-self.assertFalse(self.vhost1 != self.vhost1b)
+self.assertEqual(self.vhost1, self.vhost1b)

 def test_conflicts(self):
 from certbot_apache._internal.obj import Addr
@@ -128,13 +128,13 @@ class AddrTest(unittest.TestCase):
 self.assertTrue(self.addr1.conflicts(self.addr2))

 def test_equal(self):
-self.assertTrue(self.addr1 == self.addr2)
+self.assertEqual(self.addr1, self.addr2)
-self.assertFalse(self.addr == self.addr1)
+self.assertNotEqual(self.addr, self.addr1)
-self.assertFalse(self.addr == 123)
+self.assertNotEqual(self.addr, 123)

 def test_not_equal(self):
-self.assertFalse(self.addr1 != self.addr2)
+self.assertEqual(self.addr1, self.addr2)
-self.assertTrue(self.addr != self.addr1)
+self.assertNotEqual(self.addr, self.addr1)


 if __name__ == "__main__":
@@ -26,8 +26,6 @@ class ApacheTest(unittest.TestCase):
 config_root="debian_apache_2_4/multiple_vhosts/apache2",
 vhost_root="debian_apache_2_4/multiple_vhosts/apache2/sites-available"):
 # pylint: disable=arguments-differ
-super(ApacheTest, self).setUp()
-
 self.temp_dir, self.config_dir, self.work_dir = common.dir_setup(
 test_dir=test_dir,
 pkg=__name__)
certbot-auto
@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
 fi
 VENV_BIN="$VENV_PATH/bin"
 BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
-LE_AUTO_VERSION="1.7.0"
+LE_AUTO_VERSION="1.10.1"
 BASENAME=$(basename $0)
 USAGE="Usage: $BASENAME [OPTIONS]
 A self-updating wrapper script for the Certbot ACME client. When run, updates
@@ -258,7 +258,7 @@ DeprecationBootstrap() {

 MIN_PYTHON_2_VERSION="2.7"
 MIN_PYVER2=$(echo "$MIN_PYTHON_2_VERSION" | sed 's/\.//')
-MIN_PYTHON_3_VERSION="3.5"
+MIN_PYTHON_3_VERSION="3.6"
 MIN_PYVER3=$(echo "$MIN_PYTHON_3_VERSION" | sed 's/\.//')
 # Sets LE_PYTHON to Python version string and PYVER to the first two
 # digits of the python version.
@@ -799,17 +799,10 @@ BootstrapMageiaCommon() {
 # that function. If Bootstrap is set to a function that doesn't install any
 # packages BOOTSTRAP_VERSION is not set.
 if [ -f /etc/debian_version ]; then
-Bootstrap() {
-BootstrapMessage "Debian-based OSes"
-BootstrapDebCommon
-}
-BOOTSTRAP_VERSION="BootstrapDebCommon $BOOTSTRAP_DEB_COMMON_VERSION"
+DEPRECATED_OS=1
 elif [ -f /etc/mageia-release ]; then
 # Mageia has both /etc/mageia-release and /etc/redhat-release
-Bootstrap() {
-ExperimentalBootstrap "Mageia" BootstrapMageiaCommon
-}
-BOOTSTRAP_VERSION="BootstrapMageiaCommon $BOOTSTRAP_MAGEIA_COMMON_VERSION"
+DEPRECATED_OS=1
 elif [ -f /etc/redhat-release ]; then
 # Run DeterminePythonVersion to decide on the basis of available Python versions
 # whether to use 2.x or 3.x on RedHat-like systems.
@@ -884,31 +877,11 @@ elif [ -f /etc/redhat-release ]; then

 LE_PYTHON="$prev_le_python"
 elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
-Bootstrap() {
-BootstrapMessage "openSUSE-based OSes"
-BootstrapSuseCommon
-}
-BOOTSTRAP_VERSION="BootstrapSuseCommon $BOOTSTRAP_SUSE_COMMON_VERSION"
+DEPRECATED_OS=1
 elif [ -f /etc/arch-release ]; then
-Bootstrap() {
-if [ "$DEBUG" = 1 ]; then
-BootstrapMessage "Archlinux"
-BootstrapArchCommon
-else
-error "Please use pacman to install letsencrypt packages:"
-error "# pacman -S certbot certbot-apache"
-error
-error "If you would like to use the virtualenv way, please run the script again with the"
-error "--debug flag."
-exit 1
-fi
-}
-BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
+DEPRECATED_OS=1
 elif [ -f /etc/manjaro-release ]; then
-Bootstrap() {
-ExperimentalBootstrap "Manjaro Linux" BootstrapArchCommon
-}
-BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
+DEPRECATED_OS=1
 elif [ -f /etc/gentoo-release ]; then
 DEPRECATED_OS=1
 elif uname | grep -iq FreeBSD ; then
@@ -921,19 +894,9 @@ elif [ -f /etc/issue ] && grep -iq "Amazon Linux" /etc/issue ; then
 }
 BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
 elif [ -f /etc/product ] && grep -q "Joyent Instance" /etc/product ; then
-Bootstrap() {
-ExperimentalBootstrap "Joyent SmartOS Zone" BootstrapSmartOS
-}
-BOOTSTRAP_VERSION="BootstrapSmartOS $BOOTSTRAP_SMARTOS_VERSION"
+DEPRECATED_OS=1
 else
-Bootstrap() {
-error "Sorry, I don't know how to bootstrap Certbot on your operating system!"
-error
-error "You will need to install OS dependencies, configure virtualenv, and run pip install manually."
-error "Please see https://letsencrypt.readthedocs.org/en/latest/contributing.html#prerequisites"
-error "for more info."
-exit 1
-}
+DEPRECATED_OS=1
 fi

 # We handle this case after determining the normal bootstrap version to allow
@@ -1530,18 +1493,18 @@ letsencrypt==0.7.0 \
 --hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
 --hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9

-certbot==1.7.0 \
---hash=sha256:84877127caf779c212d131d36399a45a8e13e06274e7a5e029845df5c84cd974 \
---hash=sha256:10b95bb86fb8f1dbbd27558bb42454d5995cbdb45d6c00d961ebba2a4bdc4355
+certbot==1.10.1 \
+--hash=sha256:011ac980fa21b9f29e02c9b8d8b86e8a4bf4670b51b6ad91656e401e9d2d2231 \
+--hash=sha256:0d9ee3fc09e0d03b2d1b1f1c4916e61ecfc6904b4216ddef4e6a5ca1424d9cb7
-acme==1.7.0 \
---hash=sha256:ef0e84d670f59c096e9ed8c3bf9e6a7d22ee378fdb4175503c06cc485672c79a \
---hash=sha256:288d9bbb075278961d224e43f7f386c491d25366a98ed89a62771c5022978386
+acme==1.10.1 \
+--hash=sha256:752d598e54e98ad1e874de53fd50c61044f1b566d6deb790db5676ce9c573546 \
+--hash=sha256:fcbb559aedc96b404edf593e78517dcd7291984d5a37036c3fc77f3c5c122fd8
-certbot-apache==1.7.0 \
---hash=sha256:514c09a892964332c2485a38bd5720e4cf93e35998341af36eef5401ab165d89 \
---hash=sha256:99943b6406e0315f31c1f81e2ced6be38aee3ea24974ef4d7aeeda8202c1c3bc
+certbot-apache==1.10.1 \
+--hash=sha256:f077b4b7f166627ef5e0921fe7cde57700670fc86e9ad9dbdfaf2c573cc0f2fa \
+--hash=sha256:97ed637b4c7b03820db6c69aa90145dc989933351d46a3d62baf6b71674f0a10
-certbot-nginx==1.7.0 \
---hash=sha256:fea2387c92155635fbddb02758d5ba73f0d7af459959f91be0a1606fd2e43c55 \
---hash=sha256:d52ec3e711884100636c42b639d8959378562ea78183a273d120df808de2724f
+certbot-nginx==1.10.1 \
+--hash=sha256:7c36459021f8a1ec3b6c062e4c4fc866bfaa1dbf26ccd29e043dd6848003be08 \
+--hash=sha256:c0bbeccf85f46b728fd95e6bb8c2649d32d3383d7f47ea4b9c312d12bf04d2f0

 UNLIKELY_EOF
 # -------------------------------------------------------------------------
@@ -1615,6 +1578,11 @@ maybe_argparse = (
 if sys.version_info < (2, 7, 0) else [])


+# Be careful when updating the pinned versions here, in particular for pip.
+# Indeed starting from 10.0, pip will build dependencies in isolation if the
+# related projects are compliant with PEP 517. This is not something we want
+# as of now, so the isolation build will need to be disabled wherever
+# pipstrap is used (see https://github.com/certbot/certbot/issues/8256).
 PACKAGES = maybe_argparse + [
 # Pip has no dependencies, as it vendors everything:
 ('11/b6/abcb525026a4be042b486df43905d6893fb04f05aac21c32c638e939e447/'
@@ -0,0 +1,60 @@
+options {
+directory "/var/cache/bind";
+
+// Running inside Docker. Bind address on Docker host is 127.0.0.1.
+listen-on { any; };
+listen-on-v6 { any; };
+
+// We are allowing BIND to service recursive queries, but only in an extremely limited sense
+// where it is entirely disconnected from public DNS:
+// - Iterative queries are disabled. Only forwarding to a non-existent forwarder.
+// - The only recursive answers we can get (that will not be a SERVFAIL) will come from the
+// RPZ "mock-recursion" zone. Effectively this means we are mocking out the entirety of
+// public DNS.
+allow-recursion { any; }; // BIND will only answer using RPZ if recursion is enabled
+forwarders { 192.0.2.254; }; // Nobody is listening, this is TEST-NET-1
+forward only; // Do NOT perform iterative queries from the root zone
+dnssec-validation no; // Do not bother fetching the root DNSKEY set (performance)
+response-policy { // All recursive queries will be served from here.
+zone "mock-recursion"
+log yes;
+} recursive-only no // Allow RPZs to affect authoritative zones too.
+qname-wait-recurse no // No real recursion.
+nsip-wait-recurse no; // No real recursion.
+
+allow-transfer { none; };
+allow-update { none; };
+};
+
+key "default-key." {
+algorithm hmac-sha512;
+secret "91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==";
+};
+
+zone "mock-recursion" {
+type primary;
+file "/var/lib/bind/rpz.mock-recursion";
+allow-query {
+none;
+};
+};
+
+zone "example.com." {
+type primary;
+file "/var/lib/bind/db.example.com";
+journal "/var/cache/bind/db.example.com.jnl";
+
+update-policy {
+grant default-key zonesub TXT;
+};
+};
+
+zone "sub.example.com." {
+type primary;
+file "/var/lib/bind/db.sub.example.com";
+journal "/var/cache/bind/db.sub.example.com.jnl";
+
+update-policy {
+grant default-key zonesub TXT;
+};
+};
@@ -0,0 +1,10 @@
+# Target DNS server
+dns_rfc2136_server = {server_address}
+# Target DNS port
+dns_rfc2136_port = {server_port}
+# TSIG key name
+dns_rfc2136_name = default-key.
+# TSIG key secret
+dns_rfc2136_secret = 91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==
+# TSIG key algorithm
+dns_rfc2136_algorithm = HMAC-SHA512
@@ -0,0 +1,11 @@
+$ORIGIN example.com.
+$TTL 3600
+example.com. IN SOA ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )
+
+example.com. IN NS ns1
+example.com. IN NS ns2
+
+ns1 IN A 192.0.2.2
+ns2 IN A 192.0.2.3
+
+@ IN A 192.0.2.1
@@ -0,0 +1,9 @@
+$ORIGIN sub.example.com.
+$TTL 3600
+sub.example.com. IN SOA ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )
+
+sub.example.com. IN NS ns1
+sub.example.com. IN NS ns2
+
+ns1 IN A 192.0.2.2
+ns2 IN A 192.0.2.3
@@ -0,0 +1,6 @@
+$TTL 3600
+
+@ SOA ns1.example.test. dummy.example.test. 1 12h 15m 3w 2h
+NS ns1.example.test.
+
+_acme-challenge.aliased.example IN CNAME _acme-challenge.example.com.
@@ -0,0 +1,14 @@
+This directory contains your keys and certificates.
+
+`privkey.pem` : the private key for your certificate.
+`fullchain.pem`: the certificate file used in most server software.
+`chain.pem` : used for OCSP stapling in Nginx >=1.3.7.
+`cert.pem` : will break many server configurations, and should not be used
+without reading further documentation (see link below).
+
+WARNING: DO NOT MOVE OR RENAME THESE FILES!
+Certbot expects these files to remain in this location in order
+to function properly!
+
+We recommend not moving these files. For more information, see the Certbot
+User Guide at https://certbot.eff.org/docs/using.html#where-are-my-certificates.
@@ -0,0 +1,18 @@
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAcOgAwIBAgIIBvrEnbPRYu8wDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
+AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxMjZjNGIwHhcNMjAxMDEyMjEwNzQw
+WhcNMjUxMDEyMjEwNzQwWjAjMSEwHwYDVQQDExhjLmVuY3J5cHRpb24tZXhhbXBs
+ZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAARjMhuW0ENPPC33PjB5XsYU
+CRw640kPQENIDatcTJaENZIZdqKd6rI6jc+lpbmXot7Zi52clJlSJS+V6oDAt2Lh
+o4HYMIHVMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB
+BQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUj7Kd3ENqxlPf8B2bIGhsjydX
+mPswHwYDVR0jBBgwFoAUEiGxlkRsi+VvcogH5dVD3h1laAcwMQYIKwYBBQUHAQEE
+JTAjMCEGCCsGAQUFBzABhhVodHRwOi8vMTI3LjAuMC4xOjQwMDIwIwYDVR0RBBww
+GoIYYy5lbmNyeXB0aW9uLWV4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQCl
+k0JXsa8y7fg41WWMDhw60bPW77O0FtOmTcnhdI5daYNemQVk+Q5EMaBLQ/oGjgXd
+9QXFzXH1PL904YEnSLt+iTpXn++7rQSNzQsdYqw0neWk4f5pEBiN+WORpb6mwobV
+ifMtBOkNEHvrJ2Pkci9U1lLwtKD/DSew6QtJU5DSkmH1XdGuMJiubygEIvELtvgq
+cP9S368ZvPmPGmKaJQXBiuaR8MTjY/Bkr79aXQMjKbf+mpn7h0POCcePk1DY/rm6
+Da+X16lf0hHyQhSUa7Vgyim6rK1/hlw+Z00i+sQCKD9Ih7kXuuGqfSDC33cfO8Tj
+o/MXO8lcxkrem5zU5QWP
+-----END CERTIFICATE-----
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDUDCCAjigAwIBAgIIbi787yVrcMAwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE
+AxMVUGViYmxlIFJvb3QgQ0EgMGM1MjI1MCAXDTIwMTAxMjIwMjI0NloYDzIwNTAx
+MDEyMjEyMjQ2WjAoMSYwJAYDVQQDEx1QZWJibGUgSW50ZXJtZWRpYXRlIENBIDEy
+NmM0YjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALGeVk1BMJraeqRq
+mJ2+hgso8VOAv2s2CVxUJjIVcn7f2adE8NyTsSQ1brlsnKCUYUw7yLTQH0izLQRB
+qKVIDFkUqo5/FuTJ2QlfA2EwBL8J7s/7L7vj3L0DiVpwgxPSyFEwdl/Y5y7ofsX5
+CIhCFcaMAmTIuKLiSfCJjGwkbEMuolm+lO8Mikxxc/JtDVUC479ugU7PU9O09bMH
+nm+sD6Bgd+KMoPkCCCoeShJS9X3Ziq9HGc7Z6nhM/zirFARt2XkonEdAZ8br01zY
+MRiY9txhlWQ7mUkOtzOSoEuYJNoUbvMUf0+tNzto26WRyF7dJmh7lTBsYrvAwUTx
+PzNyst0CAwEAAaOBgzCBgDAOBgNVHQ8BAf8EBAMCAoQwHQYDVR0lBBYwFAYIKwYB
+BQUHAwEGCCsGAQUFBwMCMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFBIhsZZE
+bIvlb3KIB+XVQ94dZWgHMB8GA1UdIwQYMBaAFOaKTaXg37vKgRt7d79YOjAoAtJT
+MA0GCSqGSIb3DQEBCwUAA4IBAQAU2mZii7PH2pkw2lNM0QqPbcW/UYyvFoUeM8Aq
+uCtsI2s+oxCJTqzfLsA0N8NY4nHLQ5wAlNJfJekngni8hbmJTKU4JFTMe7kLQO8P
+fJbk0pTzhhHVQw7CVwB6Pwq3u2m/JV+d6xDIDc+AVkuEl19ZJU0rTWyooClfFLZV
+EdZmEiUtA3PGlxoYwYhoGHYlhFxsoFONhCsBEdN7k7FKtFGVxN7oc5SKmKp0YZTW
+fcrEtrdNThATO4ymhCC2zh33NI/MT1O74fpaAc2k6LcTl57MKiLfTYX4LTL6v9JG
+9tlNqjFVRRmzEbtXTPcCb+w9g1VqoOGok7mGXYLTYtShCuvE
+-----END CERTIFICATE-----
@@ -0,0 +1,38 @@
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAcOgAwIBAgIILlmGtZhUFEwwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
+AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxMjZjNGIwHhcNMjAxMDEyMjA1MDM0
+WhcNMjUxMDEyMjA1MDM0WjAjMSEwHwYDVQQDExhjLmVuY3J5cHRpb24tZXhhbXBs
+ZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAARHEzR8JPWrEmpmgM+F2bk5
+9mT0u6CjzmJG0QpbaqprLiG5NGpW84VQ5TFCrmC4KxYfigCfMhfHRNfFYvNUK3V/
+o4HYMIHVMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB
+BQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1CsVL+bPnzaxxQ5jUENmQJIO
+lKwwHwYDVR0jBBgwFoAUEiGxlkRsi+VvcogH5dVD3h1laAcwMQYIKwYBBQUHAQEE
+JTAjMCEGCCsGAQUFBzABhhVodHRwOi8vMTI3LjAuMC4xOjQwMDIwIwYDVR0RBBww
+GoIYYy5lbmNyeXB0aW9uLWV4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQBn
+2D8loC7pfk28JYpFLr5lmFKJWWmtLGlpsWDj61fVjtTfGKLziJz+MM6il4Y3hIz5
+58qiFK0ue0M63dIBJ33N+XxSEXon4Q0gy/zRWfH9jtPJ3FwfjkU/RT9PAUClYi0G
+ptNWnTmgQkNzousbcAtRNXuuShH3856vhUnwkX+xM+cbIDi1JVmFjcGrEEQJ0rUF
+mv2ZTyfbWbUs3v4rReETi2NVzr1Ql6J+ByNcMvHODzFy3t0L6yelAw2ca1I+c9HU
++Z0tnp/ykR7eXNuVLivok8UBf5OC413lh8ZO5g+Bgzh/LdtkUuavg1MYtEX0H6mX
+9U7y3nVI8WEbPGf+HDeu
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIDUDCCAjigAwIBAgIIbi787yVrcMAwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE
+AxMVUGViYmxlIFJvb3QgQ0EgMGM1MjI1MCAXDTIwMTAxMjIwMjI0NloYDzIwNTAx
+MDEyMjEyMjQ2WjAoMSYwJAYDVQQDEx1QZWJibGUgSW50ZXJtZWRpYXRlIENBIDEy
+NmM0YjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALGeVk1BMJraeqRq
+mJ2+hgso8VOAv2s2CVxUJjIVcn7f2adE8NyTsSQ1brlsnKCUYUw7yLTQH0izLQRB
+qKVIDFkUqo5/FuTJ2QlfA2EwBL8J7s/7L7vj3L0DiVpwgxPSyFEwdl/Y5y7ofsX5
+CIhCFcaMAmTIuKLiSfCJjGwkbEMuolm+lO8Mikxxc/JtDVUC479ugU7PU9O09bMH
+nm+sD6Bgd+KMoPkCCCoeShJS9X3Ziq9HGc7Z6nhM/zirFARt2XkonEdAZ8br01zY
+MRiY9txhlWQ7mUkOtzOSoEuYJNoUbvMUf0+tNzto26WRyF7dJmh7lTBsYrvAwUTx
+PzNyst0CAwEAAaOBgzCBgDAOBgNVHQ8BAf8EBAMCAoQwHQYDVR0lBBYwFAYIKwYB
+BQUHAwEGCCsGAQUFBwMCMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFBIhsZZE
+bIvlb3KIB+XVQ94dZWgHMB8GA1UdIwQYMBaAFOaKTaXg37vKgRt7d79YOjAoAtJT
+MA0GCSqGSIb3DQEBCwUAA4IBAQAU2mZii7PH2pkw2lNM0QqPbcW/UYyvFoUeM8Aq
+uCtsI2s+oxCJTqzfLsA0N8NY4nHLQ5wAlNJfJekngni8hbmJTKU4JFTMe7kLQO8P
+fJbk0pTzhhHVQw7CVwB6Pwq3u2m/JV+d6xDIDc+AVkuEl19ZJU0rTWyooClfFLZV
+EdZmEiUtA3PGlxoYwYhoGHYlhFxsoFONhCsBEdN7k7FKtFGVxN7oc5SKmKp0YZTW
+fcrEtrdNThATO4ymhCC2zh33NI/MT1O74fpaAc2k6LcTl57MKiLfTYX4LTL6v9JG
+9tlNqjFVRRmzEbtXTPcCb+w9g1VqoOGok7mGXYLTYtShCuvE
+-----END CERTIFICATE-----
@@ -0,0 +1,5 @@
+-----BEGIN PRIVATE KEY-----
+MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgNgefv2dad4U1VYEi
+0WkdHuqywi5QXAe30OwNTTGjhbihRANCAARHEzR8JPWrEmpmgM+F2bk59mT0u6Cj
+zmJG0QpbaqprLiG5NGpW84VQ5TFCrmC4KxYfigCfMhfHRNfFYvNUK3V/
+-----END PRIVATE KEY-----
@@ -0,0 +1,14 @@
+This directory contains your keys and certificates.
+
+`privkey.pem` : the private key for your certificate.
+`fullchain.pem`: the certificate file used in most server software.
+`chain.pem` : used for OCSP stapling in Nginx >=1.3.7.
+`cert.pem` : will break many server configurations, and should not be used
+without reading further documentation (see link below).
+
+WARNING: DO NOT MOVE OR RENAME THESE FILES!
+Certbot expects these files to remain in this location in order
+to function properly!
+
+We recommend not moving these files. For more information, see the Certbot
+User Guide at https://certbot.eff.org/docs/using.html#where-are-my-certificates.
@@ -0,0 +1 @@
+../../archive/c.encryption-example.com/cert.pem
@@ -0,0 +1 @@
+../../archive/c.encryption-example.com/chain.pem
@@ -0,0 +1 @@
+../../archive/c.encryption-example.com/fullchain.pem
@@ -0,0 +1 @@
+../../archive/c.encryption-example.com/privkey.pem
@@ -0,0 +1,17 @@
+# renew_before_expiry = 30 days
+version = 1.10.0.dev0
+archive_dir = sample-config/archive/c.encryption-example.com
+cert = sample-config/live/c.encryption-example.com/cert.pem
+privkey = sample-config/live/c.encryption-example.com/privkey.pem
+chain = sample-config/live/c.encryption-example.com/chain.pem
+fullchain = sample-config/live/c.encryption-example.com/fullchain.pem
+
+# Options used in the renewal process
+[renewalparams]
+authenticator = apache
+installer = apache
+account = 48d6b9e8d767eccf7e4d877d6ffa81e3
+key_type = ecdsa
+config_dir = sample-config-ec
+elliptic_curve = secp256r1
+manual_public_ip_logging_ok = True
@@ -1,3 +1,4 @@
+# pylint: disable=missing-module-docstring
 import pytest

 # Custom assertions defined in the following package need to be registered to be properly
@@ -2,6 +2,11 @@
 import io
 import os

+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+from cryptography.hazmat.primitives.serialization import load_pem_private_key
+
 try:
 import grp
 POSIX_MODE = True
@@ -16,6 +21,33 @@ SYSTEM_SID = 'S-1-5-18'
 ADMINS_SID = 'S-1-5-32-544'


+def assert_elliptic_key(key, curve):
+"""
+Asserts that the key at the given path is an EC key using the given curve.
+:param key: path to key
+:param curve: name of the expected elliptic curve
+"""
+with open(key, 'rb') as file:
+privkey1 = file.read()
+
+key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())
+
+assert isinstance(key, EllipticCurvePrivateKey)
+assert isinstance(key.curve, curve)
+
+
+def assert_rsa_key(key):
+"""
+Asserts that the key at the given path is an RSA key.
+:param key: path to key
+"""
+with open(key, 'rb') as file:
+privkey1 = file.read()
+
+key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())
+assert isinstance(key, RSAPrivateKey)
+
+
 def assert_hook_execution(probe_path, probe_content):
 """
 Assert that a certbot hook has been executed
@@ -77,6 +77,6 @@ class IntegrationTestsContext(object):
 appending the pytest worker id to the subdomain, using this pattern:
 {subdomain}.{worker_id}.wtf
 :param subdomain: the subdomain to use in the generated domain (default 'le')
 :return: the well-formed domain suitable for redirection on
 """
 return '{0}.{1}.wtf'.format(subdomain, self.worker_id)
@@ -9,12 +9,15 @@ import shutil
 import subprocess
 import time

+from cryptography.hazmat.primitives.asymmetric.ec import SECP256R1, SECP384R1
 from cryptography.x509 import NameOID

 import pytest

 from certbot_integration_tests.certbot_tests import context as certbot_context
 from certbot_integration_tests.certbot_tests.assertions import assert_cert_count_for_lineage
+from certbot_integration_tests.certbot_tests.assertions import assert_elliptic_key
+from certbot_integration_tests.certbot_tests.assertions import assert_rsa_key
 from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_owner
 from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_permissions
 from certbot_integration_tests.certbot_tests.assertions import assert_equals_world_read_permissions
@@ -26,8 +29,9 @@ from certbot_integration_tests.certbot_tests.assertions import EVERYBODY_SID
 from certbot_integration_tests.utils import misc


-@pytest.fixture()
+@pytest.fixture(name='context')
-def context(request):
+def test_context(request):
+# pylint: disable=missing-function-docstring
 # Fixture request is a built-in pytest fixture describing current test request.
 integration_test_context = certbot_context.IntegrationTestsContext(request)
 try:
@@ -219,14 +223,16 @@ def test_renew_files_propagate_permissions(context):
 if os.name != 'nt':
 os.chmod(privkey1, 0o444)
 else:
-import win32security
+import win32security  # pylint: disable=import-error
-import ntsecuritycon
+import ntsecuritycon  # pylint: disable=import-error
 # Get the current DACL of the private key
 security = win32security.GetFileSecurity(privkey1, win32security.DACL_SECURITY_INFORMATION)
 dacl = security.GetSecurityDescriptorDacl()
 # Create a read permission for Everybody group
 everybody = win32security.ConvertStringSidToSid(EVERYBODY_SID)
-dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ, everybody)
+dacl.AddAccessAllowedAce(
+win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ, everybody
+)
 # Apply the updated DACL to the private key
 security.SetSecurityDescriptorDacl(1, dacl, 0)
 win32security.SetFileSecurity(privkey1, win32security.DACL_SECURITY_INFORMATION, security)
@@ -235,12 +241,14 @@ def test_renew_files_propagate_permissions(context):

 assert_cert_count_for_lineage(context.config_dir, certname, 2)
 if os.name != 'nt':
-# On Linux, read world permissions + all group permissions will be copied from the previous private key
+# On Linux, read world permissions + all group permissions
+# will be copied from the previous private key
 assert_world_read_permissions(privkey2)
 assert_equals_world_read_permissions(privkey1, privkey2)
 assert_equals_group_permissions(privkey1, privkey2)
 else:
-# On Windows, world will never have any permissions, and group permission is irrelevant for this platform
+# On Windows, world will never have any permissions, and
+# group permission is irrelevant for this platform
 assert_world_no_permissions(privkey2)
@@ -289,7 +297,7 @@ def test_renew_with_changed_private_key_complexity(context):
 assert_cert_count_for_lineage(context.config_dir, certname, 1)

 context.certbot(['renew'])

 assert_cert_count_for_lineage(context.config_dir, certname, 2)
 key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
 assert os.stat(key2).st_size > 3000
@@ -421,20 +429,93 @@ def test_reuse_key(context):
 assert len({cert1, cert2, cert3}) == 3


+def test_incorrect_key_type(context):
+with pytest.raises(subprocess.CalledProcessError):
+context.certbot(['--key-type="failwhale"'])
+
+
 def test_ecdsa(context):
-"""Test certificate issuance with ECDSA key."""
+"""Test issuance for ECDSA CSR based request (legacy supported mode)."""
 key_path = join(context.workspace, 'privkey-p384.pem')
 csr_path = join(context.workspace, 'csr-p384.der')
 cert_path = join(context.workspace, 'cert-p384.pem')
 chain_path = join(context.workspace, 'chain-p384.pem')

-misc.generate_csr([context.get_domain('ecdsa')], key_path, csr_path, key_type=misc.ECDSA_KEY_TYPE)
-context.certbot(['auth', '--csr', csr_path, '--cert-path', cert_path, '--chain-path', chain_path])
+misc.generate_csr(
+[context.get_domain('ecdsa')],
+key_path, csr_path,
+key_type=misc.ECDSA_KEY_TYPE
+)
+context.certbot([
+'auth', '--csr', csr_path, '--cert-path', cert_path,
+'--chain-path', chain_path,
+])

 certificate = misc.read_certificate(cert_path)
 assert 'ASN1 OID: secp384r1' in certificate


+def test_default_key_type(context):
+"""Test default key type is RSA"""
+certname = context.get_domain('renew')
+context.certbot([
+'certonly',
+'--cert-name', certname, '-d', certname
+])
+filename = join(context.config_dir, 'archive/{0}/privkey1.pem').format(certname)
+assert_rsa_key(filename)
+
+
+def test_default_curve_type(context):
+"""test that the curve used when not specifying any is secp256r1"""
+certname = context.get_domain('renew')
+context.certbot([
+'--key-type', 'ecdsa', '--cert-name', certname, '-d', certname
+])
+key1 = join(context.config_dir, 'archive/{0}/privkey1.pem'.format(certname))
+assert_elliptic_key(key1, SECP256R1)
+
+
+def test_renew_with_ec_keys(context):
+"""Test proper renew with updated private key complexity."""
+certname = context.get_domain('renew')
+context.certbot([
+'certonly',
+'--cert-name', certname,
+'--key-type', 'ecdsa', '--elliptic-curve', 'secp256r1',
+'--force-renewal', '-d', certname,
+])
+
+key1 = join(context.config_dir, "archive", certname, 'privkey1.pem')
+assert 200 < os.stat(key1).st_size < 250  # ec keys of 256 bits are ~225 bytes
+assert_elliptic_key(key1, SECP256R1)
+assert_cert_count_for_lineage(context.config_dir, certname, 1)
+
+context.certbot(['renew', '--elliptic-curve', 'secp384r1'])
+
+assert_cert_count_for_lineage(context.config_dir, certname, 2)
+key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
+assert_elliptic_key(key2, SECP384R1)
+assert 280 < os.stat(key2).st_size < 320  # ec keys of 384 bits are ~310 bytes
+
+# We expect here that the command will fail because without --key-type specified,
+# Certbot must error out to prevent changing an existing certificate key type,
+# without explicit user consent (by specifying both --cert-name and --key-type).
+with pytest.raises(subprocess.CalledProcessError):
+context.certbot([
+'certonly',
+'--force-renewal',
+'-d', certname
+])
+
+# We expect that the previous behavior of requiring both --cert-name and
+# --key-type to be set to not apply to the renew subcommand.
+context.certbot(['renew', '--force-renewal', '--key-type', 'rsa'])
+assert_cert_count_for_lineage(context.config_dir, certname, 3)
+key3 = join(context.config_dir, 'archive', certname, 'privkey3.pem')
+assert_rsa_key(key3)
+
+
 def test_ocsp_must_staple(context):
 """Test that OCSP Must-Staple is correctly set in the generated certificate."""
 if context.acme_server == 'pebble':
@@ -533,18 +614,22 @@ def test_revoke_multiple_lineages(context):
 with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'r') as file:
 data = file.read()

-data = re.sub('archive_dir = .*\n',
-'archive_dir = {0}\n'.format(join(context.config_dir, 'archive', cert1).replace('\\', '\\\\')),
-data)
+data = re.sub(
+'archive_dir = .*\n',
+'archive_dir = {0}\n'.format(
+join(context.config_dir, 'archive', cert1).replace('\\', '\\\\')
+), data
+)

 with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'w') as file:
 file.write(data)

-output = context.certbot([
+context.certbot([
 'revoke', '--cert-path', join(context.config_dir, 'live', cert1, 'cert.pem')
 ])

-assert 'Not deleting revoked certs due to overlapping archive dirs' in output
+with open(join(context.workspace, 'logs', 'letsencrypt.log'), 'r') as f:
+assert 'Not deleting revoked certificates due to overlapping archive dirs' in f.read()


 def test_wildcard_certificates(context):
@@ -657,4 +742,4 @@ def test_preferred_chain(context):

 with open(conf_path, 'r') as f:
 assert 'preferred_chain = {}'.format(requested) in f.read(), \
 'Expected preferred_chain to be set in renewal config'
@@ -12,6 +12,7 @@ import subprocess
 import sys

 from certbot_integration_tests.utils import acme_server as acme_lib
+from certbot_integration_tests.utils import dns_server as dns_lib


 def pytest_addoption(parser):
@@ -23,6 +24,10 @@ def pytest_addoption(parser):
 choices=['boulder-v1', 'boulder-v2', 'pebble'],
 help='select the ACME server to use (boulder-v1, boulder-v2, '
 'pebble), defaulting to pebble')
+parser.addoption('--dns-server', default='challtestsrv',
+choices=['bind', 'challtestsrv'],
+help='select the DNS server to use (bind, challtestsrv), '
+'defaulting to challtestsrv')


 def pytest_configure(config):
@@ -32,7 +37,7 @@ def pytest_configure(config):
 """
 if not hasattr(config, 'slaveinput'):  # If true, this is the primary node
 with _print_on_err():
-config.acme_xdist = _setup_primary_node(config)
+_setup_primary_node(config)


 def pytest_configure_node(node):
@@ -41,6 +46,7 @@ def pytest_configure_node(node):
 :param node: current worker node
 """
 node.slaveinput['acme_xdist'] = node.config.acme_xdist
+node.slaveinput['dns_xdist'] = node.config.dns_xdist


 @contextlib.contextmanager
@@ -61,12 +67,18 @@ def _print_on_err():
 def _setup_primary_node(config):
 """
 Setup the environment for integration tests.
-Will:
+
+This function will:
 - check runtime compatibility (Docker, docker-compose, Nginx)
 - create a temporary workspace and the persistent GIT repositories space
+- configure and start a DNS server using Docker, if configured
 - configure and start paralleled ACME CA servers using Docker
-- transfer ACME CA servers configurations to pytest nodes using env variables
+- transfer ACME CA and DNS servers configurations to pytest nodes using env variables
-:param config: Configuration of the pytest primary node
+
+This function modifies `config` by injecting the ACME CA and DNS server configurations,
+in addition to cleanup functions for those servers.
+
+:param config: Configuration of the pytest primary node. Is modified by this function.
 """
 # Check for runtime compatibility: some tools are required to be available in PATH
 if 'boulder' in config.option.acme_server:
@@ -79,18 +91,35 @@ def _setup_primary_node(config):
 try:
 subprocess.check_output(['docker-compose', '-v'], stderr=subprocess.STDOUT)
 except (subprocess.CalledProcessError, OSError):
-raise ValueError('Error: docker-compose is required in PATH to launch the integration tests, '
-'but is not installed or not available for current user.')
+raise ValueError(
+'Error: docker-compose is required in PATH to launch the integration tests, '
+'but is not installed or not available for current user.'
+)

 # Parameter numprocesses is added to option by pytest-xdist
 workers = ['primary'] if not config.option.numprocesses\
 else ['gw{0}'.format(i) for i in range(config.option.numprocesses)]

+# If a non-default DNS server is configured, start it and feed it to the ACME server
+dns_server = None
+acme_dns_server = None
+if config.option.dns_server == 'bind':
+dns_server = dns_lib.DNSServer(workers)
+config.add_cleanup(dns_server.stop)
+print('DNS xdist config:\n{0}'.format(dns_server.dns_xdist))
+dns_server.start()
+acme_dns_server = '{}:{}'.format(
+dns_server.dns_xdist['address'],
+dns_server.dns_xdist['port']
+)
+
 # By calling setup_acme_server we ensure that all necessary acme server instances will be
 # fully started. This runtime is reflected by the acme_xdist returned.
-acme_server = acme_lib.ACMEServer(config.option.acme_server, workers)
+acme_server = acme_lib.ACMEServer(config.option.acme_server, workers,
+dns_server=acme_dns_server)
 config.add_cleanup(acme_server.stop)
 print('ACME xdist config:\n{0}'.format(acme_server.acme_xdist))
 acme_server.start()

-return acme_server.acme_xdist
+config.acme_xdist = acme_server.acme_xdist
+config.dns_xdist = dns_server.dns_xdist if dns_server else None
@@ -1,3 +1,4 @@
+"""Module to handle the context of nginx integration tests."""
 import os
 import subprocess

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """General purpose nginx test configuration generator."""
 import getpass

@@ -42,6 +43,8 @@ events {{
     worker_connections 1024;
 }}

+# “This comment contains valid Unicode”.
+
 http {{
     # Set an array of temp, cache and log file options that will otherwise default to
     # restricted locations accessible only to root.
@@ -51,61 +54,61 @@ http {{
     #scgi_temp_path {nginx_root}/scgi_temp;
     #uwsgi_temp_path {nginx_root}/uwsgi_temp;
     access_log {nginx_root}/error.log;

     # This should be turned off in a Virtualbox VM, as it can cause some
     # interesting issues with data corruption in delivered files.
     sendfile off;

     tcp_nopush on;
     tcp_nodelay on;
     keepalive_timeout 65;
     types_hash_max_size 2048;

     #include /etc/nginx/mime.types;
     index index.html index.htm index.php;

     log_format main '$remote_addr - $remote_user [$time_local] $status '
                     '"$request" $body_bytes_sent "$http_referer" '
                     '"$http_user_agent" "$http_x_forwarded_for"';

     default_type application/octet-stream;

     server {{
         # IPv4.
         listen {http_port} {default_server};
         # IPv6.
         listen [::]:{http_port} {default_server};
         server_name nginx.{wtf_prefix}.wtf nginx2.{wtf_prefix}.wtf;

         root {nginx_webroot};

         location / {{
             # First attempt to serve request as file, then as directory, then fall
             # back to index.html.
             try_files $uri $uri/ /index.html;
         }}
     }}

     server {{
         listen {http_port};
         listen [::]:{http_port};
         server_name nginx3.{wtf_prefix}.wtf;

         root {nginx_webroot};

         location /.well-known/ {{
             return 404;
         }}

         return 301 https://$host$request_uri;
     }}

     server {{
         listen {other_port};
         listen [::]:{other_port};
         server_name nginx4.{wtf_prefix}.wtf nginx5.{wtf_prefix}.wtf;
     }}

     server {{
         listen {http_port};
         listen [::]:{http_port};
@@ -2,13 +2,14 @@
 import os
 import ssl

+from typing import List
 import pytest

 from certbot_integration_tests.nginx_tests import context as nginx_context


-@pytest.fixture()
-def context(request):
+@pytest.fixture(name='context')
+def test_context(request):
     # Fixture request is a built-in pytest fixture describing current test request.
     integration_test_context = nginx_context.IntegrationTestsContext(request)
     try:
@@ -27,10 +28,12 @@ def context(request):
     # No matching server block; default_server does not exist
     ('nginx5.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': False}),
     # Multiple domains, mix of matching and not
-    ('nginx6.{0}.wtf,nginx7.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': False}),
+    ('nginx6.{0}.wtf,nginx7.{0}.wtf', [
+        '--preferred-challenges', 'http'
+    ], {'default_server': False}),
 ], indirect=['context'])
 def test_certificate_deployment(certname_pattern, params, context):
-    # type: (str, list, nginx_context.IntegrationTestsContext) -> None
+    # type: (str, List[str], nginx_context.IntegrationTestsContext) -> None
     """
     Test various scenarios to deploy a certificate to nginx using certbot.
     """
@@ -41,7 +44,9 @@ def test_certificate_deployment(certname_pattern, params, context):

     lineage = domains.split(',')[0]
     server_cert = ssl.get_server_certificate(('localhost', context.tls_alpn_01_port))
-    with open(os.path.join(context.workspace, 'conf/live/{0}/cert.pem'.format(lineage)), 'r') as file:
+    with open(os.path.join(
+        context.workspace, 'conf/live/{0}/cert.pem'.format(lineage)), 'r'
+    ) as file:
         certbot_cert = file.read()

     assert server_cert == certbot_cert
@@ -0,0 +1,66 @@
+"""Module to handle the context of RFC2136 integration tests."""
+
+import tempfile
+from contextlib import contextmanager
+
+from pkg_resources import resource_filename
+from pytest import skip
+
+from certbot_integration_tests.certbot_tests import context as certbot_context
+from certbot_integration_tests.utils import certbot_call
+
+
+class IntegrationTestsContext(certbot_context.IntegrationTestsContext):
+    """Integration test context for certbot-dns-rfc2136"""
+    def __init__(self, request):
+        super(IntegrationTestsContext, self).__init__(request)
+
+        self.request = request
+
+        self._dns_xdist = None
+        if hasattr(request.config, 'slaveinput'):  # Worker node
+            self._dns_xdist = request.config.slaveinput['dns_xdist']
+        else:  # Primary node
+            self._dns_xdist = request.config.dns_xdist
+
+    def certbot_test_rfc2136(self, args):
+        """
+        Main command to execute certbot using the RFC2136 DNS authenticator.
+        :param list args: list of arguments to pass to Certbot
+        """
+        command = ['--authenticator', 'dns-rfc2136', '--dns-rfc2136-propagation-seconds', '2']
+        command.extend(args)
+        return certbot_call.certbot_test(
+            command, self.directory_url, self.http_01_port, self.tls_alpn_01_port,
+            self.config_dir, self.workspace, force_renew=True)
+
+    @contextmanager
+    def rfc2136_credentials(self, label='default'):
+        """
+        Produces the contents of a certbot-dns-rfc2136 credentials file.
+        :param str label: which RFC2136 credential to use
+        :yields: Path to credentials file
+        :rtype: str
+        """
+        src_file = resource_filename('certbot_integration_tests',
                                     'assets/bind-config/rfc2136-credentials-{}.ini.tpl'
                                     .format(label))
+        contents = None
+
+        with open(src_file, 'r') as f:
+            contents = f.read().format(
+                server_address=self._dns_xdist['address'],
+                server_port=self._dns_xdist['port']
+            )
+
+        with tempfile.NamedTemporaryFile('w+', prefix='rfc2136-creds-{}'.format(label),
                                         suffix='.ini', dir=self.workspace) as fp:
+            fp.write(contents)
+            fp.flush()
+            yield fp.name
+
+    def skip_if_no_bind9_server(self):
+        """Skips the test if there was no RFC2136-capable DNS server configured
+        in the test environment"""
+        if not self._dns_xdist:
+            skip('No RFC2136-capable DNS server is configured')
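`rfc2136_credentials` renders a packaged `rfc2136-credentials-<label>.ini.tpl` template with the live DNS server address and port before handing the temporary file path to Certbot. The template itself is not part of this diff; a plausible sketch of its contents, assuming the standard certbot-dns-rfc2136 credential keys and placeholder TSIG values, written here as a Python string to show the substitution:

    # Hypothetical template contents; only {server_address} and {server_port}
    # are filled in by rfc2136_credentials(), the TSIG name and secret are placeholders.
    CREDENTIALS_TEMPLATE = (
        'dns_rfc2136_server = {server_address}\n'
        'dns_rfc2136_port = {server_port}\n'
        'dns_rfc2136_name = keyname.\n'
        'dns_rfc2136_secret = <base64-encoded-tsig-secret>\n'
        'dns_rfc2136_algorithm = HMAC-SHA512\n'
    )
    print(CREDENTIALS_TEMPLATE.format(server_address='127.0.0.1', server_port=45953))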
@@ -0,0 +1,26 @@
+"""Module executing integration tests against Certbot with the RFC2136 DNS authenticator."""
+import pytest
+
+from certbot_integration_tests.rfc2136_tests import context as rfc2136_context
+
+
+@pytest.fixture(name="context")
+def pytest_context(request):
+    # pylint: disable=missing-function-docstring
+    # Fixture request is a built-in pytest fixture describing current test request.
+    integration_test_context = rfc2136_context.IntegrationTestsContext(request)
+    try:
+        yield integration_test_context
+    finally:
+        integration_test_context.cleanup()
+
+
+@pytest.mark.parametrize('domain', [('example.com'), ('sub.example.com')])
+def test_get_certificate(domain, context):
+    context.skip_if_no_bind9_server()
+
+    with context.rfc2136_credentials() as creds:
+        context.certbot_test_rfc2136([
+            'certonly', '--dns-rfc2136-credentials', creds,
+            '-d', domain, '-d', '*.{}'.format(domain)
+        ])
@@ -2,6 +2,7 @@
 """Module to setup an ACME CA server environment able to run multiple tests in parallel"""
 from __future__ import print_function

+import argparse
 import errno
 import json
 import os
@@ -12,11 +13,13 @@ import sys
 import tempfile
 import time

+from typing import List
 import requests

 from certbot_integration_tests.utils import misc
 from certbot_integration_tests.utils import pebble_artifacts
 from certbot_integration_tests.utils import proxy
+# pylint: disable=wildcard-import,unused-wildcard-import
 from certbot_integration_tests.utils.constants import *


@@ -29,24 +32,34 @@ class ACMEServer(object):
     ACMEServer gives access the acme_xdist parameter, listing the ports and directory url to use
     for each pytest node. It exposes also start and stop methods in order to start the stack, and
     stop it with proper resources cleanup.
-    ACMEServer is also a context manager, and so can be used to ensure ACME server is started/stopped
-    upon context enter/exit.
+    ACMEServer is also a context manager, and so can be used to ensure ACME server is
+    started/stopped upon context enter/exit.
     """
-    def __init__(self, acme_server, nodes, http_proxy=True, stdout=False):
+    def __init__(self, acme_server, nodes, http_proxy=True, stdout=False,
+                 dns_server=None, http_01_port=DEFAULT_HTTP_01_PORT):
         """
         Create an ACMEServer instance.
         :param str acme_server: the type of acme server used (boulder-v1, boulder-v2 or pebble)
         :param list nodes: list of node names that will be setup by pytest xdist
         :param bool http_proxy: if False do not start the HTTP proxy
         :param bool stdout: if True stream all subprocesses stdout to standard stdout
+        :param str dns_server: if set, Pebble/Boulder will use it to resolve domains
+        :param int http_01_port: port to use for http-01 validation; currently
+            only supported for pebble without an HTTP proxy
         """
         self._construct_acme_xdist(acme_server, nodes)

         self._acme_type = 'pebble' if acme_server == 'pebble' else 'boulder'
         self._proxy = http_proxy
         self._workspace = tempfile.mkdtemp()
-        self._processes = []
+        self._processes = []  # type: List[subprocess.Popen]
         self._stdout = sys.stdout if stdout else open(os.devnull, 'w')
+        self._dns_server = dns_server
+        self._http_01_port = http_01_port
+        if http_01_port != DEFAULT_HTTP_01_PORT:
+            if self._acme_type != 'pebble' or self._proxy:
+                raise ValueError('setting http_01_port is not currently supported '
+                                 'with boulder or the HTTP proxy')

     def start(self):
         """Start the test stack"""
@@ -103,26 +116,34 @@ class ACMEServer(object):
         """Generate and return the acme_xdist dict"""
         acme_xdist = {'acme_server': acme_server, 'challtestsrv_port': CHALLTESTSRV_PORT}

-        # Directory and ACME port are set implicitly in the docker-compose.yml files of Boulder/Pebble.
+        # Directory and ACME port are set implicitly in the docker-compose.yml
+        # files of Boulder/Pebble.
         if acme_server == 'pebble':
             acme_xdist['directory_url'] = PEBBLE_DIRECTORY_URL
         else:  # boulder
             acme_xdist['directory_url'] = BOULDER_V2_DIRECTORY_URL \
                 if acme_server == 'boulder-v2' else BOULDER_V1_DIRECTORY_URL

-        acme_xdist['http_port'] = {node: port for (node, port)
-                                   in zip(nodes, range(5200, 5200 + len(nodes)))}
-        acme_xdist['https_port'] = {node: port for (node, port)
-                                    in zip(nodes, range(5100, 5100 + len(nodes)))}
-        acme_xdist['other_port'] = {node: port for (node, port)
-                                    in zip(nodes, range(5300, 5300 + len(nodes)))}
+        acme_xdist['http_port'] = {
+            node: port for (node, port) in  # pylint: disable=unnecessary-comprehension
+            zip(nodes, range(5200, 5200 + len(nodes)))
+        }
+        acme_xdist['https_port'] = {
+            node: port for (node, port) in  # pylint: disable=unnecessary-comprehension
+            zip(nodes, range(5100, 5100 + len(nodes)))
+        }
+        acme_xdist['other_port'] = {
+            node: port for (node, port) in  # pylint: disable=unnecessary-comprehension
+            zip(nodes, range(5300, 5300 + len(nodes)))
+        }

         self.acme_xdist = acme_xdist

     def _prepare_pebble_server(self):
         """Configure and launch the Pebble server"""
         print('=> Starting pebble instance deployment...')
-        pebble_path, challtestsrv_path, pebble_config_path = pebble_artifacts.fetch(self._workspace)
+        pebble_artifacts_rv = pebble_artifacts.fetch(self._workspace, self._http_01_port)
+        pebble_path, challtestsrv_path, pebble_config_path = pebble_artifacts_rv

         # Configure Pebble at full speed (PEBBLE_VA_NOSLEEP=1) and not randomly refusing valid
         # nonce (PEBBLE_WFE_NONCEREJECT=0) to have a stable test environment.
@@ -132,18 +153,23 @@ class ACMEServer(object):
         environ['PEBBLE_AUTHZREUSE'] = '100'
         environ['PEBBLE_ALTERNATE_ROOTS'] = str(PEBBLE_ALTERNATE_ROOTS)

+        if self._dns_server:
+            dns_server = self._dns_server
+        else:
+            dns_server = '127.0.0.1:8053'
+        self._launch_process(
+            [challtestsrv_path, '-management', ':{0}'.format(CHALLTESTSRV_PORT),
+             '-defaultIPv6', '""', '-defaultIPv4', '127.0.0.1', '-http01', '""',
+             '-tlsalpn01', '""', '-https01', '""'])
+
         self._launch_process(
-            [pebble_path, '-config', pebble_config_path, '-dnsserver', '127.0.0.1:8053', '-strict'],
+            [pebble_path, '-config', pebble_config_path, '-dnsserver', dns_server, '-strict'],
             env=environ)

-        self._launch_process(
-            [challtestsrv_path, '-management', ':{0}'.format(CHALLTESTSRV_PORT), '-defaultIPv6', '""',
-             '-defaultIPv4', '127.0.0.1', '-http01', '""', '-tlsalpn01', '""', '-https01', '""'])
-
-        # pebble_ocsp_server is imported here and not at the top of module in order to avoid a useless
-        # ImportError, in the case where cryptography dependency is too old to support ocsp, but
-        # Boulder is used instead of Pebble, so pebble_ocsp_server is not used. This is the typical
-        # situation of integration-certbot-oldest tox testenv.
+        # pebble_ocsp_server is imported here and not at the top of module in order to avoid a
+        # useless ImportError, in the case where cryptography dependency is too old to support
+        # ocsp, but Boulder is used instead of Pebble, so pebble_ocsp_server is not used. This is
+        # the typical situation of integration-certbot-oldest tox testenv.
         from certbot_integration_tests.utils import pebble_ocsp_server
         self._launch_process([sys.executable, pebble_ocsp_server.__file__])

@@ -167,6 +193,15 @@ class ACMEServer(object):
         os.rename(join(instance_path, 'test/rate-limit-policies-b.yml'),
                   join(instance_path, 'test/rate-limit-policies.yml'))

+        if self._dns_server:
+            # Change Boulder config to use the provided DNS server
+            for suffix in ["", "-remote-a", "-remote-b"]:
+                with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'r') as f:
+                    config = json.loads(f.read())
+                config['va']['dnsResolvers'] = [self._dns_server]
+                with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'w') as f:
+                    f.write(json.dumps(config, indent=2, separators=(',', ': ')))
+
         try:
             # Launch the Boulder server
             self._launch_process(['docker-compose', 'up', '--force-recreate'], cwd=instance_path)
@@ -175,14 +210,18 @@ class ACMEServer(object):
             print('=> Waiting for boulder instance to respond...')
             misc.check_until_timeout(self.acme_xdist['directory_url'], attempts=300)

-            # Configure challtestsrv to answer any A record request with ip of the docker host.
-            response = requests.post('http://localhost:{0}/set-default-ipv4'.format(CHALLTESTSRV_PORT),
-                                     json={'ip': '10.77.77.1'})
-            response.raise_for_status()
+            if not self._dns_server:
+                # Configure challtestsrv to answer any A record request with ip of the docker host.
+                response = requests.post('http://localhost:{0}/set-default-ipv4'.format(
+                    CHALLTESTSRV_PORT), json={'ip': '10.77.77.1'}
+                )
+                response.raise_for_status()
         except BaseException:
             # If we failed to set up boulder, print its logs.
             print('=> Boulder setup failed. Boulder logs are:')
-            process = self._launch_process(['docker-compose', 'logs'], cwd=instance_path, force_stderr=True)
+            process = self._launch_process([
+                'docker-compose', 'logs'], cwd=instance_path, force_stderr=True
+            )
             process.wait()
             raise

@@ -193,7 +232,7 @@ class ACMEServer(object):
         print('=> Configuring the HTTP proxy...')
         mapping = {r'.+\.{0}\.wtf'.format(node): 'http://127.0.0.1:{0}'.format(port)
                    for node, port in self.acme_xdist['http_port'].items()}
-        command = [sys.executable, proxy.__file__, str(HTTP_01_PORT), json.dumps(mapping)]
+        command = [sys.executable, proxy.__file__, str(DEFAULT_HTTP_01_PORT), json.dumps(mapping)]
         self._launch_process(command)
         print('=> Finished configuring the HTTP proxy.')

@@ -202,20 +241,34 @@ class ACMEServer(object):
         if not env:
             env = os.environ
         stdout = sys.stderr if force_stderr else self._stdout
-        process = subprocess.Popen(command, stdout=stdout, stderr=subprocess.STDOUT, cwd=cwd, env=env)
+        process = subprocess.Popen(
+            command, stdout=stdout, stderr=subprocess.STDOUT, cwd=cwd, env=env
+        )
         self._processes.append(process)
         return process


 def main():
-    args = sys.argv[1:]
-    server_type = args[0] if args else 'pebble'
-    possible_values = ('pebble', 'boulder-v1', 'boulder-v2')
-    if server_type not in possible_values:
-        raise ValueError('Invalid server value {0}, should be one of {1}'
-                         .format(server_type, possible_values))
+    # pylint: disable=missing-function-docstring
+    parser = argparse.ArgumentParser(
+        description='CLI tool to start a local instance of Pebble or Boulder CA server.')
+    parser.add_argument('--server-type', '-s',
+                        choices=['pebble', 'boulder-v1', 'boulder-v2'], default='pebble',
+                        help='type of CA server to start: can be Pebble or Boulder '
+                             '(in ACMEv1 or ACMEv2 mode), Pebble is used if not set.')
+    parser.add_argument('--dns-server', '-d',
+                        help='specify the DNS server as `IP:PORT` to use by '
+                             'Pebble; if not specified, a local mock DNS server will be used to '
+                             'resolve domains to localhost.')
+    parser.add_argument('--http-01-port', type=int, default=DEFAULT_HTTP_01_PORT,
+                        help='specify the port to use for http-01 validation; '
+                             'this is currently only supported for Pebble.')
+    args = parser.parse_args()

-    acme_server = ACMEServer(server_type, [], http_proxy=False, stdout=True)
+    acme_server = ACMEServer(
+        args.server_type, [], http_proxy=False, stdout=True,
+        dns_server=args.dns_server, http_01_port=args.http_01_port,
+    )

     try:
         with acme_server as acme_xdist:
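Taken together, the new constructor arguments let a caller point Pebble (or Boulder) at an external resolver and move the http-01 validation port. A short usage sketch based only on the API shown above (the module path is assumed to be the `acme_lib` module imported by the conftest hunk earlier):

    # Sketch: run Pebble against an already running RFC2136-capable resolver,
    # with http-01 validation moved to port 5555 (pebble only, no HTTP proxy).
    from certbot_integration_tests.utils import acme_server as acme_lib

    server = acme_lib.ACMEServer(
        'pebble', ['primary'], http_proxy=False, stdout=True,
        dns_server='127.0.0.1:45953', http_01_port=5555,
    )
    with server as acme_xdist:
        print(acme_xdist['directory_url'])  # https://localhost:14000/dir for pebble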
@@ -2,12 +2,13 @@
 """Module to call certbot in test mode"""
 from __future__ import absolute_import

-from distutils.version import LooseVersion
 import os
 import subprocess
 import sys
+from distutils.version import LooseVersion

 import certbot_integration_tests
+# pylint: disable=wildcard-import,unused-wildcard-import
 from certbot_integration_tests.utils.constants import *


@@ -35,6 +36,8 @@ def certbot_test(certbot_args, directory_url, http_01_port, tls_alpn_01_port,


 def _prepare_environ(workspace):
+    # pylint: disable=missing-function-docstring
+
     new_environ = os.environ.copy()
     new_environ['TMPDIR'] = workspace

@@ -58,8 +61,13 @@ def _prepare_environ(workspace):
     # certbot_integration_tests.__file__ is:
     # '/path/to/certbot/certbot-ci/certbot_integration_tests/__init__.pyc'
     # ... and we want '/path/to/certbot'
-    certbot_root = os.path.dirname(os.path.dirname(os.path.dirname(certbot_integration_tests.__file__)))
-    python_paths = [path for path in new_environ['PYTHONPATH'].split(':') if path != certbot_root]
+    certbot_root = os.path.dirname(os.path.dirname(
+        os.path.dirname(certbot_integration_tests.__file__))
+    )
+    python_paths = [
+        path for path in new_environ['PYTHONPATH'].split(':')
+        if path != certbot_root
+    ]
     new_environ['PYTHONPATH'] = ':'.join(python_paths)

     return new_environ
@@ -70,7 +78,8 @@ def _compute_additional_args(workspace, environ, force_renew):
     output = subprocess.check_output(['certbot', '--version'],
                                      universal_newlines=True, stderr=subprocess.STDOUT,
                                      cwd=workspace, env=environ)
-    version_str = output.split(' ')[1].strip()  # Typical response is: output = 'certbot 0.31.0.dev0'
+    # Typical response is: output = 'certbot 0.31.0.dev0'
+    version_str = output.split(' ')[1].strip()
     if LooseVersion(version_str) >= LooseVersion('0.30.0'):
         additional_args.append('--no-random-sleep-on-renew')

@@ -113,11 +122,12 @@ def _prepare_args_env(certbot_args, directory_url, http_01_port, tls_alpn_01_por


 def main():
+    # pylint: disable=missing-function-docstring
     args = sys.argv[1:]

     # Default config is pebble
     directory_url = os.environ.get('SERVER', PEBBLE_DIRECTORY_URL)
-    http_01_port = int(os.environ.get('HTTP_01_PORT', HTTP_01_PORT))
+    http_01_port = int(os.environ.get('HTTP_01_PORT', DEFAULT_HTTP_01_PORT))
     tls_alpn_01_port = int(os.environ.get('TLS_ALPN_01_PORT', TLS_ALPN_01_PORT))

     # Execution of certbot in a self-contained workspace
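The version gate above relies on `LooseVersion` ordering, which compares release components numerically, so a dev build of a newer release still passes the check:

    from distutils.version import LooseVersion

    # 'certbot 0.31.0.dev0' is split on the space, giving '0.31.0.dev0',
    # and 0.31.x (even a dev build) sorts after 0.30.0:
    assert LooseVersion('0.31.0.dev0') >= LooseVersion('0.30.0')
    assert LooseVersion('0.29.1') < LooseVersion('0.30.0')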
@@ -1,5 +1,5 @@
 """Some useful constants to use throughout certbot-ci integration tests"""
-HTTP_01_PORT = 5002
+DEFAULT_HTTP_01_PORT = 5002
 TLS_ALPN_01_PORT = 5001
 CHALLTESTSRV_PORT = 8055
 BOULDER_V1_DIRECTORY_URL = 'http://localhost:4000/directory'
@@ -7,4 +7,4 @@ BOULDER_V2_DIRECTORY_URL = 'http://localhost:4001/directory'
 PEBBLE_DIRECTORY_URL = 'https://localhost:14000/dir'
 PEBBLE_MANAGEMENT_URL = 'https://localhost:15000'
 MOCK_OCSP_SERVER_PORT = 4002
 PEBBLE_ALTERNATE_ROOTS = 2
certbot-ci/certbot_integration_tests/utils/dns_server.py (new file, 155 lines)
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+"""Module to setup an RFC2136-capable DNS server"""
+from __future__ import print_function
+
+import os
+import os.path
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pkg_resources import resource_filename
+
+BIND_DOCKER_IMAGE = "internetsystemsconsortium/bind9:9.16"
+BIND_BIND_ADDRESS = ("127.0.0.1", 45953)
+
+# A TCP DNS message which is a query for '. CH A' transaction ID 0xcb37. This is used
+# by _wait_until_ready to check that BIND is responding without depending on dnspython.
+BIND_TEST_QUERY = bytearray.fromhex("0011cb37000000010000000000000000010003")
+
+
+class DNSServer(object):
+    """
+    DNSServer configures and handles the lifetime of an RFC2136-capable server.
+    DNServer provides access to the dns_xdist parameter, listing the address and port
+    to use for each pytest node.
+
+    At this time, DNSServer should only be used with a single node, but may be expanded in
+    future to support parallelization (https://github.com/certbot/certbot/issues/8455).
+    """
+
+    def __init__(self, unused_nodes, show_output=False):
+        """
+        Create an DNSServer instance.
+        :param list nodes: list of node names that will be setup by pytest xdist
+        :param bool show_output: if True, print the output of the DNS server
+        """
+
+        self.bind_root = tempfile.mkdtemp()
+
+        self.process = None  # type: subprocess.Popen
+
+        self.dns_xdist = {"address": BIND_BIND_ADDRESS[0], "port": BIND_BIND_ADDRESS[1]}
+
+        # Unfortunately the BIND9 image forces everything to stderr with -g and we can't
+        # modify the verbosity.
+        self._output = sys.stderr if show_output else open(os.devnull, "w")
+
+    def start(self):
+        """Start the DNS server"""
+        try:
+            self._configure_bind()
+            self._start_bind()
+        except:
+            self.stop()
+            raise
+
+    def stop(self):
+        """Stop the DNS server, and clean its resources"""
+        if self.process:
+            try:
+                self.process.terminate()
+                self.process.wait()
+            except BaseException as e:
+                print("BIND9 did not stop cleanly: {}".format(e), file=sys.stderr)
+
+        shutil.rmtree(self.bind_root, ignore_errors=True)
+
+        if self._output != sys.stderr:
+            self._output.close()
+
+    def _configure_bind(self):
+        """Configure the BIND9 server based on the prebaked configuration"""
+        bind_conf_src = resource_filename(
+            "certbot_integration_tests", "assets/bind-config"
+        )
+        for directory in ("conf", "zones"):
+            shutil.copytree(
+                os.path.join(bind_conf_src, directory), os.path.join(self.bind_root, directory)
+            )
+
+    def _start_bind(self):
+        """Launch the BIND9 server as a Docker container"""
+        addr_str = "{}:{}".format(BIND_BIND_ADDRESS[0], BIND_BIND_ADDRESS[1])
+        self.process = subprocess.Popen(
+            [
+                "docker",
+                "run",
+                "--rm",
+                "-p",
+                "{}:53/udp".format(addr_str),
+                "-p",
+                "{}:53/tcp".format(addr_str),
+                "-v",
+                "{}/conf:/etc/bind".format(self.bind_root),
+                "-v",
+                "{}/zones:/var/lib/bind".format(self.bind_root),
+                BIND_DOCKER_IMAGE,
+            ],
+            stdout=self._output,
+            stderr=self._output,
+        )
+
+        if self.process.poll():
+            raise ValueError("BIND9 server stopped unexpectedly")
+
+        try:
+            self._wait_until_ready()
+        except:
+            # The container might be running even if we think it isn't
+            self.stop()
+            raise
+
+    def _wait_until_ready(self, attempts=30):
+        # type: (int) -> None
+        """
+        Polls the DNS server over TCP until it gets a response, or until
+        it runs out of attempts and raises a ValueError.
+        The DNS response message must match the txn_id of the DNS query message,
+        but otherwise the contents are ignored.
+        :param int attempts: The number of attempts to make.
+        """
+        for _ in range(attempts):
+            if self.process.poll():
+                raise ValueError("BIND9 server stopped unexpectedly")
+
+            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            sock.settimeout(5.0)
+            try:
+                sock.connect(BIND_BIND_ADDRESS)
+                sock.sendall(BIND_TEST_QUERY)
+                buf = sock.recv(1024)
+                # We should receive a DNS message with the same tx_id
+                if buf and len(buf) > 4 and buf[2:4] == BIND_TEST_QUERY[2:4]:
+                    return
+                # If we got a response but it wasn't the one we wanted, wait a little
+                time.sleep(1)
+            except:  # pylint: disable=bare-except
+                # If there was a network error, wait a little
+                time.sleep(1)
+            finally:
+                sock.close()
+
+        raise ValueError(
+            "Gave up waiting for DNS server {} to respond".format(BIND_BIND_ADDRESS)
+        )
+
+    def __enter__(self):
+        self.start()
+        return self.dns_xdist
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.stop()
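DNSServer mirrors the ACMEServer lifecycle API (start/stop plus context manager), so ad-hoc use outside pytest is straightforward; a minimal sketch, assuming Docker is available locally:

    # Sketch: spin up the dockerised BIND9 instance and inspect its xdist payload.
    from certbot_integration_tests.utils.dns_server import DNSServer

    with DNSServer(['primary'], show_output=True) as dns_xdist:
        # dns_xdist == {'address': '127.0.0.1', 'port': 45953}
        print('RFC2136-capable server listening on {address}:{port}'.format(**dns_xdist))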
@@ -39,6 +39,7 @@ def _suppress_x509_verification_warnings():
         urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
     except ImportError:
         # Handle old versions of request with vendorized urllib3
+        # pylint: disable=no-member
         from requests.packages.urllib3.exceptions import InsecureRequestWarning
         requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

@@ -256,7 +257,8 @@ def generate_csr(domains, key_path, csr_path, key_type=RSA_KEY_TYPE):

 def read_certificate(cert_path):
     """
-    Load the certificate from the provided path, and return a human readable version of it (TEXT mode).
+    Load the certificate from the provided path, and return a human readable version
+    of it (TEXT mode).
     :param str cert_path: the path to the certificate
     :returns: the TEXT version of the certificate, as it would be displayed by openssl binary
     """
@@ -280,7 +282,11 @@ def load_sample_data_path(workspace):

     if os.name == 'nt':
         # Fix the symlinks on Windows if GIT is not configured to create them upon checkout
-        for lineage in ['a.encryption-example.com', 'b.encryption-example.com']:
+        for lineage in [
+            'a.encryption-example.com',
+            'b.encryption-example.com',
+            'c.encryption-example.com',
+        ]:
             current_live = os.path.join(copied, 'live', lineage)
             for name in os.listdir(current_live):
                 if name != 'README':
@@ -1,3 +1,5 @@
+# pylint: disable=missing-module-docstring
+
 import json
 import os
 import stat
@@ -5,18 +7,19 @@ import stat
 import pkg_resources
 import requests

-from certbot_integration_tests.utils.constants import MOCK_OCSP_SERVER_PORT
+from certbot_integration_tests.utils.constants import DEFAULT_HTTP_01_PORT, MOCK_OCSP_SERVER_PORT

 PEBBLE_VERSION = 'v2.3.0'
 ASSETS_PATH = pkg_resources.resource_filename('certbot_integration_tests', 'assets')


-def fetch(workspace):
+def fetch(workspace, http_01_port=DEFAULT_HTTP_01_PORT):
+    # pylint: disable=missing-function-docstring
     suffix = 'linux-amd64' if os.name != 'nt' else 'windows-amd64.exe'

     pebble_path = _fetch_asset('pebble', suffix)
     challtestsrv_path = _fetch_asset('pebble-challtestsrv', suffix)
-    pebble_config_path = _build_pebble_config(workspace)
+    pebble_config_path = _build_pebble_config(workspace, http_01_port)

     return pebble_path, challtestsrv_path, pebble_config_path

@@ -35,7 +38,7 @@ def _fetch_asset(asset, suffix):
     return asset_path


-def _build_pebble_config(workspace):
+def _build_pebble_config(workspace, http_01_port):
     config_path = os.path.join(workspace, 'pebble-config.json')
     with open(config_path, 'w') as file_h:
         file_h.write(json.dumps({
@@ -44,7 +47,7 @@ def _build_pebble_config(workspace):
             'managementListenAddress': '0.0.0.0:15000',
             'certificate': os.path.join(ASSETS_PATH, 'cert.pem'),
             'privateKey': os.path.join(ASSETS_PATH, 'key.pem'),
-            'httpPort': 5002,
+            'httpPort': http_01_port,
             'tlsPort': 5001,
             'ocspResponderURL': 'http://127.0.0.1:{0}'.format(MOCK_OCSP_SERVER_PORT),
             },
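For reference, `_build_pebble_config` now writes the chosen validation port into the generated `pebble-config.json`. With the default arguments, the fragment assembled above looks roughly like the following (paths shortened; keys outside this hunk, including the top-level wrapper, are omitted):

    # Approximate payload handed to json.dumps() above for the default port.
    pebble_config_fragment = {
        'managementListenAddress': '0.0.0.0:15000',
        'certificate': '<ASSETS_PATH>/cert.pem',
        'privateKey': '<ASSETS_PATH>/key.pem',
        'httpPort': 5002,   # DEFAULT_HTTP_01_PORT; overridden via fetch(workspace, http_01_port)
        'tlsPort': 5001,
        'ocspResponderURL': 'http://127.0.0.1:4002',  # MOCK_OCSP_SERVER_PORT
    }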
@@ -21,6 +21,7 @@ from certbot_integration_tests.utils.misc import GracefulTCPServer


 class _ProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+    # pylint: disable=missing-function-docstring
     def do_POST(self):
         request = requests.get(PEBBLE_MANAGEMENT_URL + '/intermediate-keys/0', verify=False)
         issuer_key = serialization.load_pem_private_key(request.content, None, default_backend())
@@ -35,20 +36,28 @@ class _ProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):

         ocsp_request = ocsp.load_der_ocsp_request(self.rfile.read(content_len))
         response = requests.get('{0}/cert-status-by-serial/{1}'.format(
-            PEBBLE_MANAGEMENT_URL, str(hex(ocsp_request.serial_number)).replace('0x', '')), verify=False)
+            PEBBLE_MANAGEMENT_URL, str(hex(ocsp_request.serial_number)).replace('0x', '')),
+            verify=False
+        )

         if not response.ok:
-            ocsp_response = ocsp.OCSPResponseBuilder.build_unsuccessful(ocsp.OCSPResponseStatus.UNAUTHORIZED)
+            ocsp_response = ocsp.OCSPResponseBuilder.build_unsuccessful(
+                ocsp.OCSPResponseStatus.UNAUTHORIZED
+            )
         else:
             data = response.json()

             now = datetime.datetime.utcnow()
             cert = x509.load_pem_x509_certificate(data['Certificate'].encode(), default_backend())
             if data['Status'] != 'Revoked':
-                ocsp_status, revocation_time, revocation_reason = ocsp.OCSPCertStatus.GOOD, None, None
+                ocsp_status = ocsp.OCSPCertStatus.GOOD
+                revocation_time = None
+                revocation_reason = None
             else:
-                ocsp_status, revocation_reason = ocsp.OCSPCertStatus.REVOKED, x509.ReasonFlags.unspecified
-                revoked_at = re.sub(r'( \+\d{4}).*$', r'\1', data['RevokedAt'])  # "... +0000 UTC" => "+0000"
+                ocsp_status = ocsp.OCSPCertStatus.REVOKED
+                revocation_reason = x509.ReasonFlags.unspecified
+                # "... +0000 UTC" => "+0000"
+                revoked_at = re.sub(r'( \+\d{4}).*$', r'\1', data['RevokedAt'])
                 revocation_time = parser.parse(revoked_at)

         ocsp_response = ocsp.OCSPResponseBuilder().add_response(
@@ -1,4 +1,6 @@
 #!/usr/bin/env python
+# pylint: disable=missing-module-docstring
+
 import json
 import re
 import sys
@@ -10,7 +12,9 @@ from certbot_integration_tests.utils.misc import GracefulTCPServer


 def _create_proxy(mapping):
+    # pylint: disable=missing-function-docstring
     class ProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+        # pylint: disable=missing-class-docstring
         def do_GET(self):
             headers = {key.lower(): value for key, value in self.headers.items()}
             backend = [backend for pattern, backend in mapping.items()
@@ -18,7 +18,7 @@ install_requires = [
     'python-dateutil',
     'pyyaml',
     'requests',
-    'six',
+    'six'
 ]

 # Add pywin32 on Windows platforms to handle low-level system calls.
@@ -52,6 +52,7 @@ setup(
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
         'Topic :: Internet :: WWW/HTTP',
         'Topic :: Security',
     ],
@@ -9,8 +9,6 @@ See https://docs.pytest.org/en/latest/reference.html#hook-reference
 from __future__ import print_function
 import os

-import pytest
-
 ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))


@@ -1,47 +1,18 @@
-FROM debian:stretch
+FROM debian:buster
 MAINTAINER Brad Warren <bmw@eff.org>

-# no need to mkdir anything:
-# https://docs.docker.com/reference/builder/#copy
-# If <dest> doesn't exist, it is created along with all missing
-# directories in its path.
+RUN apt-get update && \
+    apt install python3-dev python3-venv gcc libaugeas0 libssl-dev \
+        libffi-dev ca-certificates openssl -y

-# TODO: Install non-default Python versions for tox.
-# TODO: Install Apache/Nginx for plugin development.
-COPY letsencrypt-auto-source /opt/certbot/src/letsencrypt-auto-source
-RUN /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto --os-packages-only
+WORKDIR /opt/certbot/src

-# the above is not likely to change, so by putting it further up the
-# Dockerfile we make sure we cache as much as possible
+# We copy all contents of the build directory to allow us to easily use
+# things like tools/venv3.py which expects all of our packages to be available.
+COPY . .

-COPY certbot/setup.py certbot/README.rst certbot/CHANGELOG.md certbot/MANIFEST.in linter_plugin.py tox.cover.py tox.ini .pylintrc /opt/certbot/src/
-
-# all above files are necessary for setup.py, however, package source
-# code directory has to be copied separately to a subdirectory...
-# https://docs.docker.com/reference/builder/#copy: "If <src> is a
-# directory, the entire contents of the directory are copied,
-# including filesystem metadata. Note: The directory itself is not
-# copied, just its contents." Order again matters, three files are far
-# more likely to be cached than the whole project directory
-
-COPY certbot /opt/certbot/src/certbot/
-COPY acme /opt/certbot/src/acme/
-COPY certbot-apache /opt/certbot/src/certbot-apache/
-COPY certbot-nginx /opt/certbot/src/certbot-nginx/
-COPY certbot-compatibility-test /opt/certbot/src/certbot-compatibility-test/
-COPY tools /opt/certbot/src/tools
-
-RUN VIRTUALENV_NO_DOWNLOAD=1 virtualenv -p python2 /opt/certbot/venv && \
-    /opt/certbot/venv/bin/pip install -U setuptools && \
-    /opt/certbot/venv/bin/pip install -U pip
-ENV PATH /opt/certbot/venv/bin:$PATH
-RUN /opt/certbot/venv/bin/python \
-    /opt/certbot/src/tools/pip_install_editable.py \
-    /opt/certbot/src/acme \
-    /opt/certbot/src/certbot \
-    /opt/certbot/src/certbot-apache \
-    /opt/certbot/src/certbot-nginx \
-    /opt/certbot/src/certbot-compatibility-test
+RUN tools/venv3.py
+ENV PATH /opt/certbot/src/venv3/bin:$PATH

 # install in editable mode (-e) to save space: it's not possible to
 # "rm -rf /opt/certbot/src" (it's stays in the underlaying image);
@@ -57,7 +57,7 @@ class Proxy(configurators_common.Proxy):

     def _prepare_configurator(self):
         """Prepares the Apache plugin for testing"""
-        for k in entrypoint.ENTRYPOINT.OS_DEFAULTS.keys():
+        for k in entrypoint.ENTRYPOINT.OS_DEFAULTS:
             setattr(self.le_config, "apache_" + k,
                     entrypoint.ENTRYPOINT.OS_DEFAULTS[k])

@@ -69,11 +69,10 @@ class Proxy(object):
         shutil.copy(cert_path, cert)
         key = os.path.join(cert_and_key_dir, "key")
         shutil.copy(key_path, key)
+        chain = None
         if chain_path:
             chain = os.path.join(cert_and_key_dir, "chain")
             shutil.copy(chain_path, chain)
-        else:
-            chain = None

         return cert, key, chain

@@ -102,8 +102,10 @@ def _create_achalls(plugin):
         prefs = plugin.get_chall_pref(domain)
         for chall_type in prefs:
             if chall_type == challenges.HTTP01:
+                # challenges.HTTP01.TOKEN_SIZE is a float but os.urandom
+                # expects an integer.
                 chall = challenges.HTTP01(
-                    token=os.urandom(challenges.HTTP01.TOKEN_SIZE))
+                    token=os.urandom(int(challenges.HTTP01.TOKEN_SIZE)))
                 challb = acme_util.chall_to_challb(
                     chall, messages.STATUS_PENDING)
                 achall = achallenges.KeyAuthorizationAnnotatedChallenge(
@@ -137,7 +139,7 @@ def test_deploy_cert(plugin, temp_dir, domains):
     """Tests deploy_cert returning True if the tests are successful"""
     cert = crypto_util.gen_ss_cert(util.KEY, domains)
     cert_path = os.path.join(temp_dir, "cert.pem")
-    with open(cert_path, "w") as f:
+    with open(cert_path, "wb") as f:
         f.write(OpenSSL.crypto.dump_certificate(
             OpenSSL.crypto.FILETYPE_PEM, cert))

@@ -273,7 +275,7 @@ def _dirs_are_unequal(dir1, dir2):
             logger.error(str(dircmp.diff_files))
             return True

-        for subdir in dircmp.subdirs.itervalues():
+        for subdir in dircmp.subdirs.values():
             dircmps.append(subdir)

     return False
@@ -1,13 +1,17 @@
 -----BEGIN CERTIFICATE-----
-MIICATCCAWoCCQCvMbKu4FHZ6zANBgkqhkiG9w0BAQsFADBFMQswCQYDVQQGEwJB
-VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0
-cyBQdHkgTHRkMB4XDTE1MDcyMzIzMjc1MFoXDTE2MDcyMjIzMjc1MFowRTELMAkG
-A1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0
-IFdpZGdpdHMgUHR5IEx0ZDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAws3o
-y46PMLM9Gr68pbex0MhdPr7Cq4rRe9BBpnOuHFdF35Ak0aPrzFwVzLlGOir94U11
-e5JYJDWJi+4FwLBRkOAfanjJ5GJ9BnEHSOdbtO+sv9uhbt+7iYOOUOngKSiJyUrM
-i1THAE+B1CenxZ1KHRQCke708zkK8jVuxLeIAOMCAwEAATANBgkqhkiG9w0BAQsF
-AAOBgQCC3LUP3MHk+IBmwHHZAZCX+6p4lop9SP6y6rDpWgnqEEeb9oFleHi2Rvzq
-7gxl6nS5AsaSzfAygJ3zWKTwVAZyU4GOQ8QTK+nHk3+LO1X4cDbUlQfm5+YuwKDa
-4LFKeovmrK6BiMLIc1J+MxUjLfCeVHYSdkZULTVXue0zif0BUA==
+MIICqDCCAZACCQCRC1UKg2WfRTANBgkqhkiG9w0BAQsFADAWMRQwEgYDVQQDDAtl
+eGFtcGxlLmNvbTAeFw0yMDA4MTkyMzM5MjdaFw0yMDA5MTgyMzM5MjdaMBYxFDAS
+BgNVBAMMC2V4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA5tViHnJx4y+BbCb8Qz9uxsnqp1ynONR7ET/XL+M/jQ4xPeJg4L2uZ3YnogPc
+WdEoey17WXBg3KRqKfg+7PqIdGqVeonSCfXhD1HoGJRsThSUJ2fK3uoQ+zGgJTWR
+FYWa8Cb6xsuq0xaYtw2jaJBp+697Np60PWs4pY5FkadT50wZ0TYDnYt3NSAdn+Pt
+j3cpI4ocZZ2FLiOFn+UFOaRcetGtpnU1QwvmygD9tiL7kJ55B4CWGEv6DMRQk/UE
+eMUETzse1NkVlaxQ1TCd5iAfBTluiV30EpmmWa+OsXJWxCK+EEOkXD1r3CdXAldY
+nRYxJrn4udrFe69QX95wiRZNXwIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCJvtDC
+875CK7SKNf006gSciXsNPNSVORGPjc/5OQ23baK4iPhxftI4LGZN8773N14jWp3E
+QnQLL1gZ9/G+98SlI5lm97a4m4XZyNaULbmQwRKgI22H0F1AWbvsG0SppjnhVlJ+
+93ZUqSQBXgbXelFHSsNfk1AB6Kvo6+UvS8s0vkz7SfkPOZGx0b+3RJSJZnZHvYih
+ggudN/jJggSgRrb+F6lpaelJE9pZsznJFb9R7mFI33AGBpQWV4r3p1ZbM1vGMqGc
+4PGBzDzi28BhLBplSOPZZxqRiINQzGiQ5T2SfN06usr7EafFr6+7YKNhgrCdlVjU
+thzJ5MgHZgALNXsh
 -----END CERTIFICATE-----

Binary file not shown.
@@ -18,7 +18,7 @@ class Validator(object):
     def certificate(self, cert, name, alt_host=None, port=443):
         """Verifies the certificate presented at name is cert"""
         if alt_host is None:
-            host = socket.gethostbyname(name)
+            host = socket.gethostbyname(name).encode()
         elif isinstance(alt_host, six.binary_type):
             host = alt_host
         else:
@@ -61,7 +61,6 @@ server {
 server {
     listen 80;
     server_name random1413.example.org www.random1413.example.org;
-    server_name random28524.example.org www.random28524.example.org;
    server_name random25266.example.org www.random25266.example.org;
    server_name random26791.example.org www.random26791.example.org;

@@ -30,7 +30,6 @@ server {
    server_name www.random3140.example.org;
    server_name random28398.example.org;
    server_name random23689.example.org www.random23689.example.org;
-    server_name random25863.example.org www.random25863.example.org;

    rewrite ^ http://random3140.example.org$request_uri permanent;
 }
@@ -29,6 +29,5 @@ server {

 server {
    server_name www.random1413.example.org;
-    server_name random28524.example.org www.random28524.example.org;
    rewrite ^ http://random1413.example.org$request_uri permanent;
 }
@@ -5,7 +5,7 @@ from setuptools import __version__ as setuptools_version
 from setuptools import find_packages
 from setuptools import setup
 
-version = '1.8.0.dev0'
+version = '1.11.0.dev0'
 
 install_requires = [
     'certbot',
@@ -50,6 +50,7 @@ setup(
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
         'Topic :: Internet :: WWW/HTTP',
         'Topic :: Security',
     ],
@@ -3,6 +3,10 @@ The `~certbot_dns_cloudflare.dns_cloudflare` plugin automates the process of
 completing a ``dns-01`` challenge (`~acme.challenges.DNS01`) by creating, and
 subsequently removing, TXT records using the Cloudflare API.
 
+.. note::
+   The plugin is not installed by default. It can be installed by heading to
+   `certbot.eff.org <https://certbot.eff.org/instructions#wildcard>`_, choosing your system and
+   selecting the Wildcard tab.
 
 Named Arguments
 ---------------
@@ -93,7 +93,7 @@ todo_include_todos = False
 # a list of builtin themes.
 #
 
-# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
+# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
 # on_rtd is whether we are on readthedocs.org
 on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
 if not on_rtd:  # only import and set the theme if we're building docs locally
@@ -22,7 +22,7 @@ if errorlevel 9009 (
     echo.may add the Sphinx directory to PATH.
     echo.
     echo.If you don't have Sphinx installed, grab it from
-    echo.http://sphinx-doc.org/
+    echo.https://www.sphinx-doc.org/
     exit /b 1
 )
 
@@ -5,9 +5,8 @@ import sys
 from setuptools import __version__ as setuptools_version
 from setuptools import find_packages
 from setuptools import setup
-from setuptools.command.test import test as TestCommand
 
-version = '1.8.0.dev0'
+version = '1.11.0.dev0'
 
 # Remember to update local-oldest-requirements.txt when changing the minimum
 # acme/certbot version.
@@ -42,20 +41,6 @@ docs_extras = [
     'sphinx_rtd_theme',
 ]
 
-class PyTest(TestCommand):
-    user_options = []
-
-    def initialize_options(self):
-        TestCommand.initialize_options(self)
-        self.pytest_args = ''
-
-    def run_tests(self):
-        import shlex
-        # import here, cause outside the eggs aren't loaded
-        import pytest
-        errno = pytest.main(shlex.split(self.pytest_args))
-        sys.exit(errno)
-
 setup(
     name='certbot-dns-cloudflare',
     version=version,
@@ -78,6 +63,7 @@ setup(
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
         'Topic :: Internet :: WWW/HTTP',
         'Topic :: Security',
         'Topic :: System :: Installation/Setup',
@@ -97,7 +83,4 @@ setup(
             'dns-cloudflare = certbot_dns_cloudflare._internal.dns_cloudflare:Authenticator',
         ],
     },
-    tests_require=["pytest"],
-    test_suite='certbot_dns_cloudflare',
-    cmdclass={"test": PyTest},
 )
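With the setuptools TestCommand subclass, tests_require, and test_suite hooks removed above, the plugin's tests are no longer wired through `python setup.py test`. A sketch of an equivalent direct invocation (assuming pytest is installed separately; the target package name is the one referenced in this setup.py):

# Illustrative sketch: run the package's tests through pytest's Python entry point,
# roughly what the removed PyTest command class did via pytest.main().
import sys
import pytest

sys.exit(pytest.main(["certbot_dns_cloudflare"]))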
@@ -1,21 +0,0 @@
-#!/bin/sh -e
-# This file is generated by tools/generate_dnsplugins_postrefreshhook.sh and should not be edited manually.
-
-# get certbot version
-if [ ! -f "$SNAP/certbot-shared/certbot-version.txt" ]; then
-    echo "No certbot version available; not doing version comparison check" >> "$SNAP_DATA/debuglog"
-    exit 0
-fi
-cb_installed=$(cat $SNAP/certbot-shared/certbot-version.txt)
-
-# get required certbot version for plugin. certbot version must be at least the plugin's
-# version. note that this is not the required version in setup.py, but the version number itself.
-cb_required=$(grep -oP "version = '\K.*(?=')" $SNAP/setup.py)
-
-
-$SNAP/bin/python3 -c "import sys; from packaging import version; sys.exit(1) if version.parse('$cb_installed') < version.parse('$cb_required') else sys.exit(0)" || exit_code=$?
-if [ "$exit_code" -eq 1 ]; then
-    echo "Certbot is version $cb_installed but needs to be at least $cb_required before" \
-        "this plugin can be updated; will try again on next refresh."
-    exit 1
-fi
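The deleted hook above compared the installed Certbot version against the plugin's version with an inline python3 -c call. A sketch of the same comparison (the values are placeholders; the hook read them from certbot-version.txt and setup.py):

# Illustrative sketch: packaging.version.parse() compares release numbers
# numerically, so "1.9.0" is correctly treated as older than "1.11.0".
from packaging import version

cb_installed = "1.9.0"   # placeholder for $cb_installed
cb_required = "1.11.0"   # placeholder for $cb_required
print(version.parse(cb_installed) < version.parse(cb_required))  # True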
@@ -1,41 +0,0 @@
-# This file is generated by tools/generate_dnsplugins_snapcraft.sh and should not be edited manually.
-name: certbot-dns-cloudflare
-summary: Cloudflare DNS Authenticator plugin for Certbot
-description: Cloudflare DNS Authenticator plugin for Certbot
-confinement: strict
-grade: devel
-base: core20
-adopt-info: certbot-dns-cloudflare
-
-parts:
-  certbot-dns-cloudflare:
-    plugin: python
-    source: .
-    constraints: [$SNAPCRAFT_PART_SRC/snap-constraints.txt]
-    override-pull: |
-      snapcraftctl pull
-      snapcraftctl set-version `grep ^version $SNAPCRAFT_PART_SRC/setup.py | cut -f2 -d= | tr -d "'[:space:]"`
-    build-environment:
-      - SNAP_BUILD: "True"
-    # To build cryptography and cffi if needed
-    build-packages: [gcc, libffi-dev, libssl-dev, python3-dev]
-  certbot-metadata:
-    plugin: dump
-    source: .
-    stage: [setup.py, certbot-shared]
-    override-pull: |
-      snapcraftctl pull
-      mkdir -p $SNAPCRAFT_PART_SRC/certbot-shared
-
-slots:
-  certbot:
-    interface: content
-    content: certbot-1
-    read:
-      - $SNAP/lib/python3.8/site-packages
-
-plugs:
-  certbot-metadata:
-    interface: content
-    content: metadata-1
-    target: $SNAP/certbot-shared
@@ -3,6 +3,10 @@ The `~certbot_dns_cloudxns.dns_cloudxns` plugin automates the process of
 completing a ``dns-01`` challenge (`~acme.challenges.DNS01`) by creating, and
 subsequently removing, TXT records using the CloudXNS API.
 
+.. note::
+   The plugin is not installed by default. It can be installed by heading to
+   `certbot.eff.org <https://certbot.eff.org/instructions#wildcard>`_, choosing your system and
+   selecting the Wildcard tab.
 
 Named Arguments
 ---------------
@@ -93,7 +93,7 @@ todo_include_todos = False
 # a list of builtin themes.
 #
 
-# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
+# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
 # on_rtd is whether we are on readthedocs.org
 on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
 if not on_rtd:  # only import and set the theme if we're building docs locally
@@ -22,7 +22,7 @@ if errorlevel 9009 (
     echo.may add the Sphinx directory to PATH.
     echo.
     echo.If you don't have Sphinx installed, grab it from
-    echo.http://sphinx-doc.org/
+    echo.https://www.sphinx-doc.org/
     exit /b 1
 )
 
@@ -5,9 +5,8 @@ import sys
 from setuptools import __version__ as setuptools_version
 from setuptools import find_packages
 from setuptools import setup
-from setuptools.command.test import test as TestCommand
 
-version = '1.8.0.dev0'
+version = '1.11.0.dev0'
 
 # Remember to update local-oldest-requirements.txt when changing the minimum
 # acme/certbot version.
@@ -42,20 +41,6 @@ docs_extras = [
     'sphinx_rtd_theme',
 ]
 
-class PyTest(TestCommand):
-    user_options = []
-
-    def initialize_options(self):
-        TestCommand.initialize_options(self)
-        self.pytest_args = ''
-
-    def run_tests(self):
-        import shlex
-        # import here, cause outside the eggs aren't loaded
-        import pytest
-        errno = pytest.main(shlex.split(self.pytest_args))
-        sys.exit(errno)
-
 setup(
     name='certbot-dns-cloudxns',
     version=version,
@@ -78,6 +63,7 @@ setup(
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
         'Topic :: Internet :: WWW/HTTP',
         'Topic :: Security',
         'Topic :: System :: Installation/Setup',
@@ -97,7 +83,4 @@ setup(
             'dns-cloudxns = certbot_dns_cloudxns._internal.dns_cloudxns:Authenticator',
         ],
     },
-    tests_require=["pytest"],
-    test_suite='certbot_dns_cloudxns',
-    cmdclass={"test": PyTest},
 )
@@ -1,21 +0,0 @@
-#!/bin/sh -e
-# This file is generated by tools/generate_dnsplugins_postrefreshhook.sh and should not be edited manually.
-
-# get certbot version
-if [ ! -f "$SNAP/certbot-shared/certbot-version.txt" ]; then
-    echo "No certbot version available; not doing version comparison check" >> "$SNAP_DATA/debuglog"
-    exit 0
-fi
-cb_installed=$(cat $SNAP/certbot-shared/certbot-version.txt)
-
-# get required certbot version for plugin. certbot version must be at least the plugin's
-# version. note that this is not the required version in setup.py, but the version number itself.
-cb_required=$(grep -oP "version = '\K.*(?=')" $SNAP/setup.py)
-
-
-$SNAP/bin/python3 -c "import sys; from packaging import version; sys.exit(1) if version.parse('$cb_installed') < version.parse('$cb_required') else sys.exit(0)" || exit_code=$?
-if [ "$exit_code" -eq 1 ]; then
-    echo "Certbot is version $cb_installed but needs to be at least $cb_required before" \
-        "this plugin can be updated; will try again on next refresh."
-    exit 1
-fi
@@ -1,41 +0,0 @@
-# This file is generated by tools/generate_dnsplugins_snapcraft.sh and should not be edited manually.
-name: certbot-dns-cloudxns
-summary: CloudXNS DNS Authenticator plugin for Certbot
-description: CloudXNS DNS Authenticator plugin for Certbot
-confinement: strict
-grade: devel
-base: core20
-adopt-info: certbot-dns-cloudxns
-
-parts:
-  certbot-dns-cloudxns:
-    plugin: python
-    source: .
-    constraints: [$SNAPCRAFT_PART_SRC/snap-constraints.txt]
-    override-pull: |
-      snapcraftctl pull
-      snapcraftctl set-version `grep ^version $SNAPCRAFT_PART_SRC/setup.py | cut -f2 -d= | tr -d "'[:space:]"`
-    build-environment:
-      - SNAP_BUILD: "True"
-    # To build cryptography and cffi if needed
-    build-packages: [gcc, libffi-dev, libssl-dev, python3-dev]
-  certbot-metadata:
-    plugin: dump
-    source: .
-    stage: [setup.py, certbot-shared]
-    override-pull: |
-      snapcraftctl pull
-      mkdir -p $SNAPCRAFT_PART_SRC/certbot-shared
-
-slots:
-  certbot:
-    interface: content
-    content: certbot-1
-    read:
-      - $SNAP/lib/python3.8/site-packages
-
-plugs:
-  certbot-metadata:
-    interface: content
-    content: metadata-1
-    target: $SNAP/certbot-shared
@@ -3,6 +3,10 @@ The `~certbot_dns_digitalocean.dns_digitalocean` plugin automates the process of
 completing a ``dns-01`` challenge (`~acme.challenges.DNS01`) by creating, and
 subsequently removing, TXT records using the DigitalOcean API.
 
+.. note::
+   The plugin is not installed by default. It can be installed by heading to
+   `certbot.eff.org <https://certbot.eff.org/instructions#wildcard>`_, choosing your system and
+   selecting the Wildcard tab.
 
 Named Arguments
 ---------------
@@ -19,7 +19,8 @@ class Authenticator(dns_common.DNSAuthenticator):
     This Authenticator uses the DigitalOcean API to fulfill a dns-01 challenge.
     """
 
-    description = 'Obtain certs using a DNS TXT record (if you are using DigitalOcean for DNS).'
+    description = 'Obtain certificates using a DNS TXT record (if you are ' + \
+                  'using DigitalOcean for DNS).'
 
     def __init__(self, *args, **kwargs):
         super(Authenticator, self).__init__(*args, **kwargs)
Some files were not shown because too many files have changed in this diff.