Compare commits
319 Commits
travis-tes
...
test-drop-
@@ -2,12 +2,13 @@
```yaml
trigger:
  # When changing these triggers, please ensure the documentation under
  # "Running tests in CI" is still correct.
  - azure-test-*
  - test-*
pr: none

jobs:
  # Any addition here should be reflected in the advanced and release pipelines.
  # It is advised to declare all jobs here as templates to improve maintainability.
  - template: templates/tests-suite.yml
  - template: templates/installer-tests.yml
variables:
  # We don't publish our Docker images in this pipeline, but when building them
  # for testing, let's use the nightly tag.
  dockerTag: nightly

stages:
  - template: templates/stages/test-and-package-stage.yml
```
@@ -1,18 +0,0 @@
```yaml
# Advanced pipeline for running our full test suite on protected branches.
trigger:
  - '*.x'
pr: none
# This pipeline is also nightly run on master
schedules:
  - cron: "0 4 * * *"
    displayName: Nightly build
    branches:
      include:
        - master
    always: true

jobs:
  # Any addition here should be reflected in the advanced-test and release pipelines.
  # It is advised to declare all jobs here as templates to improve maintainability.
  - template: templates/tests-suite.yml
  - template: templates/installer-tests.yml
```
@@ -1,8 +1,8 @@
```yaml
trigger:
  - master
trigger: none
pr:
  - master
  - '*.x'

jobs:
  - template: templates/tests-suite.yml
  - template: templates/jobs/standard-tests-jobs.yml
```
.azure-pipelines/nightly.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
```yaml
# Nightly pipeline running each day for master.
trigger: none
pr: none
schedules:
  - cron: "30 4 * * *"
    displayName: Nightly build
    branches:
      include:
        - master
    always: true

variables:
  dockerTag: nightly

stages:
  - template: templates/stages/test-and-package-stage.yml
  - template: templates/stages/deploy-stage.yml
  - template: templates/stages/notify-failure-stage.yml
```
@@ -1,13 +1,18 @@
```yaml
# Release pipeline to build and deploy Certbot for Windows for GitHub release tags
# Release pipeline to run our full test suite, build artifacts, and deploy them
# for GitHub release tags.
trigger:
  tags:
    include:
      - v*
pr: none

jobs:
  # Any addition here should be reflected in the advanced and advanced-test pipelines.
  # It is advised to declare all jobs here as templates to improve maintainability.
  - template: templates/tests-suite.yml
  - template: templates/installer-tests.yml
  - template: templates/changelog.yml
variables:
  dockerTag: ${{variables['Build.SourceBranchName']}}

stages:
  - template: templates/stages/test-and-package-stage.yml
  - template: templates/stages/changelog-stage.yml
  - template: templates/stages/deploy-stage.yml
    parameters:
      snapReleaseChannel: beta
  - template: templates/stages/notify-failure-stage.yml
```
@@ -1,14 +0,0 @@
```yaml
jobs:
  - job: changelog
    pool:
      vmImage: vs2017-win2016
    steps:
      - bash: |
          CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
          "${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
        displayName: Prepare changelog
      - task: PublishPipelineArtifact@1
        inputs:
          path: $(Build.ArtifactStagingDirectory)
          artifact: changelog
        displayName: Publish changelog
```
@@ -1,61 +0,0 @@
```yaml
jobs:
  - job: installer_build
    pool:
      vmImage: vs2017-win2016
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.7
          architecture: x86
          addToPath: true
      - script: python windows-installer/construct.py
        displayName: Build Certbot installer
      - task: CopyFiles@2
        inputs:
          sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
          contents: '*.exe'
          targetFolder: $(Build.ArtifactStagingDirectory)
      - task: PublishPipelineArtifact@1
        inputs:
          path: $(Build.ArtifactStagingDirectory)
          artifact: windows-installer
        displayName: Publish Windows installer
  - job: installer_run
    dependsOn: installer_build
    strategy:
      matrix:
        win2019:
          imageName: windows-2019
        win2016:
          imageName: vs2017-win2016
    pool:
      vmImage: $(imageName)
    steps:
      - powershell: |
          $currentVersion = $PSVersionTable.PSVersion
          if ($currentVersion.Major -ne 5) {
            throw "Powershell version is not 5.x"
          }
        condition: eq(variables['imageName'], 'vs2017-win2016')
        displayName: Check Powershell 5.x is used in vs2017-win2016
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.8
          addToPath: true
      - task: DownloadPipelineArtifact@2
        inputs:
          artifact: windows-installer
          path: $(Build.SourcesDirectory)/bin
        displayName: Retrieve Windows installer
      - script: |
          py -3 -m venv venv
          venv\Scripts\python tools\pip_install.py -e certbot-ci
        displayName: Prepare Certbot-CI
      - script: |
          set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
          venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
        displayName: Run windows installer integration tests
      - script: |
          set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
          venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
        displayName: Run certbot integration tests
```
.azure-pipelines/templates/jobs/extended-tests-jobs.yml (new file, 98 lines)
@@ -0,0 +1,98 @@
```yaml
jobs:
  - job: extended_test
    variables:
      - name: IMAGE_NAME
        value: ubuntu-18.04
      - name: PYTHON_VERSION
        value: 3.9
      - group: certbot-common
    strategy:
      matrix:
        linux-py36:
          PYTHON_VERSION: 3.6
          TOXENV: py36
        linux-py37:
          PYTHON_VERSION: 3.7
          TOXENV: py37
        linux-py38:
          PYTHON_VERSION: 3.8
          TOXENV: py38
        linux-py37-nopin:
          PYTHON_VERSION: 3.7
          TOXENV: py37
          CERTBOT_NO_PIN: 1
        linux-boulder-v1-integration-certbot-oldest:
          PYTHON_VERSION: 3.6
          TOXENV: integration-certbot-oldest
          ACME_SERVER: boulder-v1
        linux-boulder-v2-integration-certbot-oldest:
          PYTHON_VERSION: 3.6
          TOXENV: integration-certbot-oldest
          ACME_SERVER: boulder-v2
        linux-boulder-v1-integration-nginx-oldest:
          PYTHON_VERSION: 3.6
          TOXENV: integration-nginx-oldest
          ACME_SERVER: boulder-v1
        linux-boulder-v2-integration-nginx-oldest:
          PYTHON_VERSION: 3.6
          TOXENV: integration-nginx-oldest
          ACME_SERVER: boulder-v2
        linux-boulder-v1-py36-integration:
          PYTHON_VERSION: 3.6
          TOXENV: integration
          ACME_SERVER: boulder-v1
        linux-boulder-v2-py36-integration:
          PYTHON_VERSION: 3.6
          TOXENV: integration
          ACME_SERVER: boulder-v2
        linux-boulder-v1-py37-integration:
          PYTHON_VERSION: 3.7
          TOXENV: integration
          ACME_SERVER: boulder-v1
        linux-boulder-v2-py37-integration:
          PYTHON_VERSION: 3.7
          TOXENV: integration
          ACME_SERVER: boulder-v2
        linux-boulder-v1-py38-integration:
          PYTHON_VERSION: 3.8
          TOXENV: integration
          ACME_SERVER: boulder-v1
        linux-boulder-v2-py38-integration:
          PYTHON_VERSION: 3.8
          TOXENV: integration
          ACME_SERVER: boulder-v2
        linux-boulder-v1-py39-integration:
          PYTHON_VERSION: 3.9
          TOXENV: integration
          ACME_SERVER: boulder-v1
        linux-boulder-v2-py39-integration:
          PYTHON_VERSION: 3.9
          TOXENV: integration
          ACME_SERVER: boulder-v2
        nginx-compat:
          TOXENV: nginx_compat
        linux-integration-rfc2136:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.8
          TOXENV: integration-dns-rfc2136
        docker-dev:
          TOXENV: docker_dev
        macos-farmtest-apache2:
          # We run one of these test farm tests on macOS to help ensure the
          # tests continue to work on the platform.
          IMAGE_NAME: macOS-10.15
          PYTHON_VERSION: 3.8
          TOXENV: test-farm-apache2
        farmtest-leauto-upgrades:
          PYTHON_VERSION: 3.7
          TOXENV: test-farm-leauto-upgrades
        farmtest-certonly-standalone:
          PYTHON_VERSION: 3.7
          TOXENV: test-farm-certonly-standalone
        farmtest-sdists:
          PYTHON_VERSION: 3.7
          TOXENV: test-farm-sdists
    pool:
      vmImage: $(IMAGE_NAME)
    steps:
      - template: ../steps/tox-steps.yml
```
.azure-pipelines/templates/jobs/packaging-jobs.yml (new file, 217 lines)
@@ -0,0 +1,217 @@
```yaml
jobs:
  - job: docker_build
    pool:
      vmImage: ubuntu-18.04
    strategy:
      matrix:
        amd64:
          DOCKER_ARCH: amd64
        # Do not run the heavy non-amd64 builds for test branches
        ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
          arm32v6:
            DOCKER_ARCH: arm32v6
          arm64v8:
            DOCKER_ARCH: arm64v8
    steps:
      - bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
        displayName: Build the Docker images
      # We don't filter for the Docker Hub organization to continue to allow
      # easy testing of these scripts on forks.
      - bash: |
          set -e
          DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
          docker save --output images.tar $DOCKER_IMAGES
        displayName: Save the Docker images
      # If the name of the tar file or artifact changes, the deploy stage will
      # also need to be updated.
      - bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
        displayName: Prepare Docker artifact
      - task: PublishPipelineArtifact@1
        inputs:
          path: $(Build.ArtifactStagingDirectory)
          artifact: docker_$(DOCKER_ARCH)
        displayName: Store Docker artifact
  - job: docker_run
    dependsOn: docker_build
    pool:
      vmImage: ubuntu-18.04
    steps:
      - task: DownloadPipelineArtifact@2
        inputs:
          artifact: docker_amd64
          path: $(Build.SourcesDirectory)
        displayName: Retrieve Docker images
      - bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
        displayName: Load Docker images
      - bash: |
          set -ex
          DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
          for DOCKER_IMAGE in ${DOCKER_IMAGES}
          do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
          done
        displayName: Run integration tests for Docker images
  - job: installer_build
    pool:
      vmImage: vs2017-win2016
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.8
          architecture: x86
          addToPath: true
      - script: python windows-installer/construct.py
        displayName: Build Certbot installer
      - task: CopyFiles@2
        inputs:
          sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
          contents: '*.exe'
          targetFolder: $(Build.ArtifactStagingDirectory)
      - task: PublishPipelineArtifact@1
        inputs:
          path: $(Build.ArtifactStagingDirectory)
          # If we change the artifact's name, it should also be changed in tools/create_github_release.py
          artifact: windows-installer
        displayName: Publish Windows installer
  - job: installer_run
    dependsOn: installer_build
    strategy:
      matrix:
        win2019:
          imageName: windows-2019
        win2016:
          imageName: vs2017-win2016
    pool:
      vmImage: $(imageName)
    steps:
      - powershell: |
          if ($PSVersionTable.PSVersion.Major -ne 5) {
            throw "Powershell version is not 5.x"
          }
        condition: eq(variables['imageName'], 'vs2017-win2016')
        displayName: Check Powershell 5.x is used in vs2017-win2016
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.8
          addToPath: true
      - task: DownloadPipelineArtifact@2
        inputs:
          artifact: windows-installer
          path: $(Build.SourcesDirectory)/bin
        displayName: Retrieve Windows installer
      - script: |
          python -m venv venv
          venv\Scripts\python tools\pipstrap.py
          venv\Scripts\python tools\pip_install.py -e certbot-ci
        env:
          PIP_NO_BUILD_ISOLATION: no
        displayName: Prepare Certbot-CI
      - script: |
          set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
          venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
        displayName: Run windows installer integration tests
      - script: |
          set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
          venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
        displayName: Run certbot integration tests
  - job: snaps_build
    pool:
      vmImage: ubuntu-18.04
    timeoutInMinutes: 0
    variables:
      # Do not run the heavy non-amd64 builds for test branches
      ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
        ARCHS: amd64 arm64 armhf
      ${{ if startsWith(variables['Build.SourceBranchName'], 'test-') }}:
        ARCHS: amd64
    steps:
      - script: |
          set -e
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends snapd
          sudo snap install --classic snapcraft
        displayName: Install dependencies
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.8
          addToPath: true
      - task: DownloadSecureFile@1
        name: credentials
        inputs:
          secureFile: launchpad-credentials
      - script: |
          set -e
          git config --global user.email "$(Build.RequestedForEmail)"
          git config --global user.name "$(Build.RequestedFor)"
          mkdir -p ~/.local/share/snapcraft/provider/launchpad
          cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
          python3 tools/snap/build_remote.py ALL --archs ${ARCHS} --timeout 19800
        displayName: Build snaps
      - script: |
          set -e
          mv *.snap $(Build.ArtifactStagingDirectory)
          mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
        displayName: Prepare artifacts
      - task: PublishPipelineArtifact@1
        inputs:
          path: $(Build.ArtifactStagingDirectory)
          artifact: snaps
        displayName: Store snaps artifacts
  - job: snap_run
    dependsOn: snaps_build
    pool:
      vmImage: ubuntu-18.04
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.8
          addToPath: true
      - script: |
          set -e
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends nginx-light snapd
          python3 -m venv venv
          venv/bin/python tools/pipstrap.py
          venv/bin/python tools/pip_install.py -U tox
        displayName: Install dependencies
      - task: DownloadPipelineArtifact@2
        inputs:
          artifact: snaps
          path: $(Build.SourcesDirectory)/snap
        displayName: Retrieve Certbot snaps
      - script: |
          set -e
          sudo snap install --dangerous --classic snap/certbot_*_amd64.snap
        displayName: Install Certbot snap
      - script: |
          set -e
          venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
        displayName: Run tox
  - job: snap_dns_run
    dependsOn: snaps_build
    pool:
      vmImage: ubuntu-18.04
    steps:
      - script: |
          set -e
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends snapd
        displayName: Install dependencies
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.8
          addToPath: true
      - task: DownloadPipelineArtifact@2
        inputs:
          artifact: snaps
          path: $(Build.SourcesDirectory)/snap
        displayName: Retrieve Certbot snaps
      - script: |
          set -e
          python3 -m venv venv
          venv/bin/python tools/pipstrap.py
          venv/bin/python tools/pip_install.py -e certbot-ci
        displayName: Prepare Certbot-CI
      - script: |
          set -e
          sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
        displayName: Test DNS plugins snaps
```
.azure-pipelines/templates/jobs/standard-tests-jobs.yml (new file, 78 lines)
@@ -0,0 +1,78 @@
```yaml
jobs:
  - job: test
    variables:
      PYTHON_VERSION: 3.9
    strategy:
      matrix:
        macos-py36:
          IMAGE_NAME: macOS-10.15
          PYTHON_VERSION: 3.6
          TOXENV: py36
        macos-py39:
          IMAGE_NAME: macOS-10.15
          PYTHON_VERSION: 3.9
          TOXENV: py39
        windows-py36:
          IMAGE_NAME: vs2017-win2016
          PYTHON_VERSION: 3.6
          TOXENV: py36
        windows-py38-cover:
          IMAGE_NAME: vs2017-win2016
          PYTHON_VERSION: 3.8
          TOXENV: py38-cover
        windows-integration-certbot:
          IMAGE_NAME: vs2017-win2016
          PYTHON_VERSION: 3.8
          TOXENV: integration-certbot
        linux-oldest-tests-1:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.6
          TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
        linux-oldest-tests-2:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.6
          TOXENV: '{dns,nginx}-oldest'
        linux-py36:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.6
          TOXENV: py36
        linux-py39-cover:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.9
          TOXENV: py39-cover
        linux-py37-lint:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.7
          TOXENV: lint
        linux-py36-mypy:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.6
          TOXENV: mypy
        linux-integration:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.8
          TOXENV: integration
          ACME_SERVER: pebble
        apache-compat:
          IMAGE_NAME: ubuntu-18.04
          TOXENV: apache_compat
        le-modification:
          IMAGE_NAME: ubuntu-18.04
          TOXENV: modification
        apacheconftest:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.6
          TOXENV: apacheconftest-with-pebble
        nginxroundtrip:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.6
          TOXENV: nginxroundtrip
    pool:
      vmImage: $(IMAGE_NAME)
    steps:
      - template: ../steps/tox-steps.yml
  - job: test_sphinx_builds
    pool:
      vmImage: ubuntu-latest
    steps:
      - template: ../steps/sphinx-steps.yml
```
.azure-pipelines/templates/stages/changelog-stage.yml (new file, 19 lines)
@@ -0,0 +1,19 @@
```yaml
stages:
  - stage: Changelog
    jobs:
      - job: prepare
        pool:
          vmImage: vs2017-win2016
        steps:
          # If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
          - bash: |
              set -e
              CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
              "${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
            displayName: Prepare changelog
          - task: PublishPipelineArtifact@1
            inputs:
              path: $(Build.ArtifactStagingDirectory)
              # If we change the artifact's name, it should also be changed in tools/create_github_release.py
              artifact: changelog
            displayName: Publish changelog
```
.azure-pipelines/templates/stages/deploy-stage.yml (new file, 99 lines)
@@ -0,0 +1,99 @@
```yaml
parameters:
  - name: snapReleaseChannel
    type: string
    default: edge
    values:
      - edge
      - beta

stages:
  - stage: Deploy
    jobs:
      # This job relies on credentials used to publish the Certbot snaps. This
      # credential file was created by running:
      #
      #   snapcraft logout
      #   snapcraft login (provide the shared snapcraft credentials when prompted)
      #   snapcraft export-login --channels=beta,edge snapcraft.cfg
      #
      # Then the file was added as a secure file in Azure pipelines
      # with the name snapcraft.cfg by following the instructions at
      # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
      # including authorizing the file in all pipelines as described at
      # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#how-do-i-authorize-a-secure-file-for-use-in-all-pipelines.
      #
      # This file has a maximum lifetime of one year and the current
      # file will expire on 2021-07-28 which is also tracked by
      # https://github.com/certbot/certbot/issues/7931. The file will
      # need to be updated before then to prevent automated deploys
      # from breaking.
      #
      # Revoking these credentials can be done by changing the password of the
      # account used to generate the credentials. See
      # https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
      # more info.
      - job: publish_snap
        pool:
          vmImage: ubuntu-18.04
        variables:
          - group: certbot-common
        steps:
          - bash: |
              set -e
              sudo apt-get update
              sudo apt-get install -y --no-install-recommends snapd
              sudo snap install --classic snapcraft
            displayName: Install dependencies
          - task: DownloadPipelineArtifact@2
            inputs:
              artifact: snaps
              path: $(Build.SourcesDirectory)/snap
            displayName: Retrieve Certbot snaps
          - task: DownloadSecureFile@1
            name: snapcraftCfg
            inputs:
              secureFile: snapcraft.cfg
          - bash: |
              set -e
              mkdir -p .snapcraft
              ln -s $(snapcraftCfg.secureFilePath) .snapcraft/snapcraft.cfg
              for SNAP_FILE in snap/*.snap; do
                tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
              done
            displayName: Publish to Snap store
      - job: publish_docker
        pool:
          vmImage: ubuntu-18.04
        strategy:
          matrix:
            amd64:
              DOCKER_ARCH: amd64
            arm32v6:
              DOCKER_ARCH: arm32v6
            arm64v8:
              DOCKER_ARCH: arm64v8
        steps:
          - task: DownloadPipelineArtifact@2
            inputs:
              artifact: docker_$(DOCKER_ARCH)
              path: $(Build.SourcesDirectory)
            displayName: Retrieve Docker images
          - bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
            displayName: Load Docker images
          - task: Docker@2
            inputs:
              command: login
              # The credentials used here are for the shared certbotbot account
              # on Docker Hub. The credentials are stored in a service account
              # which was created by following the instructions at
              # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
              # The name given to this service account must match the value
              # given to containerRegistry below. "Grant access to all
              # pipelines" should also be checked. To revoke these
              # credentials, we can change the password on the certbotbot
              # Docker Hub account or remove the account from the
              # Certbot organization on Docker Hub.
              containerRegistry: docker-hub
            displayName: Login to Docker Hub
          - bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
            displayName: Deploy the Docker images
```
.azure-pipelines/templates/stages/notify-failure-stage.yml (new file, 19 lines)
@@ -0,0 +1,19 @@
```yaml
stages:
  - stage: On_Failure
    jobs:
      - job: notify_mattermost
        variables:
          - group: certbot-common
        pool:
          vmImage: ubuntu-latest
        steps:
          - bash: |
              set -e
              MESSAGE="\
              ---\n\
              ##### Azure Pipeline
              *Repo* $(Build.Repository.ID) - *Pipeline* $(Build.DefinitionName) #$(Build.BuildNumber) - *Branch/PR* $(Build.SourceBranchName)\n\
              :warning: __Pipeline has failed__: [Link to the build](https://dev.azure.com/$(Build.Repository.ID)/_build/results?buildId=$(Build.BuildId)&view=results)\n\n\
              ---"
              curl -i -X POST --data-urlencode "payload={\"text\":\"${MESSAGE}\"}" "$(MATTERMOST_URL)"
            condition: failed()
```
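For clarity, the `curl` call above form-encodes a single `payload` field whose value is a JSON document with a `text` key, which is what a Mattermost incoming webhook expects. A rough Python stand-in (the webhook URL below is a made-up placeholder; the real pipeline expands the Azure variables into the message):

```python
import json
import urllib.parse
import urllib.request

message = "---\n##### Azure Pipeline\n:warning: __Pipeline has failed__\n---"

# Mirror `--data-urlencode "payload={...}"`: form-encode a JSON blob under "payload".
data = urllib.parse.urlencode({"payload": json.dumps({"text": message})}).encode()
request = urllib.request.Request("https://mattermost.example/hooks/abc123", data=data)
# urllib.request.urlopen(request)  # would POST the failure notification
```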
@@ -0,0 +1,6 @@
```yaml
stages:
  - stage: TestAndPackage
    jobs:
      - template: ../jobs/standard-tests-jobs.yml
      - template: ../jobs/extended-tests-jobs.yml
      - template: ../jobs/packaging-jobs.yml
```
.azure-pipelines/templates/steps/sphinx-steps.yml (new file, 23 lines)
@@ -0,0 +1,23 @@
```yaml
steps:
  - bash: |
      FINAL_STATUS=0
      declare -a FAILED_BUILDS
      python3 -m venv .venv
      source .venv/bin/activate
      python tools/pipstrap.py
      for doc_path in */docs
      do
        echo ""
        echo "##[group]Building $doc_path"
        pip install -q -e $doc_path/..[docs]
        if ! sphinx-build -W --keep-going -b html $doc_path $doc_path/_build/html; then
          FINAL_STATUS=1
          FAILED_BUILDS[${#FAILED_BUILDS[@]}]="${doc_path%/docs}"
        fi
        echo "##[endgroup]"
      done
      if [[ $FINAL_STATUS -ne 0 ]]; then
        echo "##[error]The following builds failed: ${FAILED_BUILDS[*]}"
        exit 1
      fi
    displayName: Build Sphinx Documentation
```
.azure-pipelines/templates/steps/tox-steps.yml (new file, 53 lines)
@@ -0,0 +1,53 @@
```yaml
steps:
  - bash: |
      set -e
      brew install augeas
    condition: startswith(variables['IMAGE_NAME'], 'macOS')
    displayName: Install MacOS dependencies
  - bash: |
      set -e
      sudo apt-get update
      sudo apt-get install -y --no-install-recommends \
        python-dev \
        gcc \
        libaugeas0 \
        libssl-dev \
        libffi-dev \
        ca-certificates \
        nginx-light \
        openssl
      sudo systemctl stop nginx
    condition: startswith(variables['IMAGE_NAME'], 'ubuntu')
    displayName: Install Linux dependencies
  - task: UsePythonVersion@0
    inputs:
      versionSpec: $(PYTHON_VERSION)
      addToPath: true
  # tools/pip_install.py is used to pin packages to a known working version
  # except in tests where the environment variable CERTBOT_NO_PIN is set.
  # virtualenv is listed here explicitly to make sure it is upgraded when
  # CERTBOT_NO_PIN is set to work around failures we've seen when using an older
  # version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
  # set, pip updates dependencies it thinks are already satisfied to avoid some
  # problems with its lack of real dependency resolution.
  - bash: |
      set -e
      python tools/pipstrap.py
      python tools/pip_install.py -I tox virtualenv
    displayName: Install runtime dependencies
  - task: DownloadSecureFile@1
    name: testFarmPem
    inputs:
      secureFile: azure-test-farm.pem
    condition: contains(variables['TOXENV'], 'test-farm')
  - bash: |
      set -e
      export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
      [ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
      env
      python -m tox
    env:
      AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
      AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
      AWS_EC2_PEM_FILE: $(testFarmPem.secureFilePath)
    displayName: Run tox
```
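The `TARGET_BRANCH` computation in the last step strips the `refs/heads/` or `refs/tags/` prefix from `Build.SourceBranch`, unless a pull-request target branch is available. A quick Python stand-in for the `sed` expression (the ref names are just examples):

```python
import re

# Same transformation as: sed -E 's!refs/(heads|tags)/!!g'
for ref in ("refs/heads/master", "refs/tags/v1.9.0", "refs/heads/1.8.x"):
    print(re.sub(r"refs/(heads|tags)/", "", ref))
# master
# v1.9.0
# 1.8.x
```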
@@ -1,39 +0,0 @@
```yaml
jobs:
  - job: test
    strategy:
      matrix:
        macos-py27:
          IMAGE_NAME: macOS-10.14
          PYTHON_VERSION: 2.7
          TOXENV: py27
        macos-py38:
          IMAGE_NAME: macOS-10.14
          PYTHON_VERSION: 3.8
          TOXENV: py38
        windows-py35:
          IMAGE_NAME: vs2017-win2016
          PYTHON_VERSION: 3.5
          TOXENV: py35
        windows-py37-cover:
          IMAGE_NAME: vs2017-win2016
          PYTHON_VERSION: 3.7
          TOXENV: py37-cover
        windows-integration-certbot:
          IMAGE_NAME: vs2017-win2016
          PYTHON_VERSION: 3.7
          TOXENV: integration-certbot
          PYTEST_ADDOPTS: --numprocesses 4
    pool:
      vmImage: $(IMAGE_NAME)
    steps:
      - bash: brew install augeas
        condition: startswith(variables['IMAGE_NAME'], 'macOS')
        displayName: Install Augeas
      - task: UsePythonVersion@0
        inputs:
          versionSpec: $(PYTHON_VERSION)
          addToPath: true
      - script: python tools/pip_install.py -U tox coverage
        displayName: Install dependencies
      - script: python -m tox
        displayName: Run tox
```
.editorconfig (new file, 18 lines)
@@ -0,0 +1,18 @@
```ini
# https://editorconfig.org/

root = true

[*]
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf

[*.py]
indent_style = space
indent_size = 4
charset = utf-8
max_line_length = 100

[*.yaml]
indent_style = space
indent_size = 2
```
.envrc (new file, 12 lines)
@@ -0,0 +1,12 @@
```bash
# This file is just a nicety for developers who use direnv. When you cd under
# the Certbot repo, Certbot's virtual environment will be automatically
# activated and then deactivated when you cd elsewhere. Developers have to have
# direnv set up and run `direnv allow` to allow this file to execute on their
# system. You can find more information at https://direnv.net/.
. venv3/bin/activate
# direnv doesn't support modifying PS1 so we unset it to squelch the error
# it'll otherwise print about this being done in the activate script. See
# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
# prompt to change like it normally does, see
# https://github.com/direnv/direnv/wiki/Python#restoring-the-ps1.
unset PS1
```
.gitignore (5 changes)
@@ -60,3 +60,8 @@ stage
```
*.snap
snap-constraints.txt
qemu-*
certbot-dns*/certbot-dns*_amd64*.txt
certbot-dns*/certbot-dns*_arm*.txt
/certbot_amd64*.txt
/certbot_arm*.txt
certbot-dns*/snap
```
.travis.yml (deleted, 328 lines)
@@ -1,328 +0,0 @@
```yaml
language: python
dist: xenial

cache:
  directories:
    - $HOME/.cache/pip

before_script:
  # On Travis, the fastest parallelization for integration tests has proved to be 4.
  - 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
  # Use Travis retry feature for farm tests since they are flaky
  - 'if [[ "$TOXENV" == "travis-test-farm"* ]]; then export TRAVIS_RETRY=travis_retry; fi'
  - export TOX_TESTENV_PASSENV=TRAVIS
  - 'if [[ "$SNAP" == true ]]; then snap/local/build_and_install.sh $TARGET_ARCH; fi'

# Only build pushes to the master branch, PRs, and branches beginning with
# `test-`, `travis-test-`, or of the form `digit(s).digit(s).x` or
# `vdigit(s).digit(s).digit(s)`. As documented at
# https://docs.travis-ci.com/user/customizing-the-build/#safelisting-or-blocklisting-branches,
# this includes tags so pushing tags of the form `vdigit(s).digit(s).digit(s)`
# will also trigger tests. This reduces the number of simultaneous Travis runs,
# which speeds turnaround time on review since there is a cap of on the number
# of simultaneous runs.
branches:
  # When changing these branches, please ensure the documentation under
  # "Running tests in CI" is still correct.
  only:
    - master
    - /^\d+\.\d+\.x$/ # this matches our point release branches
    - /^v\d+\.\d+\.\d+$/ # this matches our release tags
    - /^(travis-)?test-.*$/

# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
not-on-master: &not-on-master
  if: NOT (type = push AND branch = master)

# Jobs for the extended test suite are executed for cron jobs and pushes to
# non-development branches.
extended-test-suite: &extended-test-suite
  if: type = cron OR (type = push AND branch != master)

# Common configuration for all snap tasks
snap-config: &snap-config
  dist: bionic
  addons:
    apt:
      packages:
        - nginx-light
  git:
    # By default, Travis clones the repo to a depth of 50 commits which can
    # break the ability to use `git describe` to set the version of the
    # snap. This setting removes the --depth flag from git commands solving
    # this problem. See
    # https://docs.travis-ci.com/user/customizing-the-build#git-clone-depth
    # for more info.
    depth: false
  deploy:
    # This section relies on credentials stored in a SNAP_TOKEN environment
    # variable in Travis. See
    # https://docs.travis-ci.com/user/deployment/snaps/ for more info.
    # This credential has a maximum lifetime of 1 year and the current
    # credential will expire on 4/22/2021. The value of SNAP_TOKEN will
    # need to be updated to use a new credential before then to prevent
    # automated deploys from breaking. Remembering to do this is also
    # tracked by https://github.com/certbot/certbot/issues/7931.
    'on':
      # Deploy on release tags or nightly runs from any branch. We only try
      # to deploy from the certbot/certbot repo to prevent errors if forks
      # of this repo try to run tests.
      all_branches: true
      condition: -n $TRAVIS_TAG || $TRAVIS_EVENT_TYPE = cron
      repo: certbot/certbot
    provider: snap
    snap: certbot_*.snap
    channel: edge
    # skip_cleanup is needed to prevent Travis from deleting the snaps we
    # just built and tested. See
    # https://docs.travis-ci.com/user/deployment#uploading-files-and-skip_cleanup.
    skip_cleanup: true

matrix:
  include:
    # Main test suite
    - stage: "Test"
      python: "2.7"
      env: ACME_SERVER=pebble TOXENV=integration
      <<: *not-on-master

    # As documented at
    # https://docs.travis-ci.com/user/build-stages/#how-to-define-build-stages,
    # the previous stage will be automatically applied to all subsequent jobs
    # until a new stage is defined.

    # This job is always executed, including on master
    - python: "3.8"
      env: TOXENV=py38-cover FYI="py38 tests + code coverage"

    - python: "3.7"
      env: TOXENV=lint
      <<: *not-on-master
    - python: "3.5"
      env: TOXENV=mypy
      <<: *not-on-master
    - python: "2.7"
      # Ubuntu Trusty or older must be used because the oldest version of
      # cryptography we support cannot be compiled against the version of
      # OpenSSL in Xenial or newer.
      dist: trusty
      env: TOXENV='py27-{acme,apache,apache-v2,certbot,dns,nginx}-oldest'
      <<: *not-on-master
    - python: "2.7"
      env: TOXENV=py27
      <<: *not-on-master
    - python: "3.5"
      env: TOXENV=py35
      <<: *not-on-master
    - sudo: required
      env: TOXENV=apache_compat
      services: docker
      addons:
      <<: *not-on-master
    - sudo: required
      env: TOXENV=le_auto_xenial
      services: docker
      <<: *not-on-master
    - python: "2.7"
      env: TOXENV=apacheconftest-with-pebble
      <<: *not-on-master
    - python: "2.7"
      env: TOXENV=nginxroundtrip
      <<: *not-on-master

    # Extended test suite on cron jobs and pushes to tested branches other than master
    - sudo: required
      env: TOXENV=nginx_compat
      services: docker
      addons:
      <<: *extended-test-suite
    - python: "3.7"
      env:
        - TOXENV=travis-test-farm-apache2
        - secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
      <<: *extended-test-suite
    - python: "3.7"
      env:
        - TOXENV=travis-test-farm-leauto-upgrades
        - secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
      git:
        depth: false # This is needed to have the history to checkout old versions of certbot-auto.
      <<: *extended-test-suite
    - python: "3.7"
      env:
        - TOXENV=travis-test-farm-certonly-standalone
        - secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
      <<: *extended-test-suite
    - python: "3.7"
      env:
        - TOXENV=travis-test-farm-sdists
        - secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
      <<: *extended-test-suite
    - python: "3.7"
      env: TOXENV=py37 CERTBOT_NO_PIN=1
      <<: *extended-test-suite
    - python: "2.7"
      env: ACME_SERVER=boulder-v1 TOXENV=integration
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "2.7"
      env: ACME_SERVER=boulder-v2 TOXENV=integration
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "2.7"
      env: ACME_SERVER=boulder-v1 TOXENV=integration-certbot-oldest
      # Ubuntu Trusty or older must be used because the oldest version of
      # cryptography we support cannot be compiled against the version of
      # OpenSSL in Xenial or newer.
      dist: trusty
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "2.7"
      env: ACME_SERVER=boulder-v2 TOXENV=integration-certbot-oldest
      # Ubuntu Trusty or older must be used because the oldest version of
      # cryptography we support cannot be compiled against the version of
      # OpenSSL in Xenial or newer.
      dist: trusty
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "2.7"
      env: ACME_SERVER=boulder-v1 TOXENV=integration-nginx-oldest
      # Ubuntu Trusty or older must be used because the oldest version of
      # cryptography we support cannot be compiled against the version of
      # OpenSSL in Xenial or newer.
      dist: trusty
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "2.7"
      env: ACME_SERVER=boulder-v2 TOXENV=integration-nginx-oldest
      # Ubuntu Trusty or older must be used because the oldest version of
      # cryptography we support cannot be compiled against the version of
      # OpenSSL in Xenial or newer.
      dist: trusty
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "3.6"
      env: TOXENV=py36
      <<: *extended-test-suite
    - python: "3.7"
      env: TOXENV=py37
      <<: *extended-test-suite
    - python: "3.5"
      env: ACME_SERVER=boulder-v1 TOXENV=integration
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "3.5"
      env: ACME_SERVER=boulder-v2 TOXENV=integration
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "3.6"
      env: ACME_SERVER=boulder-v1 TOXENV=integration
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "3.6"
      env: ACME_SERVER=boulder-v2 TOXENV=integration
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "3.7"
      env: ACME_SERVER=boulder-v1 TOXENV=integration
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "3.7"
      env: ACME_SERVER=boulder-v2 TOXENV=integration
      sudo: required
      services: docker
      <<: *extended-test-suite
    - python: "3.8"
      env: ACME_SERVER=boulder-v1 TOXENV=integration
      <<: *extended-test-suite
    - python: "3.8"
      env: ACME_SERVER=boulder-v2 TOXENV=integration
      <<: *extended-test-suite
    - sudo: required
      env: TOXENV=le_auto_jessie
      services: docker
      <<: *extended-test-suite
    - sudo: required
      env: TOXENV=le_auto_centos6
      services: docker
      <<: *extended-test-suite
    - sudo: required
      env: TOXENV=le_auto_oraclelinux6
      services: docker
      <<: *extended-test-suite
    - sudo: required
      env: TOXENV=docker_dev
      services: docker
      addons:
        apt:
          packages: # don't install nginx and apache
            - libaugeas0
      <<: *extended-test-suite
    - stage: "Snap"
      env: SNAP=true TOXENV=integration-external,apacheconftest-external-with-pebble TARGET_ARCH=amd64
      <<: *snap-config
      <<: *extended-test-suite
    - env: SNAP=true TARGET_ARCH=i386
      <<: *snap-config
      <<: *extended-test-suite
    - env: SNAP=true TARGET_ARCH=arm64
      <<: *snap-config
      <<: *extended-test-suite
    - env: SNAP=true TARGET_ARCH=armhf
      <<: *snap-config
      <<: *extended-test-suite

# container-based infrastructure
sudo: false

addons:
  apt:
    packages: # Keep in sync with letsencrypt-auto-source/pieces/bootstrappers/deb_common.sh and Boulder.
      - python-dev
      - gcc
      - libaugeas0
      - libssl-dev
      - libffi-dev
      - ca-certificates
      # For certbot-nginx integration testing
      - nginx-light
      - openssl

# tools/pip_install.py is used to pin packages to a known working version
# except in tests where the environment variable CERTBOT_NO_PIN is set.
# virtualenv is listed here explicitly to make sure it is upgraded when
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
# set, pip updates dependencies it thinks are already satisfied to avoid some
# problems with its lack of real dependency resolution.
install: 'tools/pip_install.py -I tox virtualenv'
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
# script command. It is set only to `travis_retry` during farm tests, in
# order to trigger the Travis retry feature, and compensate the inherent
# flakiness of these specific tests.
script: 'if [[ ! -z "$TOXENV" ]]; then $TRAVIS_RETRY tox; fi'

notifications:
  email: false
  irc:
    if: NOT branch =~ ^(travis-)?test-.*$
    channels:
      # This is set to a secure variable to prevent forks from sending
      # notifications. This value was created by installing
      # https://github.com/travis-ci/travis.rb and running
      # `travis encrypt "chat.freenode.net#certbot-devel"`.
      - secure: "EWW66E2+KVPZyIPR8ViENZwfcup4Gx3/dlimmAZE0WuLwxDCshBBOd3O8Rf6pBokEoZlXM5eDT6XdyJj8n0DLslgjO62pExdunXpbcMwdY7l1ELxX2/UbnDTE6UnPYa09qVBHNG7156Z6yE0x2lH4M9Ykvp0G0cubjPQHylAwo0="
    on_cancel: never
    on_success: never
    on_failure: always
```
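As a sanity check on the branch safelist near the top of the removed file, here is a small Python sketch (the branch names are only illustrative) of what the three regular expressions matched:

```python
import re

patterns = [r"^\d+\.\d+\.x$",        # point release branches
            r"^v\d+\.\d+\.\d+$",     # release tags
            r"^(travis-)?test-.*$"]  # test branches

for name in ("1.8.x", "v1.9.0", "test-docker", "travis-test-farm", "feature-xyz"):
    print(name, any(re.match(p, name) for p in patterns))
# 1.8.x True, v1.9.0 True, test-docker True, travis-test-farm True, feature-xyz False
```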
@@ -1,6 +1,7 @@

Authors
=======

* [Aaron Gable](https://github.com/aarongable)
* [Aaron Zirbes](https://github.com/aaronzirbes)
* Aaron Zuehlke
* Ada Lovelace

@@ -60,7 +61,9 @@ Authors

* [DanCld](https://github.com/DanCld)
* [Daniel Albers](https://github.com/AID)
* [Daniel Aleksandersen](https://github.com/da2x)
* [Daniel Almasi](https://github.com/almasen)
* [Daniel Convissor](https://github.com/convissor)
* [Daniel "Drex" Drexler](https://github.com/aeturnum)
* [Daniel Huang](https://github.com/dhuang)
* [Dave Guarino](https://github.com/daguar)
* [David cz](https://github.com/dave-cz)

@@ -85,6 +88,7 @@ Authors

* [Felix Schwarz](https://github.com/FelixSchwarz)
* [Felix Yan](https://github.com/felixonmars)
* [Filip Ochnik](https://github.com/filipochnik)
* [Florian Klink](https://github.com/flokli)
* [Francois Marier](https://github.com/fmarier)
* [Frank](https://github.com/Frankkkkk)
* [Frederic BLANC](https://github.com/fblanc)

@@ -147,11 +151,13 @@ Authors

* [Lior Sabag](https://github.com/liorsbg)
* [Lipis](https://github.com/lipis)
* [lord63](https://github.com/lord63)
* [Lorenzo Fundaró](https://github.com/lfundaro)
* [Luca Beltrame](https://github.com/lbeltrame)
* [Luca Ebach](https://github.com/lucebac)
* [Luca Olivetti](https://github.com/olivluca)
* [Luke Rogers](https://github.com/lukeroge)
* [Maarten](https://github.com/mrtndwrd)
* [Mads Jensen](https://github.com/atombrella)
* [Maikel Martens](https://github.com/krukas)
* [Malte Janduda](https://github.com/MalteJ)
* [Mantas Mikulėnas](https://github.com/grawity)

@@ -211,6 +217,7 @@ Authors

* [Richard Barnes](https://github.com/r-barnes)
* [Richard Panek](https://github.com/kernelpanek)
* [Robert Buchholz](https://github.com/rbu)
* [Robert Dailey](https://github.com/pahrohfit)
* [Robert Habermann](https://github.com/frennkie)
* [Robert Xiao](https://github.com/nneonneo)
* [Roland Shoemaker](https://github.com/rolandshoemaker)

@@ -236,6 +243,7 @@ Authors

* [Spencer Bliven](https://github.com/sbliven)
* [Stacey Sheldon](https://github.com/solidgoldbomb)
* [Stavros Korokithakis](https://github.com/skorokithakis)
* [Ștefan Talpalaru](https://github.com/stefantalpalaru)
* [Stefan Weil](https://github.com/stweil)
* [Steve Desmond](https://github.com/stevedesmond-ca)
* [sydneyli](https://github.com/sydneyli)
@@ -11,7 +11,7 @@ to the Sphinx generated docs is provided below.
```

[1] https://github.com/blog/1184-contributing-guidelines
[2] http://docutils.sourceforge.net/docs/user/rst/quickref.html#hyperlink-targets
[2] https://docutils.sourceforge.io/docs/user/rst/quickref.html#hyperlink-targets

-->
```
@@ -6,7 +6,6 @@ This module is an implementation of the `ACME protocol`_.
```python
"""
import sys
import warnings

# This code exists to keep backwards compatibility with people using acme.jose
# before it became the standalone josepy package.

```
@@ -150,7 +150,7 @@ class KeyAuthorizationChallenge(_TokenChallenge):
```python
    """Challenge based on Key Authorization.

    :param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
        that will be used to generate `response`.
        that will be used to generate ``response``.
    :param str typ: type of the challenge
    """
    typ = NotImplemented

```
@@ -13,6 +13,7 @@ import josepy as jose
```python
import OpenSSL
import requests
from requests.adapters import HTTPAdapter
from requests.utils import parse_header_links
from requests_toolbelt.adapters.source import SourceAddressAdapter
import six
from six.moves import http_client
```
@@ -200,7 +201,7 @@ class ClientBase(object):
```python
        when = parsedate_tz(retry_after)
        if when is not None:
            try:
                tz_secs = datetime.timedelta(when[-1] if when[-1] else 0)
                tz_secs = datetime.timedelta(when[-1] if when[-1] is not None else 0)
                return datetime.datetime(*when[:7]) - tz_secs
            except (ValueError, OverflowError):
                pass
```
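For context on the guard above: `parsedate_tz()` returns a 10-tuple whose last element is the UTC offset in seconds, or `None` when the date string carries no timezone, so comparing against `None` states the intent explicitly instead of relying on truthiness. A small illustration (the dates are arbitrary):

```python
from email.utils import parsedate_tz

print(parsedate_tz("Fri, 18 Sep 2020 14:00:00 GMT")[-1])    # 0 (explicit zero offset)
print(parsedate_tz("Fri, 18 Sep 2020 14:00:00 -0400")[-1])  # -14400
print(parsedate_tz("Fri, 18 Sep 2020 14:00:00")[-1])        # None (no zone given)
```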
@@ -447,7 +448,7 @@ class Client(ClientBase):
```python
        heapq.heapify(waiting)
        # mapping between original Authorization Resource and the most
        # recently updated one
        updated = dict((authzr, authzr) for authzr in authzrs)
        updated = {authzr: authzr for authzr in authzrs}

        while waiting:
            # find the smallest Retry-After, and sleep if necessary
```
@@ -733,11 +734,13 @@ class ClientV2(ClientBase):
```python
            raise errors.ValidationError(failed)
        return orderr.update(authorizations=responses)

    def finalize_order(self, orderr, deadline):
    def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
        """Finalize an order and obtain a certificate.

        :param messages.OrderResource orderr: order to finalize
        :param datetime.datetime deadline: when to stop polling and timeout
        :param bool fetch_alternative_chains: whether to also fetch alternative
            certificate chains

        :returns: finalized order
        :rtype: messages.OrderResource
```
@@ -754,8 +757,13 @@ class ClientV2(ClientBase):
|
||||
if body.error is not None:
|
||||
raise errors.IssuanceError(body.error)
|
||||
if body.certificate is not None:
|
||||
certificate_response = self._post_as_get(body.certificate).text
|
||||
return orderr.update(body=body, fullchain_pem=certificate_response)
|
||||
certificate_response = self._post_as_get(body.certificate)
|
||||
orderr = orderr.update(body=body, fullchain_pem=certificate_response.text)
|
||||
if fetch_alternative_chains:
|
||||
alt_chains_urls = self._get_links(certificate_response, 'alternate')
|
||||
alt_chains = [self._post_as_get(url).text for url in alt_chains_urls]
|
||||
orderr = orderr.update(alternative_fullchains_pem=alt_chains)
|
||||
return orderr
|
||||
raise errors.TimeoutError()
|
||||
|
||||
def revoke(self, cert, rsn):
|
||||
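As a rough usage sketch of the new keyword (the `client` and `orderr` arguments are placeholders created elsewhere, not defined by this diff):

```python
import datetime

def finalize_with_alternatives(client, orderr):
    """Sketch only: `client` is an acme.client.ClientV2 and `orderr` a
    pending acme.messages.OrderResource, both obtained elsewhere."""
    deadline = datetime.datetime.now() + datetime.timedelta(seconds=90)
    finalized = client.finalize_order(orderr, deadline,
                                      fetch_alternative_chains=True)
    # The default chain is still exposed as fullchain_pem; any alternative
    # chains (possibly an empty list) end up in alternative_fullchains_pem.
    return finalized.fullchain_pem, finalized.alternative_fullchains_pem
```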
@@ -785,6 +793,20 @@ class ClientV2(ClientBase):
new_args = args[:1] + (None,) + args[1:]
return self._post(*new_args, **kwargs)

def _get_links(self, response, relation_type):
"""
Retrieves all Link URIs of relation_type from the response.
:param requests.Response response: The requests HTTP response.
:param str relation_type: The relation type to filter by.
"""
# Can't use response.links directly because it drops multiple links
# of the same relation type, which is possible in RFC8555 responses.
if 'Link' not in response.headers:
return []
links = parse_header_links(response.headers['Link'])
return [l['url'] for l in links
if 'rel' in l and 'url' in l and l['rel'] == relation_type]


class BackwardsCompatibleClientV2(object):
"""ACME client wrapper that tends towards V2-style calls, but
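To make the rel-filtering in `_get_links` concrete, here is a small, self-contained illustration of what requests' `parse_header_links` returns for a multi-valued Link header (the header value is invented for the example):

```python
from requests.utils import parse_header_links

link_header = ('<https://example.com/acme/cert/1>;rel="alternate", '
               '<https://example.com/dir>;rel="index", '
               '<https://example.com/acme/cert/2>;rel="alternate"')

# parse_header_links keeps every entry, so both "alternate" URLs survive;
# requests' response.links dict is keyed by rel and would keep only one.
links = parse_header_links(link_header)
alternates = [l['url'] for l in links
              if l.get('rel') == 'alternate' and 'url' in l]
print(alternates)
# ['https://example.com/acme/cert/1', 'https://example.com/acme/cert/2']
```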
@@ -863,11 +885,13 @@ class BackwardsCompatibleClientV2(object):
return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
return self.client.new_order(csr_pem)

def finalize_order(self, orderr, deadline):
def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
"""Finalize an order and obtain a certificate.

:param messages.OrderResource orderr: order to finalize
:param datetime.datetime deadline: when to stop polling and timeout
:param bool fetch_alternative_chains: whether to also fetch alternative
certificate chains

:returns: finalized order
:rtype: messages.OrderResource

@@ -898,7 +922,7 @@ class BackwardsCompatibleClientV2(object):
chain = crypto_util.dump_pyopenssl_chain(chain).decode()

return orderr.update(fullchain_pem=(cert + chain))
return self.client.finalize_order(orderr, deadline)
return self.client.finalize_order(orderr, deadline, fetch_alternative_chains)

def revoke(self, cert, rsn):
"""Revoke certificate.
@@ -166,7 +166,7 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
" from {0}:{1}".format(
source_address[0],
source_address[1]
) if socket_kwargs else ""
) if any(source_address) else ""
)
socket_tuple = (host, port) # type: Tuple[str, int]
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore

@@ -186,6 +186,7 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
raise errors.Error(error)
return client_ssl.get_peer_certificate()


def make_csr(private_key_pem, domains, must_staple=False):
"""Generate a CSR containing a list of domains as subjectAltNames.

@@ -217,6 +218,7 @@ def make_csr(private_key_pem, domains, must_staple=False):
return crypto.dump_certificate_request(
crypto.FILETYPE_PEM, csr)


def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
common_name = loaded_cert_or_req.get_subject().CN
sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)

@@ -225,6 +227,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
return sans
return [common_name] + [d for d in sans if d != common_name]


def _pyopenssl_cert_or_req_san(cert_or_req):
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.

@@ -317,6 +320,7 @@ def gen_ss_cert(key, domains, not_before=None,
cert.sign(key, "sha256")
return cert


def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
"""Dump certificate chain into a bundle.
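Since `make_csr` appears in the context above, a hedged usage sketch (the key is a throwaway generated on the spot; nothing here comes from the diff itself):

```python
import OpenSSL

from acme import crypto_util

# Generate a throwaway RSA key purely for the demonstration.
key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
key_pem = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)

# Both names end up in subjectAltName; must_staple=True additionally adds
# the OCSP Must-Staple extension to the request.
csr_pem = crypto_util.make_csr(key_pem, ['example.org', 'www.example.org'],
                               must_staple=True)
print(csr_pem.splitlines()[0])  # b'-----BEGIN CERTIFICATE REQUEST-----'
```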
@@ -49,7 +49,7 @@ class MissingNonce(NonceError):
Replay-Nonce header field in each successful response to a POST it
provides to a client (...)".

:ivar requests.Response response: HTTP Response
:ivar requests.Response ~.response: HTTP Response

"""
def __init__(self, response, *args, **kwargs):
@@ -12,4 +12,5 @@ try:
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
from typing import Collection, IO # type: ignore
except ImportError:
sys.modules[__name__] = TypingClass()
# mypy complains because TypingClass is not a module
sys.modules[__name__] = TypingClass() # type: ignore
@@ -206,7 +206,7 @@ class Directory(jose.JSONDeSerializable):
external_account_required = jose.Field('externalAccountRequired', omitempty=True)

def __init__(self, **kwargs):
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super(Directory.Meta, self).__init__(**kwargs)

@property

@@ -275,7 +275,7 @@ class Resource(jose.JSONObjectWithFields):
class ResourceWithURI(Resource):
"""ACME Resource with URI.

:ivar unicode uri: Location of the resource.
:ivar unicode ~.uri: Location of the resource.

"""
uri = jose.Field('uri') # no ChallengeResource.uri

@@ -315,6 +315,9 @@ class Registration(ResourceBody):
# on new-reg key server ignores 'key' and populates it based on
# JWS.signature.combined.jwk
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
# Contact field implements special behavior to allow messages that clear existing
# contacts while not expecting the `contact` field when loading from json.
# This is implemented in the constructor and *_json methods.
contact = jose.Field('contact', omitempty=True, default=())
agreement = jose.Field('agreement', omitempty=True)
status = jose.Field('status', omitempty=True)
@@ -327,24 +330,73 @@ class Registration(ResourceBody):

@classmethod
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
"""Create registration resource from contact details."""
"""
Create registration resource from contact details.

The `contact` keyword being passed to a Registration object is meaningful, so
this function represents empty iterables in its kwargs by passing on an empty
`tuple`.
"""

# Note if `contact` was in kwargs.
contact_provided = 'contact' in kwargs

# Pop `contact` from kwargs and add formatted email or phone numbers
details = list(kwargs.pop('contact', ()))
if phone is not None:
details.append(cls.phone_prefix + phone)
if email is not None:
details.extend([cls.email_prefix + mail for mail in email.split(',')])
kwargs['contact'] = tuple(details)

# Insert formatted contact information back into kwargs
# or insert an empty tuple if `contact` provided.
if details or contact_provided:
kwargs['contact'] = tuple(details)

if external_account_binding:
kwargs['external_account_binding'] = external_account_binding

return cls(**kwargs)

def __init__(self, **kwargs):
"""Note if the user provides a value for the `contact` member."""
if 'contact' in kwargs:
# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
object.__setattr__(self, '_add_contact', True)
super(Registration, self).__init__(**kwargs)

def _filter_contact(self, prefix):
return tuple(
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
if detail.startswith(prefix))

def _add_contact_if_appropriate(self, jobj):
"""
The `contact` member of Registration objects should not be required when
de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
it should be included in serializations if it was provided.

:param jobj: Dictionary containing this Registrations' data
:type jobj: dict

:returns: Dictionary containing Registrations data to transmit to the server
:rtype: dict
"""
if getattr(self, '_add_contact', False):
jobj['contact'] = self.encode('contact')

return jobj

def to_partial_json(self):
"""Modify josepy.JSONDeserializable.to_partial_json()"""
jobj = super(Registration, self).to_partial_json()
return self._add_contact_if_appropriate(jobj)

def fields_to_partial_json(self):
"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
jobj = super(Registration, self).fields_to_partial_json()
return self._add_contact_if_appropriate(jobj)

@property
def phones(self):
"""All phones found in the ``contact`` field."""
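A small sketch of the behavior the constructor and `from_data` changes are after (illustrative only; the assertions mirror the new `test_default_not_transmitted` test further below):

```python
from acme import messages

# No contact information at all: the field is omitted from the JSON the
# client sends, so the server keeps whatever contacts it already has.
no_contact = messages.NewRegistration()
assert 'contact' not in no_contact.to_partial_json()

# An explicit empty contact tuple is preserved, which is how a client can
# ask the server to clear its existing contacts.
clear_contact = messages.NewRegistration(contact=())
assert 'contact' in clear_contact.to_partial_json()

# from_data() still formats mailto: entries for convenience.
reg = messages.Registration.from_data(email='admin@example.org')
assert reg.contact == ('mailto:admin@example.org',)
```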
@@ -413,7 +465,7 @@ class ChallengeBody(ResourceBody):
omitempty=True, default=None)

def __init__(self, **kwargs):
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super(ChallengeBody, self).__init__(**kwargs)

def encode(self, name):
@@ -566,14 +618,16 @@ class Revocation(ResourceMixin, jose.JSONObjectWithFields):
class Order(ResourceBody):
"""Order Resource Body.

:ivar list of .Identifier: List of identifiers for the certificate.
:ivar identifiers: List of identifiers for the certificate.
:vartype identifiers: `list` of `.Identifier`
:ivar acme.messages.Status status:
:ivar list of str authorizations: URLs of authorizations.
:ivar authorizations: URLs of authorizations.
:vartype authorizations: `list` of `str`
:ivar str certificate: URL to download certificate as a fullchain PEM.
:ivar str finalize: URL to POST to to request issuance once all
authorizations have "valid" status.
:ivar datetime.datetime expires: When the order expires.
:ivar .Error error: Any error that occurred during finalization, if applicable.
:ivar ~.Error error: Any error that occurred during finalization, if applicable.
"""
identifiers = jose.Field('identifiers', omitempty=True)
status = jose.Field('status', decoder=Status.from_json,
@@ -593,15 +647,20 @@ class OrderResource(ResourceWithURI):

:ivar acme.messages.Order body:
:ivar str csr_pem: The CSR this Order will be finalized with.
:ivar list of acme.messages.AuthorizationResource authorizations:
Fully-fetched AuthorizationResource objects.
:ivar authorizations: Fully-fetched AuthorizationResource objects.
:vartype authorizations: `list` of `acme.messages.AuthorizationResource`
:ivar str fullchain_pem: The fetched contents of the certificate URL
produced once the order was finalized, if it's present.
:ivar alternative_fullchains_pem: The fetched contents of alternative certificate
chain URLs produced once the order was finalized, if present and requested during
finalization.
:vartype alternative_fullchains_pem: `list` of `str`
"""
body = jose.Field('body', decoder=Order.from_json)
csr_pem = jose.Field('csr_pem', omitempty=True)
authorizations = jose.Field('authorizations')
fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
alternative_fullchains_pem = jose.Field('alternative_fullchains_pem', omitempty=True)

@Directory.register
class NewOrder(Order):
@@ -4,4 +4,4 @@ import six

def map_keys(dikt, func):
"""Map dictionary keys."""
return dict((func(key), value) for key, value in six.iteritems(dikt))
return {func(key): value for key, value in six.iteritems(dikt)}
@@ -9,7 +9,7 @@ BUILDDIR = _build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/)
endif

# Internal variables.
@@ -85,7 +85,10 @@ language = 'en'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
exclude_patterns = [
'_build',
'man/*'
]

# The reST default role (used for this markup: `text`) to use for all
# documents.

@@ -120,7 +123,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.

# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
@@ -65,7 +65,7 @@ if errorlevel 9009 (
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
echo.https://www.sphinx-doc.org/
exit /b 1
)
@@ -1,33 +1,29 @@
from distutils.version import StrictVersion
from distutils.version import LooseVersion
import sys

from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand

version = '1.6.0.dev0'
version = '1.12.0.dev0'

# Please update tox.ini when modifying dependency version requirements
install_requires = [
# load_pem_private/public_key (>=0.6)
# rsa_recover_prime_factors (>=0.8)
'cryptography>=1.2.3',
'cryptography>=2.1.4',
# formerly known as acme.jose:
# 1.1.0+ is required to avoid the warnings described at
# https://github.com/certbot/josepy/issues/13.
'josepy>=1.1.0',
# Connection.set_tlsext_host_name (>=0.13)
'PyOpenSSL>=0.13.1',
'PyOpenSSL>=17.3.0',
'pyrfc3339',
'pytz',
'requests[security]>=2.6.0', # security extras added in 2.4.1
'requests-toolbelt>=0.3.0',
'setuptools',
'six>=1.9.0', # needed for python_2_unicode_compatible
'setuptools>=39.0.1',
'six>=1.11.0',
]

setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
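Background on the StrictVersion to LooseVersion switch (my gloss, not part of the diff): setuptools can report version strings that StrictVersion refuses to parse, while LooseVersion still compares them sensibly. The version string below is only an example in that style:

```python
from distutils.version import LooseVersion, StrictVersion

# Post-release style versions compare fine with LooseVersion...
print(LooseVersion('46.1.3.post20200330') >= LooseVersion('36.2'))  # True

# ...but StrictVersion raises ValueError on them.
try:
    StrictVersion('46.1.3.post20200330')
except ValueError as err:
    print('StrictVersion rejects it:', err)
```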
@@ -47,22 +43,6 @@ docs_extras = [
'sphinx_rtd_theme',
]


class PyTest(TestCommand):
user_options = []

def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''

def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)


setup(
name='acme',
version=version,
@@ -71,19 +51,17 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=3.6',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],

@@ -95,7 +73,4 @@ setup(
'dev': dev_extras,
'docs': docs_extras,
},
test_suite='acme',
tests_require=["pytest"],
cmdclass={"test": PyTest},
)
@@ -263,7 +263,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
with mock.patch('acme.client.ClientV2') as mock_client:
client = self._init()
client.finalize_order(mock_orderr, mock_deadline)
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline)
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline, False)

def test_revoke(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
@@ -842,6 +842,32 @@ class ClientV2Test(ClientTestBase):
deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
self.assertRaises(errors.TimeoutError, self.client.finalize_order, self.orderr, deadline)

def test_finalize_order_alt_chains(self):
updated_order = self.order.update(
certificate='https://www.letsencrypt-demo.org/acme/cert/',
)
updated_orderr = self.orderr.update(body=updated_order,
fullchain_pem=CERT_SAN_PEM,
alternative_fullchains_pem=[CERT_SAN_PEM,
CERT_SAN_PEM])
self.response.json.return_value = updated_order.to_json()
self.response.text = CERT_SAN_PEM
self.response.headers['Link'] = '<https://example.com/acme/cert/1>;rel="alternate", ' + \
'<https://example.com/dir>;rel="index", ' + \
'<https://example.com/acme/cert/2>;title="foo";rel="alternate"'

deadline = datetime.datetime(9999, 9, 9)
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
self.net.post.assert_any_call('https://example.com/acme/cert/1',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
self.net.post.assert_any_call('https://example.com/acme/cert/2',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
self.assertEqual(resp, updated_orderr)

del self.response.headers['Link']
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
self.assertEqual(resp, updated_orderr.update(alternative_fullchains_pem=[]))

def test_revoke(self):
self.client.revoke(messages_test.CERT, self.rsn)
self.net.post.assert_called_once_with(

@@ -1316,7 +1342,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
# test should fail if the default adapter type is changed by requests
net = ClientNetwork(key=None, alg=None)
session = requests.Session()
for scheme in session.adapters.keys():
for scheme in session.adapters:
client_network_adapter = net.session.adapters.get(scheme)
default_adapter = session.adapters.get(scheme)
self.assertEqual(client_network_adapter.__class__, default_adapter.__class__)
@@ -108,11 +108,11 @@ class ConstantTest(unittest.TestCase):

def test_equality(self):
const_a_prime = self.MockConstant('a')
self.assertFalse(self.const_a == self.const_b)
self.assertTrue(self.const_a == const_a_prime)
self.assertNotEqual(self.const_a, self.const_b)
self.assertEqual(self.const_a, const_a_prime)

self.assertTrue(self.const_a != self.const_b)
self.assertFalse(self.const_a != const_a_prime)
self.assertNotEqual(self.const_a, self.const_b)
self.assertEqual(self.const_a, const_a_prime)


class DirectoryTest(unittest.TestCase):

@@ -254,6 +254,19 @@ class RegistrationTest(unittest.TestCase):
from acme.messages import Registration
hash(Registration.from_json(self.jobj_from))

def test_default_not_transmitted(self):
from acme.messages import NewRegistration
empty_new_reg = NewRegistration()
new_reg_with_contact = NewRegistration(contact=())

self.assertEqual(empty_new_reg.contact, ())
self.assertEqual(new_reg_with_contact.contact, ())

self.assertTrue('contact' not in empty_new_reg.to_partial_json())
self.assertTrue('contact' not in empty_new_reg.fields_to_partial_json())
self.assertTrue('contact' in new_reg_with_contact.to_partial_json())
self.assertTrue('contact' in new_reg_with_contact.fields_to_partial_json())


class UpdateRegistrationTest(unittest.TestCase):
"""Tests for acme.messages.UpdateRegistration."""
@@ -225,7 +225,8 @@ def _get_runtime_cfg(command):
|
||||
command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True)
|
||||
universal_newlines=True,
|
||||
env=util.env_no_snap_for_external_calls())
|
||||
stdout, stderr = proc.communicate()
|
||||
|
||||
except (OSError, ValueError):
|
||||
|
||||
@@ -6,7 +6,7 @@ Authors:
|
||||
Raphael Pinson <raphink@gmail.com>
|
||||
|
||||
About: Reference
|
||||
Online Apache configuration manual: http://httpd.apache.org/docs/trunk/
|
||||
Online Apache configuration manual: https://httpd.apache.org/docs/trunk/
|
||||
|
||||
About: License
|
||||
This file is licensed under the LGPL v2+.
|
||||
|
||||
@@ -9,7 +9,6 @@ import re
|
||||
import socket
|
||||
import time
|
||||
|
||||
import six
|
||||
import zope.component
|
||||
import zope.interface
|
||||
try:
|
||||
@@ -328,6 +327,9 @@ class ApacheConfigurator(common.Installer):
|
||||
if self.version < (2, 2):
|
||||
raise errors.NotSupportedError(
|
||||
"Apache Version {0} not supported.".format(str(self.version)))
|
||||
elif self.version < (2, 4):
|
||||
logger.warning('Support for Apache 2.2 is deprecated and will be removed in a '
|
||||
'future release.')
|
||||
|
||||
# Recover from previous crash before Augeas initialization to have the
|
||||
# correct parse tree from the get go.
|
||||
@@ -464,21 +466,6 @@ class ApacheConfigurator(common.Installer):
|
||||
metadata=metadata
|
||||
)
|
||||
|
||||
def _wildcard_domain(self, domain):
|
||||
"""
|
||||
Checks if domain is a wildcard domain
|
||||
|
||||
:param str domain: Domain to check
|
||||
|
||||
:returns: If the domain is wildcard domain
|
||||
:rtype: bool
|
||||
"""
|
||||
if isinstance(domain, six.text_type):
|
||||
wildcard_marker = u"*."
|
||||
else:
|
||||
wildcard_marker = b"*."
|
||||
return domain.startswith(wildcard_marker)
|
||||
|
||||
def deploy_cert(self, domain, cert_path, key_path,
|
||||
chain_path=None, fullchain_path=None):
|
||||
"""Deploys certificate to specified virtual host.
|
||||
@@ -513,7 +500,7 @@ class ApacheConfigurator(common.Installer):
|
||||
:rtype: `list` of :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||
"""
|
||||
|
||||
if self._wildcard_domain(domain):
|
||||
if util.is_wildcard_domain(domain):
|
||||
if domain in self._wildcard_vhosts:
|
||||
# Vhosts for a wildcard domain were already selected
|
||||
return self._wildcard_vhosts[domain]
|
||||
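For the `_wildcard_domain` removal above, the replacement helper lives in certbot's shared util module; a quick hedged sketch of the equivalent calls (assuming `certbot.util.is_wildcard_domain`, as the diff suggests):

```python
from certbot import util

# Mirrors the removed ApacheConfigurator._wildcard_domain helper: both str
# and bytes domains are accepted, and only a leading "*." counts.
print(util.is_wildcard_domain(u"*.example.org"))    # True
print(util.is_wildcard_domain(b"a.x.example.org"))  # False
```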
@@ -1462,7 +1449,7 @@ class ApacheConfigurator(common.Installer):
|
||||
if not line.lower().lstrip().startswith("rewriterule"):
|
||||
return False
|
||||
|
||||
# According to: http://httpd.apache.org/docs/2.4/rewrite/flags.html
|
||||
# According to: https://httpd.apache.org/docs/2.4/rewrite/flags.html
|
||||
# The syntax of a RewriteRule is:
|
||||
# RewriteRule pattern target [Flag1,Flag2,Flag3]
|
||||
# i.e. target is required, so it must exist.
|
||||
|
||||
@@ -14,10 +14,10 @@ class OpenSUSEConfigurator(configurator.ApacheConfigurator):
|
||||
vhost_root="/etc/apache2/vhosts.d",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
ctl="apachectl",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod="a2enmod",
|
||||
dismod="a2dismod",
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
|
||||
@@ -731,7 +731,6 @@ class ApacheParser(object):
|
||||
privileged users.
|
||||
|
||||
https://apr.apache.org/docs/apr/2.0/apr__fnmatch_8h_source.html
|
||||
http://apache2.sourcearchive.com/documentation/2.2.16-6/apr__fnmatch_8h_source.html
|
||||
|
||||
:param str clean_fn_match: Apache style filename match, like globs
|
||||
|
||||
@@ -799,7 +798,7 @@ class ApacheParser(object):
|
||||
def _parsed_by_parser_paths(self, filep, paths):
|
||||
"""Helper function that searches through provided paths and returns
|
||||
True if file path is found in the set"""
|
||||
for directory in paths.keys():
|
||||
for directory in paths:
|
||||
for filename in paths[directory]:
|
||||
if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
|
||||
return True
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
# Remember to update setup.py to match the package versions below.
|
||||
acme[dev]==0.29.0
|
||||
-e certbot[dev]
|
||||
certbot[dev]==1.6.0
|
||||
@@ -1,25 +1,24 @@
|
||||
from distutils.version import StrictVersion
|
||||
from distutils.version import LooseVersion
|
||||
import sys
|
||||
|
||||
from setuptools import __version__ as setuptools_version
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
from setuptools.command.test import test as TestCommand
|
||||
|
||||
version = '1.6.0.dev0'
|
||||
version = '1.12.0.dev0'
|
||||
|
||||
# Remember to update local-oldest-requirements.txt when changing the minimum
|
||||
# acme/certbot version.
|
||||
install_requires = [
|
||||
'acme>=0.29.0',
|
||||
'certbot>=1.6.0.dev0',
|
||||
'certbot>=1.6.0',
|
||||
'python-augeas',
|
||||
'setuptools',
|
||||
'setuptools>=39.0.1',
|
||||
'zope.component',
|
||||
'zope.interface',
|
||||
]
|
||||
|
||||
setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
|
||||
setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
|
||||
if setuptools_known_environment_markers:
|
||||
install_requires.append('mock ; python_version < "3.3"')
|
||||
elif 'bdist_wheel' in sys.argv[1:]:
|
||||
@@ -32,21 +31,6 @@ dev_extras = [
|
||||
'apacheconfig>=0.3.2',
|
||||
]
|
||||
|
||||
class PyTest(TestCommand):
|
||||
user_options = []
|
||||
|
||||
def initialize_options(self):
|
||||
TestCommand.initialize_options(self)
|
||||
self.pytest_args = ''
|
||||
|
||||
def run_tests(self):
|
||||
import shlex
|
||||
# import here, cause outside the eggs aren't loaded
|
||||
import pytest
|
||||
errno = pytest.main(shlex.split(self.pytest_args))
|
||||
sys.exit(errno)
|
||||
|
||||
|
||||
setup(
|
||||
name='certbot-apache',
|
||||
version=version,
|
||||
@@ -55,7 +39,7 @@ setup(
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
|
||||
python_requires='>=3.6',
|
||||
classifiers=[
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Environment :: Plugins',
|
||||
@@ -63,13 +47,11 @@ setup(
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
@@ -89,7 +71,4 @@ setup(
|
||||
'apache = certbot_apache._internal.entrypoint:ENTRYPOINT',
|
||||
],
|
||||
},
|
||||
test_suite='certbot_apache',
|
||||
tests_require=["pytest"],
|
||||
cmdclass={"test": PyTest},
|
||||
)
|
||||
|
||||
@@ -52,7 +52,7 @@ function Cleanup() {
|
||||
# if our environment asks us to enable modules, do our best!
|
||||
if [ "$1" = --debian-modules ] ; then
|
||||
sudo apt-get install -y apache2
|
||||
sudo apt-get install -y libapache2-mod-wsgi
|
||||
sudo apt-get install -y libapache2-mod-wsgi-py3
|
||||
sudo apt-get install -y libapache2-mod-macro
|
||||
|
||||
for mod in ssl rewrite macro wsgi deflate userdir version mime setenvif ; do
|
||||
|
||||
@@ -140,7 +140,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
self.assertEqual(mock_get.call_count, 3)
|
||||
self.assertEqual(len(self.config.parser.modules), 4)
|
||||
self.assertEqual(len(self.config.parser.variables), 2)
|
||||
self.assertTrue("TEST2" in self.config.parser.variables.keys())
|
||||
self.assertTrue("TEST2" in self.config.parser.variables)
|
||||
self.assertTrue("mod_another.c" in self.config.parser.modules)
|
||||
|
||||
def test_get_virtual_hosts(self):
|
||||
@@ -172,11 +172,11 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
mock_osi.return_value = ("centos", "7")
|
||||
self.config.parser.update_runtime_variables()
|
||||
|
||||
self.assertTrue("mock_define" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_value" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_define" in self.config.parser.variables)
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables)
|
||||
self.assertTrue("mock_value" in self.config.parser.variables)
|
||||
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
|
||||
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
|
||||
|
||||
@mock.patch("certbot_apache._internal.configurator.util.run_script")
|
||||
|
||||
@@ -1337,13 +1337,6 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.enable_mod,
|
||||
"whatever")
|
||||
|
||||
def test_wildcard_domain(self):
|
||||
# pylint: disable=protected-access
|
||||
cases = {u"*.example.org": True, b"*.x.example.org": True,
|
||||
u"a.example.org": False, b"a.x.example.org": False}
|
||||
for key in cases:
|
||||
self.assertEqual(self.config._wildcard_domain(key), cases[key])
|
||||
|
||||
def test_choose_vhosts_wildcard(self):
|
||||
# pylint: disable=protected-access
|
||||
mock_path = "certbot_apache._internal.display_ops.select_vhost_multiple"
|
||||
@@ -1357,10 +1350,10 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# And the actual returned values
|
||||
self.assertEqual(len(vhs), 1)
|
||||
self.assertTrue(vhs[0].name == "certbot.demo")
|
||||
self.assertEqual(vhs[0].name, "certbot.demo")
|
||||
self.assertTrue(vhs[0].ssl)
|
||||
|
||||
self.assertFalse(vhs[0] == self.vh_truth[3])
|
||||
self.assertNotEqual(vhs[0], self.vh_truth[3])
|
||||
|
||||
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.make_vhost_ssl")
|
||||
def test_choose_vhosts_wildcard_no_ssl(self, mock_makessl):
|
||||
@@ -1471,10 +1464,10 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.parser.aug.match = mock_match
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 2)
|
||||
self.assertTrue(vhs[0] == self.vh_truth[1])
|
||||
self.assertEqual(vhs[0], self.vh_truth[1])
|
||||
# mock_vhost should have replaced the vh_truth[0], because its filepath
|
||||
# isn't a symlink
|
||||
self.assertTrue(vhs[1] == mock_vhost)
|
||||
self.assertEqual(vhs[1], mock_vhost)
|
||||
|
||||
|
||||
class AugeasVhostsTest(util.ApacheTest):
|
||||
|
||||
@@ -412,9 +412,9 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
|
||||
ancestor=self.block,
|
||||
filepath="/path/to/whatever",
|
||||
metadata=self.metadata)
|
||||
self.assertFalse(self.block == ne_block)
|
||||
self.assertFalse(self.directive == ne_directive)
|
||||
self.assertFalse(self.comment == ne_comment)
|
||||
self.assertNotEqual(self.block, ne_block)
|
||||
self.assertNotEqual(self.directive, ne_directive)
|
||||
self.assertNotEqual(self.comment, ne_comment)
|
||||
|
||||
def test_parsed_paths(self):
|
||||
mock_p = mock.MagicMock(return_value=['/path/file.conf',
|
||||
|
||||
@@ -134,7 +134,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
|
||||
self.assertEqual(mock_get.call_count, 3)
|
||||
self.assertEqual(len(self.config.parser.modules), 4)
|
||||
self.assertEqual(len(self.config.parser.variables), 2)
|
||||
self.assertTrue("TEST2" in self.config.parser.variables.keys())
|
||||
self.assertTrue("TEST2" in self.config.parser.variables)
|
||||
self.assertTrue("mod_another.c" in self.config.parser.modules)
|
||||
|
||||
@mock.patch("certbot_apache._internal.configurator.util.run_script")
|
||||
@@ -172,11 +172,11 @@ class MultipleVhostsTestFedora(util.ApacheTest):
|
||||
mock_osi.return_value = ("fedora", "29")
|
||||
self.config.parser.update_runtime_variables()
|
||||
|
||||
self.assertTrue("mock_define" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_value" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_define" in self.config.parser.variables)
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables)
|
||||
self.assertTrue("mock_value" in self.config.parser.variables)
|
||||
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
|
||||
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
|
||||
|
||||
@mock.patch("certbot_apache._internal.configurator.util.run_script")
|
||||
|
||||
@@ -91,7 +91,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
|
||||
with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_modules"):
|
||||
self.config.parser.update_runtime_variables()
|
||||
for define in defines:
|
||||
self.assertTrue(define in self.config.parser.variables.keys())
|
||||
self.assertTrue(define in self.config.parser.variables)
|
||||
|
||||
@mock.patch("certbot_apache._internal.apache_util.parse_from_subprocess")
|
||||
def test_no_binary_configdump(self, mock_subprocess):
|
||||
|
||||
@@ -27,14 +27,14 @@ class VirtualHostTest(unittest.TestCase):
|
||||
"certbot_apache._internal.obj.Addr(('127.0.0.1', '443'))")
|
||||
|
||||
def test_eq(self):
|
||||
self.assertTrue(self.vhost1b == self.vhost1)
|
||||
self.assertFalse(self.vhost1 == self.vhost2)
|
||||
self.assertEqual(self.vhost1b, self.vhost1)
|
||||
self.assertNotEqual(self.vhost1, self.vhost2)
|
||||
self.assertEqual(str(self.vhost1b), str(self.vhost1))
|
||||
self.assertFalse(self.vhost1b == 1234)
|
||||
self.assertNotEqual(self.vhost1b, 1234)
|
||||
|
||||
def test_ne(self):
|
||||
self.assertTrue(self.vhost1 != self.vhost2)
|
||||
self.assertFalse(self.vhost1 != self.vhost1b)
|
||||
self.assertNotEqual(self.vhost1, self.vhost2)
|
||||
self.assertEqual(self.vhost1, self.vhost1b)
|
||||
|
||||
def test_conflicts(self):
|
||||
from certbot_apache._internal.obj import Addr
|
||||
@@ -128,13 +128,13 @@ class AddrTest(unittest.TestCase):
|
||||
self.assertTrue(self.addr1.conflicts(self.addr2))
|
||||
|
||||
def test_equal(self):
|
||||
self.assertTrue(self.addr1 == self.addr2)
|
||||
self.assertFalse(self.addr == self.addr1)
|
||||
self.assertFalse(self.addr == 123)
|
||||
self.assertEqual(self.addr1, self.addr2)
|
||||
self.assertNotEqual(self.addr, self.addr1)
|
||||
self.assertNotEqual(self.addr, 123)
|
||||
|
||||
def test_not_equal(self):
|
||||
self.assertFalse(self.addr1 != self.addr2)
|
||||
self.assertTrue(self.addr != self.addr1)
|
||||
self.assertEqual(self.addr1, self.addr2)
|
||||
self.assertNotEqual(self.addr, self.addr1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -26,8 +26,6 @@ class ApacheTest(unittest.TestCase):
|
||||
config_root="debian_apache_2_4/multiple_vhosts/apache2",
|
||||
vhost_root="debian_apache_2_4/multiple_vhosts/apache2/sites-available"):
|
||||
# pylint: disable=arguments-differ
|
||||
super(ApacheTest, self).setUp()
|
||||
|
||||
self.temp_dir, self.config_dir, self.work_dir = common.dir_setup(
|
||||
test_dir=test_dir,
|
||||
pkg=__name__)
|
||||
|
||||
certbot-auto
@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
|
||||
fi
|
||||
VENV_BIN="$VENV_PATH/bin"
|
||||
BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
|
||||
LE_AUTO_VERSION="1.5.0"
|
||||
LE_AUTO_VERSION="1.11.0"
|
||||
BASENAME=$(basename $0)
|
||||
USAGE="Usage: $BASENAME [OPTIONS]
|
||||
A self-updating wrapper script for the Certbot ACME client. When run, updates
|
||||
@@ -258,7 +258,7 @@ DeprecationBootstrap() {
|
||||
|
||||
MIN_PYTHON_2_VERSION="2.7"
|
||||
MIN_PYVER2=$(echo "$MIN_PYTHON_2_VERSION" | sed 's/\.//')
|
||||
MIN_PYTHON_3_VERSION="3.5"
|
||||
MIN_PYTHON_3_VERSION="3.6"
|
||||
MIN_PYVER3=$(echo "$MIN_PYTHON_3_VERSION" | sed 's/\.//')
|
||||
# Sets LE_PYTHON to Python version string and PYVER to the first two
|
||||
# digits of the python version.
|
||||
@@ -799,18 +799,12 @@ BootstrapMageiaCommon() {
|
||||
# that function. If Bootstrap is set to a function that doesn't install any
|
||||
# packages BOOTSTRAP_VERSION is not set.
|
||||
if [ -f /etc/debian_version ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "Debian-based OSes"
|
||||
BootstrapDebCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapDebCommon $BOOTSTRAP_DEB_COMMON_VERSION"
|
||||
DEPRECATED_OS=1
|
||||
elif [ -f /etc/mageia-release ]; then
|
||||
# Mageia has both /etc/mageia-release and /etc/redhat-release
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Mageia" BootstrapMageiaCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapMageiaCommon $BOOTSTRAP_MAGEIA_COMMON_VERSION"
|
||||
DEPRECATED_OS=1
|
||||
elif [ -f /etc/redhat-release ]; then
|
||||
DEPRECATED_OS=1
|
||||
# Run DeterminePythonVersion to decide on the basis of available Python versions
|
||||
# whether to use 2.x or 3.x on RedHat-like systems.
|
||||
# Then, revert LE_PYTHON to its previous state.
|
||||
@@ -843,12 +837,7 @@ elif [ -f /etc/redhat-release ]; then
|
||||
INTERACTIVE_BOOTSTRAP=1
|
||||
fi
|
||||
|
||||
Bootstrap() {
|
||||
BootstrapMessage "Legacy RedHat-based OSes that will use Python3"
|
||||
BootstrapRpmPython3Legacy
|
||||
}
|
||||
USE_PYTHON_3=1
|
||||
BOOTSTRAP_VERSION="BootstrapRpmPython3Legacy $BOOTSTRAP_RPM_PYTHON3_LEGACY_VERSION"
|
||||
|
||||
# Try now to enable SCL rh-python36 for systems already bootstrapped
|
||||
# NB: EnablePython36SCL has been defined along with BootstrapRpmPython3Legacy in certbot-auto
|
||||
@@ -867,48 +856,17 @@ elif [ -f /etc/redhat-release ]; then
|
||||
fi
|
||||
|
||||
if [ "$RPM_USE_PYTHON_3" = 1 ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes that will use Python3"
|
||||
BootstrapRpmPython3
|
||||
}
|
||||
USE_PYTHON_3=1
|
||||
BOOTSTRAP_VERSION="BootstrapRpmPython3 $BOOTSTRAP_RPM_PYTHON3_VERSION"
|
||||
else
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes"
|
||||
BootstrapRpmCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
fi
|
||||
fi
|
||||
|
||||
LE_PYTHON="$prev_le_python"
|
||||
elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "openSUSE-based OSes"
|
||||
BootstrapSuseCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapSuseCommon $BOOTSTRAP_SUSE_COMMON_VERSION"
|
||||
DEPRECATED_OS=1
|
||||
elif [ -f /etc/arch-release ]; then
|
||||
Bootstrap() {
|
||||
if [ "$DEBUG" = 1 ]; then
|
||||
BootstrapMessage "Archlinux"
|
||||
BootstrapArchCommon
|
||||
else
|
||||
error "Please use pacman to install letsencrypt packages:"
|
||||
error "# pacman -S certbot certbot-apache"
|
||||
error
|
||||
error "If you would like to use the virtualenv way, please run the script again with the"
|
||||
error "--debug flag."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
|
||||
DEPRECATED_OS=1
|
||||
elif [ -f /etc/manjaro-release ]; then
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Manjaro Linux" BootstrapArchCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
|
||||
DEPRECATED_OS=1
|
||||
elif [ -f /etc/gentoo-release ]; then
|
||||
DEPRECATED_OS=1
|
||||
elif uname | grep -iq FreeBSD ; then
|
||||
@@ -916,24 +874,11 @@ elif uname | grep -iq FreeBSD ; then
|
||||
elif uname | grep -iq Darwin ; then
|
||||
DEPRECATED_OS=1
|
||||
elif [ -f /etc/issue ] && grep -iq "Amazon Linux" /etc/issue ; then
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Amazon Linux" BootstrapRpmCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
DEPRECATED_OS=1
|
||||
elif [ -f /etc/product ] && grep -q "Joyent Instance" /etc/product ; then
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Joyent SmartOS Zone" BootstrapSmartOS
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapSmartOS $BOOTSTRAP_SMARTOS_VERSION"
|
||||
DEPRECATED_OS=1
|
||||
else
|
||||
Bootstrap() {
|
||||
error "Sorry, I don't know how to bootstrap Certbot on your operating system!"
|
||||
error
|
||||
error "You will need to install OS dependencies, configure virtualenv, and run pip install manually."
|
||||
error "Please see https://letsencrypt.readthedocs.org/en/latest/contributing.html#prerequisites"
|
||||
error "for more info."
|
||||
exit 1
|
||||
}
|
||||
DEPRECATED_OS=1
|
||||
fi
|
||||
|
||||
# We handle this case after determining the normal bootstrap version to allow
|
||||
@@ -1265,45 +1210,40 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
# pip install hashin
|
||||
# hashin -r dependency-requirements.txt cryptography==1.5.2
|
||||
# ```
|
||||
ConfigArgParse==1.0 \
|
||||
--hash=sha256:bf378245bc9cdc403a527e5b7406b991680c2a530e7e81af747880b54eb57133
|
||||
certifi==2019.11.28 \
|
||||
--hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \
|
||||
--hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f
|
||||
cffi==1.13.2 \
|
||||
--hash=sha256:0b49274afc941c626b605fb59b59c3485c17dc776dc3cc7cc14aca74cc19cc42 \
|
||||
--hash=sha256:0e3ea92942cb1168e38c05c1d56b0527ce31f1a370f6117f1d490b8dcd6b3a04 \
|
||||
--hash=sha256:135f69aecbf4517d5b3d6429207b2dff49c876be724ac0c8bf8e1ea99df3d7e5 \
|
||||
--hash=sha256:19db0cdd6e516f13329cba4903368bff9bb5a9331d3410b1b448daaadc495e54 \
|
||||
--hash=sha256:2781e9ad0e9d47173c0093321bb5435a9dfae0ed6a762aabafa13108f5f7b2ba \
|
||||
--hash=sha256:291f7c42e21d72144bb1c1b2e825ec60f46d0a7468f5346841860454c7aa8f57 \
|
||||
--hash=sha256:2c5e309ec482556397cb21ede0350c5e82f0eb2621de04b2633588d118da4396 \
|
||||
--hash=sha256:2e9c80a8c3344a92cb04661115898a9129c074f7ab82011ef4b612f645939f12 \
|
||||
--hash=sha256:32a262e2b90ffcfdd97c7a5e24a6012a43c61f1f5a57789ad80af1d26c6acd97 \
|
||||
--hash=sha256:3c9fff570f13480b201e9ab69453108f6d98244a7f495e91b6c654a47486ba43 \
|
||||
--hash=sha256:415bdc7ca8c1c634a6d7163d43fb0ea885a07e9618a64bda407e04b04333b7db \
|
||||
--hash=sha256:42194f54c11abc8583417a7cf4eaff544ce0de8187abaf5d29029c91b1725ad3 \
|
||||
--hash=sha256:4424e42199e86b21fc4db83bd76909a6fc2a2aefb352cb5414833c030f6ed71b \
|
||||
--hash=sha256:4a43c91840bda5f55249413037b7a9b79c90b1184ed504883b72c4df70778579 \
|
||||
--hash=sha256:599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346 \
|
||||
--hash=sha256:5c4fae4e9cdd18c82ba3a134be256e98dc0596af1e7285a3d2602c97dcfa5159 \
|
||||
--hash=sha256:5ecfa867dea6fabe2a58f03ac9186ea64da1386af2159196da51c4904e11d652 \
|
||||
--hash=sha256:62f2578358d3a92e4ab2d830cd1c2049c9c0d0e6d3c58322993cc341bdeac22e \
|
||||
--hash=sha256:6471a82d5abea994e38d2c2abc77164b4f7fbaaf80261cb98394d5793f11b12a \
|
||||
--hash=sha256:6d4f18483d040e18546108eb13b1dfa1000a089bcf8529e30346116ea6240506 \
|
||||
--hash=sha256:71a608532ab3bd26223c8d841dde43f3516aa5d2bf37b50ac410bb5e99053e8f \
|
||||
--hash=sha256:74a1d8c85fb6ff0b30fbfa8ad0ac23cd601a138f7509dc617ebc65ef305bb98d \
|
||||
--hash=sha256:7b93a885bb13073afb0aa73ad82059a4c41f4b7d8eb8368980448b52d4c7dc2c \
|
||||
--hash=sha256:7d4751da932caaec419d514eaa4215eaf14b612cff66398dd51129ac22680b20 \
|
||||
--hash=sha256:7f627141a26b551bdebbc4855c1157feeef18241b4b8366ed22a5c7d672ef858 \
|
||||
--hash=sha256:8169cf44dd8f9071b2b9248c35fc35e8677451c52f795daa2bb4643f32a540bc \
|
||||
--hash=sha256:aa00d66c0fab27373ae44ae26a66a9e43ff2a678bf63a9c7c1a9a4d61172827a \
|
||||
--hash=sha256:ccb032fda0873254380aa2bfad2582aedc2959186cce61e3a17abc1a55ff89c3 \
|
||||
--hash=sha256:d754f39e0d1603b5b24a7f8484b22d2904fa551fe865fd0d4c3332f078d20d4e \
|
||||
--hash=sha256:d75c461e20e29afc0aee7172a0950157c704ff0dd51613506bd7d82b718e7410 \
|
||||
--hash=sha256:dcd65317dd15bc0451f3e01c80da2216a31916bdcffd6221ca1202d96584aa25 \
|
||||
--hash=sha256:e570d3ab32e2c2861c4ebe6ffcad6a8abf9347432a37608fe1fbd157b3f0036b \
|
||||
--hash=sha256:fd43a88e045cf992ed09fa724b5315b790525f2676883a6ea64e3263bae6549d
|
||||
ConfigArgParse==1.2.3 \
|
||||
--hash=sha256:edd17be986d5c1ba2e307150b8e5f5107aba125f3574dddd02c85d5cdcfd37dc
|
||||
certifi==2020.4.5.1 \
|
||||
--hash=sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304 \
|
||||
--hash=sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519
|
||||
cffi==1.14.0 \
|
||||
--hash=sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff \
|
||||
--hash=sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b \
|
||||
--hash=sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac \
|
||||
--hash=sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0 \
|
||||
--hash=sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384 \
|
||||
--hash=sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26 \
|
||||
--hash=sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6 \
|
||||
--hash=sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b \
|
||||
--hash=sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e \
|
||||
--hash=sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd \
|
||||
--hash=sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2 \
|
||||
--hash=sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66 \
|
||||
--hash=sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc \
|
||||
--hash=sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8 \
|
||||
--hash=sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55 \
|
||||
--hash=sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4 \
|
||||
--hash=sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5 \
|
||||
--hash=sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d \
|
||||
--hash=sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78 \
|
||||
--hash=sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa \
|
||||
--hash=sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793 \
|
||||
--hash=sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f \
|
||||
--hash=sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a \
|
||||
--hash=sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f \
|
||||
--hash=sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30 \
|
||||
--hash=sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f \
|
||||
--hash=sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3 \
|
||||
--hash=sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c
|
||||
chardet==3.0.4 \
|
||||
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
|
||||
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
|
||||
@@ -1331,67 +1271,66 @@ cryptography==2.8 \
|
||||
--hash=sha256:df6b4dca2e11865e6cfbfb708e800efb18370f5a46fd601d3755bc7f85b3a8a2 \
|
||||
--hash=sha256:ecadccc7ba52193963c0475ac9f6fa28ac01e01349a2ca48509667ef41ffd2cf \
|
||||
--hash=sha256:fb81c17e0ebe3358486cd8cc3ad78adbae58af12fc2bf2bc0bb84e8090fa5ce8
|
||||
distro==1.4.0 \
|
||||
--hash=sha256:362dde65d846d23baee4b5c058c8586f219b5a54be1cf5fc6ff55c4578392f57 \
|
||||
--hash=sha256:eedf82a470ebe7d010f1872c17237c79ab04097948800029994fa458e52fb4b4
|
||||
# Package enum34 needs to be explicitly limited to Python2.x, in order to avoid
|
||||
# certbot-auto failures on Python 3.6+ which enum34 doesn't support. See #5456.
|
||||
enum34==1.1.6 ; python_version < '3.4' \
|
||||
--hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
|
||||
--hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
|
||||
--hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
|
||||
--hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1
|
||||
distro==1.5.0 \
|
||||
--hash=sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92 \
|
||||
--hash=sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799
|
||||
enum34==1.1.10; python_version < '3.4' \
|
||||
--hash=sha256:a98a201d6de3f2ab3db284e70a33b0f896fbf35f8086594e8c9e74b909058d53 \
|
||||
--hash=sha256:c3858660960c984d6ab0ebad691265180da2b43f07e061c0f8dca9ef3cffd328 \
|
||||
--hash=sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248
|
||||
funcsigs==1.0.2 \
|
||||
--hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
|
||||
--hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50
|
||||
idna==2.8 \
|
||||
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
|
||||
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
|
||||
idna==2.9 \
|
||||
--hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \
|
||||
--hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa
|
||||
ipaddress==1.0.23 \
|
||||
--hash=sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc \
|
||||
--hash=sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2
|
||||
josepy==1.2.0 \
|
||||
--hash=sha256:8ea15573203f28653c00f4ac0142520777b1c59d9eddd8da3f256c6ba3cac916 \
|
||||
--hash=sha256:9cec9a839fe9520f0420e4f38e7219525daccce4813296627436fe444cd002d3
|
||||
josepy==1.3.0 \
|
||||
--hash=sha256:c341ffa403399b18e9eae9012f804843045764d1390f9cb4648980a7569b1619 \
|
||||
--hash=sha256:e54882c64be12a2a76533f73d33cba9e331950fda9e2731e843490b774e7a01c
|
||||
mock==1.3.0 \
|
||||
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6 \
|
||||
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb
|
||||
parsedatetime==2.5 \
|
||||
--hash=sha256:3b835fc54e472c17ef447be37458b400e3fefdf14bb1ffdedb5d2c853acf4ba1 \
|
||||
--hash=sha256:d2e9ddb1e463de871d32088a3f3cea3dc8282b1b2800e081bd0ef86900451667
|
||||
pbr==5.4.4 \
|
||||
--hash=sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b \
|
||||
--hash=sha256:61aa52a0f18b71c5cc58232d2cf8f8d09cd67fcad60b742a60124cb8d6951488
|
||||
pbr==5.4.5 \
|
||||
--hash=sha256:07f558fece33b05caf857474a366dfcc00562bca13dd8b47b2b3e22d9f9bf55c \
|
||||
--hash=sha256:579170e23f8e0c2f24b0de612f71f648eccb79fb1322c814ae6b3c07b5ba23e8
|
||||
pyOpenSSL==19.1.0 \
|
||||
--hash=sha256:621880965a720b8ece2f1b2f54ea2071966ab00e2970ad2ce11d596102063504 \
|
||||
--hash=sha256:9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507
|
||||
pyRFC3339==1.1 \
|
||||
--hash=sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4 \
|
||||
--hash=sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a
|
||||
pycparser==2.19 \
|
||||
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3
|
||||
pyparsing==2.4.6 \
|
||||
--hash=sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f \
|
||||
--hash=sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec
|
||||
pycparser==2.20 \
|
||||
--hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \
|
||||
--hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705
|
||||
pyparsing==2.4.7 \
|
||||
--hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \
|
||||
--hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b
|
||||
python-augeas==0.5.0 \
|
||||
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2
|
||||
pytz==2019.3 \
|
||||
--hash=sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d \
|
||||
--hash=sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be
|
||||
requests==2.22.0 \
|
||||
--hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \
|
||||
--hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31
|
||||
pytz==2020.1 \
|
||||
--hash=sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed \
|
||||
--hash=sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048
|
||||
requests==2.23.0 \
|
||||
--hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \
|
||||
--hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6
|
||||
requests-toolbelt==0.9.1 \
|
||||
--hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
|
||||
--hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0
|
||||
six==1.14.0 \
|
||||
--hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \
|
||||
--hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c
|
||||
urllib3==1.25.8 \
|
||||
--hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \
|
||||
--hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc
|
||||
zope.component==4.6 \
|
||||
--hash=sha256:ec2afc5bbe611dcace98bb39822c122d44743d635dafc7315b9aef25097db9e6
|
||||
six==1.15.0 \
|
||||
--hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \
|
||||
--hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced
|
||||
urllib3==1.25.9 \
|
||||
--hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \
|
||||
--hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115
|
||||
zope.component==4.6.1 \
|
||||
--hash=sha256:bfbe55d4a93e70a78b10edc3aad4de31bb8860919b7cbd8d66f717f7d7b279ac \
|
||||
--hash=sha256:d9c7c27673d787faff8a83797ce34d6ebcae26a370e25bddb465ac2182766aca
|
||||
zope.deferredimport==4.3.1 \
|
||||
--hash=sha256:57b2345e7b5eef47efcd4f634ff16c93e4265de3dcf325afc7315ade48d909e1 \
|
||||
--hash=sha256:9a0c211df44aa95f1c4e6d2626f90b400f56989180d3ef96032d708da3d23e0a
|
||||
@@ -1401,126 +1340,129 @@ zope.deprecation==4.4.0 \
|
||||
zope.event==4.4 \
|
||||
--hash=sha256:69c27debad9bdacd9ce9b735dad382142281ac770c4a432b533d6d65c4614bcf \
|
||||
--hash=sha256:d8e97d165fd5a0997b45f5303ae11ea3338becfe68c401dd88ffd2113fe5cae7
|
||||
zope.hookable==5.0.0 \
|
||||
--hash=sha256:0992a0dd692003c09fb958e1480cebd1a28f2ef32faa4857d864f3ca8e9d6952 \
|
||||
--hash=sha256:0f325838dbac827a1e2ed5d482c1f2656b6844dc96aa098f7727e76395fcd694 \
|
||||
--hash=sha256:22a317ba00f61bac99eac1a5e330be7cb8c316275a21269ec58aa396b602af0c \
|
||||
--hash=sha256:25531cb5e7b35e8a6d1d6eddef624b9a22ce5dcf8f4448ef0f165acfa8c3fc21 \
|
||||
--hash=sha256:30890892652766fc80d11f078aca9a5b8150bef6b88aba23799581a53515c404 \
|
||||
--hash=sha256:342d682d93937e5b8c232baffb32a87d5eee605d44f74566657c64a239b7f342 \
|
||||
--hash=sha256:46b2fddf1f5aeb526e02b91f7e62afbb9fff4ffd7aafc97cdb00a0d717641567 \
|
||||
--hash=sha256:523318ff96df9b8d378d997c00c5d4cbfbff68dc48ff5ee5addabdb697d27528 \
|
||||
--hash=sha256:53aa02eb8921d4e667c69d76adeed8fe426e43870c101cb08dcd2f3468aff742 \
|
||||
--hash=sha256:62e79e8fdde087cb20822d7874758f5acbedbffaf3c0fbe06309eb8a41ee4e06 \
|
||||
--hash=sha256:74bf2f757f7385b56dc3548adae508d8b3ef952d600b4b12b88f7d1706b05dcc \
|
||||
--hash=sha256:751ee9d89eb96e00c1d7048da9725ce392a708ed43406416dc5ed61e4d199764 \
|
||||
--hash=sha256:7b83bc341e682771fe810b360cd5d9c886a948976aea4b979ff214e10b8b523b \
|
||||
--hash=sha256:81eeeb27dbb0ddaed8070daee529f0d1bfe4f74c7351cce2aaca3ea287c4cc32 \
|
||||
--hash=sha256:856509191e16930335af4d773c0fc31a17bae8991eb6f167a09d5eddf25b56cc \
|
||||
--hash=sha256:8853e81fd07b18fa9193b19e070dc0557848d9945b1d2dac3b7782543458c87d \
|
||||
--hash=sha256:94506a732da2832029aecdfe6ea07eb1b70ee06d802fff34e1b3618fe7cdf026 \
|
||||
--hash=sha256:95ad874a8cc94e786969215d660143817f745225579bfe318c4676e218d3147c \
|
||||
--hash=sha256:9758ec9174966ffe5c499b6c3d149f80aa0a9238020006a2b87c6af5963fcf48 \
|
||||
--hash=sha256:a169823e331da939aa7178fc152e65699aeb78957e46c6f80ccb50ee4c3616c2 \
|
||||
--hash=sha256:a67878a798f6ca292729a28c2226592b3d000dc6ee7825d31887b553686c7ac7 \
|
||||
--hash=sha256:a9a6d9eb2319a09905670810e2de971d6c49013843700b4975e2fc0afe96c8db \
|
||||
--hash=sha256:b3e118b58a3d2301960e6f5f25736d92f6b9f861728d3b8c26d69f54d8a157d2 \
|
||||
--hash=sha256:ca6705c2a1fb5059a4efbe9f5426be4cdf71b3c9564816916fc7aa7902f19ede \
|
||||
--hash=sha256:cf711527c9d4ae72085f137caffb4be74fc007ffb17cd103628c7d5ba17e205f \
|
||||
--hash=sha256:d087602a6845ebe9d5a1c5a949fedde2c45f372d77fbce4f7fe44b68b28a1d03 \
|
||||
--hash=sha256:d1080e1074ddf75ad6662a9b34626650759c19a9093e1a32a503d37e48da135b \
|
||||
--hash=sha256:db9c60368aff2b7e6c47115f3ad9bd6e96aa298b12ed5f8cb13f5673b30be565 \
|
||||
--hash=sha256:dbeb127a04473f5a989169eb400b67beb921c749599b77650941c21fe39cb8d9 \
|
||||
--hash=sha256:dca336ca3682d869d291d7cd18284f6ff6876e4244eb1821430323056b000e2c \
|
||||
--hash=sha256:dd69a9be95346d10c853b6233fcafe3c0315b89424b378f2ad45170d8e161568 \
|
||||
--hash=sha256:dd79f8fae5894f1ee0a0042214685f2d039341250c994b825c10a4cd075d80f6 \
|
||||
--hash=sha256:e647d850aa1286d98910133cee12bd87c354f7b7bb3f3cd816a62ba7fa2f7007 \
|
||||
--hash=sha256:f37a210b5c04b2d4e4bac494ab15b70196f219a1e1649ddca78560757d4278fb \
|
||||
--hash=sha256:f67820b6d33a705dc3c1c457156e51686f7b350ff57f2112e1a9a4dad38ec268 \
|
||||
--hash=sha256:f68969978ccf0e6123902f7365aae5b7a9e99169d4b9105c47cf28e788116894 \
|
||||
--hash=sha256:f717a0b34460ae1ac0064e91b267c0588ac2c098ffd695992e72cd5462d97a67 \
|
||||
--hash=sha256:f9d58ccec8684ca276d5a4e7b0dfacca028336300a8f715d616d9f0ce9ae8096 \
|
||||
--hash=sha256:fcc3513a54e656067cbf7b98bab0d6b9534b9eabc666d1f78aad6acdf0962736
|
||||
zope.interface==4.7.1 \
|
||||
--hash=sha256:048b16ac882a05bc7ef534e8b9f15c9d7a6c190e24e8938a19b7617af4ed854a \
|
||||
--hash=sha256:05816cf8e7407cf62f2ec95c0a5d69ec4fa5741d9ccd10db9f21691916a9a098 \
|
||||
--hash=sha256:065d6a1ac89d35445168813bed45048ed4e67a4cdfc5a68fdb626a770378869f \
|
||||
--hash=sha256:14157421f4121a57625002cc4f48ac7521ea238d697c4a4459a884b62132b977 \
|
||||
--hash=sha256:18dc895945694f397a0be86be760ff664b790f95d8e7752d5bab80284ff9105d \
|
||||
--hash=sha256:1962c9f838bd6ae4075d0014f72697510daefc7e1c7e48b2607df0b6e157989c \
|
||||
--hash=sha256:1a67408cacd198c7e6274a19920bb4568d56459e659e23c4915528686ac1763a \
|
||||
--hash=sha256:21bf781076dd616bd07cf0223f79d61ab4f45176076f90bc2890e18c48195da4 \
|
||||
--hash=sha256:21c0a5d98650aebb84efa16ce2c8df1a46bdc4fe8a9e33237d0ca0b23f416ead \
|
||||
--hash=sha256:23cfeea25d1e42ff3bf4f9a0c31e9d5950aa9e7c4b12f0c4bd086f378f7b7a71 \
|
||||
--hash=sha256:24b6fce1fb71abf9f4093e3259084efcc0ef479f89356757780685bd2b06ef37 \
|
||||
--hash=sha256:24f84ce24eb6b5fcdcb38ad9761524f1ae96f7126abb5e597f8a3973d9921409 \
|
||||
--hash=sha256:25e0ef4a824017809d6d8b0ce4ab3288594ba283e4d4f94d8cfb81d73ed65114 \
|
||||
--hash=sha256:2e8fdd625e9aba31228e7ddbc36bad5c38dc3ee99a86aa420f89a290bd987ce9 \
|
||||
--hash=sha256:2f3bc2f49b67b1bea82b942d25bc958d4f4ea6709b411cb2b6b9718adf7914ce \
|
||||
--hash=sha256:35d24be9d04d50da3a6f4d61de028c1dd087045385a0ff374d93ef85af61b584 \
|
||||
--hash=sha256:35dbe4e8c73003dff40dfaeb15902910a4360699375e7b47d3c909a83ff27cd0 \
|
||||
--hash=sha256:3dfce831b824ab5cf446ed0c350b793ac6fa5fe33b984305cb4c966a86a8fb79 \
|
||||
--hash=sha256:3f7866365df5a36a7b8de8056cd1c605648f56f9a226d918ed84c85d25e8d55f \
|
||||
--hash=sha256:455cc8c01de3bac6f9c223967cea41f4449f58b4c2e724ec8177382ddd183ab4 \
|
||||
--hash=sha256:4bb937e998be9d5e345f486693e477ba79e4344674484001a0b646be1d530487 \
|
||||
--hash=sha256:52303a20902ca0888dfb83230ca3ee6fbe63c0ad1dd60aa0bba7958ccff454d8 \
|
||||
--hash=sha256:6e0a897d4e09859cc80c6a16a29697406ead752292ace17f1805126a4f63c838 \
|
||||
--hash=sha256:6e1816e7c10966330d77af45f77501f9a68818c065dec0ad11d22b50a0e212e7 \
|
||||
--hash=sha256:73b5921c5c6ce3358c836461b5470bf675601c96d5e5d8f2a446951470614f67 \
|
||||
--hash=sha256:8093cd45cdb5f6c8591cfd1af03d32b32965b0f79b94684cd0c9afdf841982bb \
|
||||
--hash=sha256:864b4a94b60db301899cf373579fd9ef92edddbf0fb2cd5ae99f53ef423ccc56 \
|
||||
--hash=sha256:8a27b4d3ea9c6d086ce8e7cdb3e8d319b6752e2a03238a388ccc83ccbe165f50 \
|
||||
--hash=sha256:91b847969d4784abd855165a2d163f72ac1e58e6dce09a5e46c20e58f19cc96d \
|
||||
--hash=sha256:b47b1028be4758c3167e474884ccc079b94835f058984b15c145966c4df64d27 \
|
||||
--hash=sha256:b68814a322835d8ad671b7acc23a3b2acecba527bb14f4b53fc925f8a27e44d8 \
|
||||
--hash=sha256:bcb50a032c3b6ec7fb281b3a83d2b31ab5246c5b119588725b1350d3a1d9f6a3 \
|
||||
--hash=sha256:c56db7d10b25ce8918b6aec6b08ac401842b47e6c136773bfb3b590753f7fb67 \
|
||||
--hash=sha256:c94b77a13d4f47883e4f97f9fa00f5feadd38af3e6b3c7be45cfdb0a14c7149b \
|
||||
--hash=sha256:db381f6fdaef483ad435f778086ccc4890120aff8df2ba5cfeeac24d280b3145 \
|
||||
--hash=sha256:e6487d01c8b7ed86af30ea141fcc4f93f8a7dde26f94177c1ad637c353bd5c07 \
|
||||
--hash=sha256:e86923fa728dfba39c5bb6046a450bd4eec8ad949ac404eca728cfce320d1732 \
|
||||
--hash=sha256:f6ca36dc1e9eeb46d779869c60001b3065fb670b5775c51421c099ea2a77c3c9 \
|
||||
--hash=sha256:fb62f2cbe790a50d95593fb40e8cca261c31a2f5637455ea39440d6457c2ba25
|
||||
zope.proxy==4.3.3 \
|
||||
--hash=sha256:04646ac04ffa9c8e32fb2b5c3cd42995b2548ea14251f3c21ca704afae88e42c \
|
||||
--hash=sha256:07b6bceea232559d24358832f1cd2ed344bbf05ca83855a5b9698b5f23c5ed60 \
|
||||
--hash=sha256:1ef452cc02e0e2f8e3c917b1a5b936ef3280f2c2ca854ee70ac2164d1655f7e6 \
|
||||
--hash=sha256:22bf61857c5977f34d4e391476d40f9a3b8c6ab24fb0cac448d42d8f8b9bf7b2 \
|
||||
--hash=sha256:299870e3428cbff1cd9f9b34144e76ecdc1d9e3192a8cf5f1b0258f47a239f58 \
|
||||
--hash=sha256:2bfc36bfccbe047671170ea5677efd3d5ab730a55d7e45611d76d495e5b96766 \
|
||||
--hash=sha256:32e82d5a640febc688c0789e15ea875bf696a10cf358f049e1ed841f01710a9b \
|
||||
--hash=sha256:3b2051bdc4bc3f02fa52483f6381cf40d4d48167645241993f9d7ebbd142ed9b \
|
||||
--hash=sha256:3f734bd8a08f5185a64fb6abb8f14dc97ec27a689ca808fb7a83cdd38d745e4f \
|
||||
--hash=sha256:3f78dd8de3112df8bbd970f0916ac876dc3fbe63810bd1cf7cc5eec4cbac4f04 \
|
||||
--hash=sha256:4eabeb48508953ba1f3590ad0773b8daea9e104eec66d661917e9bbcd7125a67 \
|
||||
--hash=sha256:4f05ecc33808187f430f249cb1ccab35c38f570b181f2d380fbe253da94b18d8 \
|
||||
--hash=sha256:4f4f4cbf23d3afc1526294a31e7b3eaa0f682cc28ac5366065dc1d6bb18bd7be \
|
||||
--hash=sha256:5483d5e70aacd06f0aa3effec9fed597c0b50f45060956eeeb1203c44d4338c3 \
|
||||
--hash=sha256:56a5f9b46892b115a75d0a1f2292431ad5988461175826600acc69a24cb3edee \
|
||||
--hash=sha256:64bb63af8a06f736927d260efdd4dfc5253d42244f281a8063e4b9eea2ddcbc5 \
|
||||
--hash=sha256:653f8cbefcf7c6ac4cece2cdef367c4faa2b7c19795d52bd7cbec11a8739a7c1 \
|
||||
--hash=sha256:664211d63306e4bd4eec35bf2b4bd9db61c394037911cf2d1804c43b511a49f1 \
|
||||
--hash=sha256:6651e6caed66a8fff0fef1a3e81c0ed2253bf361c0fdc834500488732c5d16e9 \
|
||||
--hash=sha256:6c1fba6cdfdf105739d3069cf7b07664f2944d82a8098218ab2300a82d8f40fc \
|
||||
--hash=sha256:6e64246e6e9044a4534a69dca1283c6ddab6e757be5e6874f69024329b3aa61f \
|
||||
--hash=sha256:838390245c7ec137af4993c0c8052f49d5ec79e422b4451bfa37fee9b9ccaa01 \
|
||||
--hash=sha256:856b410a14793069d8ba35f33fff667213ea66f2df25a0024cc72a7493c56d4c \
|
||||
--hash=sha256:8b932c364c1d1605a91907a41128ed0ee8a2d326fc0fafb2c55cd46f545f4599 \
|
||||
--hash=sha256:9086cf6d20f08dae7f296a78f6c77d1f8d24079d448f023ee0eb329078dd35e1 \
|
||||
--hash=sha256:9698533c14afa0548188de4968a7932d1f3f965f3f5ba1474de673596bb875af \
|
||||
--hash=sha256:9b12b05dd7c28f5068387c1afee8cb94f9d02501e7ef495a7c5c7e27139b96ad \
|
||||
--hash=sha256:a884c7426a5bc6fb7fc71a55ad14e66818e13f05b78b20a6f37175f324b7acb8 \
|
||||
--hash=sha256:abe9e7f1a3e76286c5f5baf2bf5162d41dc0310da493b34a2c36555f38d928f7 \
|
||||
--hash=sha256:bd6fde63b015a27262be06bd6bbdd895273cc2bdf2d4c7e1c83711d26a8fbace \
|
||||
--hash=sha256:bda7c62c954f47b87ed9a89f525eee1b318ec7c2162dfdba76c2ccfa334e0caa \
|
||||
--hash=sha256:be8a4908dd3f6e965993c0068b006bdbd0474fbcbd1da4893b49356e73fc1557 \
|
||||
--hash=sha256:ced65fc3c7d7205267506d854bb1815bb445899cca9d21d1d4b949070a635546 \
|
||||
--hash=sha256:dac4279aa05055d3897ab5e5ee5a7b39db121f91df65a530f8b1ac7f9bd93119 \
|
||||
--hash=sha256:e4f1863056e3e4f399c285b67fa816f411a7bfa1c81ef50e186126164e396e59 \
|
||||
--hash=sha256:ecd85f68b8cd9ab78a0141e87ea9a53b2f31fd9b1350a1c44da1f7481b5363ef \
|
||||
--hash=sha256:ed269b83750413e8fc5c96276372f49ee3fcb7ed61c49fe8e5a67f54459a5a4a \
|
||||
--hash=sha256:f19b0b80cba73b204dee68501870b11067711d21d243fb6774256d3ca2e5391f \
|
||||
--hash=sha256:ffdafb98db7574f9da84c489a10a5d582079a888cb43c64e9e6b0e3fe1034685
|
||||
zope.hookable==5.0.1 \
|
||||
--hash=sha256:0194b9b9e7f614abba60c90b231908861036578297515d3d6508eb10190f266d \
|
||||
--hash=sha256:0c2977473918bdefc6fa8dfb311f154e7f13c6133957fe649704deca79b92093 \
|
||||
--hash=sha256:17b8bdb3b77e03a152ca0d5ca185a7ae0156f5e5a2dbddf538676633a1f7380f \
|
||||
--hash=sha256:29d07681a78042cdd15b268ae9decffed9ace68a53eebeb61d65ae931d158841 \
|
||||
--hash=sha256:36fb1b35d1150267cb0543a1ddd950c0bc2c75ed0e6e92e3aaa6ac2e29416cb7 \
|
||||
--hash=sha256:3aed60c2bb5e812bbf9295c70f25b17ac37c233f30447a96c67913ba5073642f \
|
||||
--hash=sha256:3cac1565cc768911e72ca9ec4ddf5c5109e1fef0104f19f06649cf1874943b60 \
|
||||
--hash=sha256:3d4bc0cc4a37c3cd3081063142eeb2125511db3c13f6dc932d899c512690378e \
|
||||
--hash=sha256:3f73096f27b8c28be53ffb6604f7b570fbbb82f273c6febe5f58119009b59898 \
|
||||
--hash=sha256:522d1153d93f2d48aa0bd9fb778d8d4500be2e4dcf86c3150768f0e3adbbc4ef \
|
||||
--hash=sha256:523d2928fb7377bbdbc9af9c0b14ad73e6eaf226349f105733bdae27efd15b5a \
|
||||
--hash=sha256:5848309d4fc5c02150a45e8f8d2227e5bfda386a508bbd3160fed7c633c5a2fa \
|
||||
--hash=sha256:6781f86e6d54a110980a76e761eb54590630fd2af2a17d7edf02a079d2646c1d \
|
||||
--hash=sha256:6fd27921ebf3aaa945fa25d790f1f2046204f24dba4946f82f5f0a442577c3e9 \
|
||||
--hash=sha256:70d581862863f6bf9e175e85c9d70c2d7155f53fb04dcdb2f73cf288ca559a53 \
|
||||
--hash=sha256:81867c23b0dc66c8366f351d00923f2bc5902820a24c2534dfd7bf01a5879963 \
|
||||
--hash=sha256:81db29edadcbb740cd2716c95a297893a546ed89db1bfe9110168732d7f0afdd \
|
||||
--hash=sha256:86bd12624068cea60860a0759af5e2c3adc89c12aef6f71cf12f577e28deefe3 \
|
||||
--hash=sha256:9c184d8f9f7a76e1ced99855ccf390ffdd0ec3765e5cbf7b9cada600accc0a1e \
|
||||
--hash=sha256:acc789e8c29c13555e43fe4bf9fcd15a65512c9645e97bbaa5602e3201252b02 \
|
||||
--hash=sha256:afaa740206b7660d4cc3b8f120426c85761f51379af7a5b05451f624ad12b0af \
|
||||
--hash=sha256:b5f5fa323f878bb16eae68ea1ba7f6c0419d4695d0248bed4b18f51d7ce5ab85 \
|
||||
--hash=sha256:bd89e0e2c67bf4ac3aca2a19702b1a37269fb1923827f68324ac2e7afd6e3406 \
|
||||
--hash=sha256:c212de743283ec0735db24ec6ad913758df3af1b7217550ff270038062afd6ae \
|
||||
--hash=sha256:ca553f524293a0bdea05e7f44c3e685e4b7b022cb37d87bc4a3efa0f86587a8d \
|
||||
--hash=sha256:cab67065a3db92f636128d3157cc5424a145f82d96fb47159c539132833a6d36 \
|
||||
--hash=sha256:d3b3b3eedfdbf6b02898216e85aa6baf50207f4378a2a6803d6d47650cd37031 \
|
||||
--hash=sha256:d9f4a5a72f40256b686d31c5c0b1fde503172307beb12c1568296e76118e402c \
|
||||
--hash=sha256:df5067d87aaa111ed5d050e1ee853ba284969497f91806efd42425f5348f1c06 \
|
||||
--hash=sha256:e2587644812c6138f05b8a41594a8337c6790e3baf9a01915e52438c13fc6bef \
|
||||
--hash=sha256:e27fd877662db94f897f3fd532ef211ca4901eb1a70ba456f15c0866a985464a \
|
||||
--hash=sha256:e427ebbdd223c72e06ba94c004bb04e996c84dec8a0fa84e837556ae145c439e \
|
||||
--hash=sha256:e583ad4309c203ef75a09d43434cf9c2b4fa247997ecb0dcad769982c39411c7 \
|
||||
--hash=sha256:e760b2bc8ece9200804f0c2b64d10147ecaf18455a2a90827fbec4c9d84f3ad5 \
|
||||
--hash=sha256:ea9a9cc8bcc70e18023f30fa2f53d11ae069572a162791224e60cd65df55fb69 \
|
||||
--hash=sha256:ecb3f17dce4803c1099bd21742cd126b59817a4e76a6544d31d2cca6e30dbffd \
|
||||
--hash=sha256:ed794e3b3de42486d30444fb60b5561e724ee8a2d1b17b0c2e0f81e3ddaf7a87 \
|
||||
--hash=sha256:ee885d347279e38226d0a437b6a932f207f691c502ee565aba27a7022f1285df \
|
||||
--hash=sha256:fd5e7bc5f24f7e3d490698f7b854659a9851da2187414617cd5ed360af7efd63 \
|
||||
--hash=sha256:fe45f6870f7588ac7b2763ff1ce98cce59369717afe70cc353ec5218bc854bcc
|
||||
zope.interface==5.1.0 \
|
||||
--hash=sha256:0103cba5ed09f27d2e3de7e48bb320338592e2fabc5ce1432cf33808eb2dfd8b \
|
||||
--hash=sha256:14415d6979356629f1c386c8c4249b4d0082f2ea7f75871ebad2e29584bd16c5 \
|
||||
--hash=sha256:1ae4693ccee94c6e0c88a4568fb3b34af8871c60f5ba30cf9f94977ed0e53ddd \
|
||||
--hash=sha256:1b87ed2dc05cb835138f6a6e3595593fea3564d712cb2eb2de963a41fd35758c \
|
||||
--hash=sha256:269b27f60bcf45438e8683269f8ecd1235fa13e5411de93dae3b9ee4fe7f7bc7 \
|
||||
--hash=sha256:27d287e61639d692563d9dab76bafe071fbeb26818dd6a32a0022f3f7ca884b5 \
|
||||
--hash=sha256:39106649c3082972106f930766ae23d1464a73b7d30b3698c986f74bf1256a34 \
|
||||
--hash=sha256:40e4c42bd27ed3c11b2c983fecfb03356fae1209de10686d03c02c8696a1d90e \
|
||||
--hash=sha256:461d4339b3b8f3335d7e2c90ce335eb275488c587b61aca4b305196dde2ff086 \
|
||||
--hash=sha256:4f98f70328bc788c86a6a1a8a14b0ea979f81ae6015dd6c72978f1feff70ecda \
|
||||
--hash=sha256:558a20a0845d1a5dc6ff87cd0f63d7dac982d7c3be05d2ffb6322a87c17fa286 \
|
||||
--hash=sha256:562dccd37acec149458c1791da459f130c6cf8902c94c93b8d47c6337b9fb826 \
|
||||
--hash=sha256:5e86c66a6dea8ab6152e83b0facc856dc4d435fe0f872f01d66ce0a2131b7f1d \
|
||||
--hash=sha256:60a207efcd8c11d6bbeb7862e33418fba4e4ad79846d88d160d7231fcb42a5ee \
|
||||
--hash=sha256:645a7092b77fdbc3f68d3cc98f9d3e71510e419f54019d6e282328c0dd140dcd \
|
||||
--hash=sha256:6874367586c020705a44eecdad5d6b587c64b892e34305bb6ed87c9bbe22a5e9 \
|
||||
--hash=sha256:74bf0a4f9091131de09286f9a605db449840e313753949fe07c8d0fe7659ad1e \
|
||||
--hash=sha256:7b726194f938791a6691c7592c8b9e805fc6d1b9632a833b9c0640828cd49cbc \
|
||||
--hash=sha256:8149ded7f90154fdc1a40e0c8975df58041a6f693b8f7edcd9348484e9dc17fe \
|
||||
--hash=sha256:8cccf7057c7d19064a9e27660f5aec4e5c4001ffcf653a47531bde19b5aa2a8a \
|
||||
--hash=sha256:911714b08b63d155f9c948da2b5534b223a1a4fc50bb67139ab68b277c938578 \
|
||||
--hash=sha256:a5f8f85986197d1dd6444763c4a15c991bfed86d835a1f6f7d476f7198d5f56a \
|
||||
--hash=sha256:a744132d0abaa854d1aad50ba9bc64e79c6f835b3e92521db4235a1991176813 \
|
||||
--hash=sha256:af2c14efc0bb0e91af63d00080ccc067866fb8cbbaca2b0438ab4105f5e0f08d \
|
||||
--hash=sha256:b054eb0a8aa712c8e9030065a59b5e6a5cf0746ecdb5f087cca5ec7685690c19 \
|
||||
--hash=sha256:b0becb75418f8a130e9d465e718316cd17c7a8acce6fe8fe07adc72762bee425 \
|
||||
--hash=sha256:b1d2ed1cbda2ae107283befd9284e650d840f8f7568cb9060b5466d25dc48975 \
|
||||
--hash=sha256:ba4261c8ad00b49d48bbb3b5af388bb7576edfc0ca50a49c11dcb77caa1d897e \
|
||||
--hash=sha256:d1fe9d7d09bb07228650903d6a9dc48ea649e3b8c69b1d263419cc722b3938e8 \
|
||||
--hash=sha256:d7804f6a71fc2dda888ef2de266727ec2f3915373d5a785ed4ddc603bbc91e08 \
|
||||
--hash=sha256:da2844fba024dd58eaa712561da47dcd1e7ad544a257482392472eae1c86d5e5 \
|
||||
--hash=sha256:dcefc97d1daf8d55199420e9162ab584ed0893a109f45e438b9794ced44c9fd0 \
|
||||
--hash=sha256:dd98c436a1fc56f48c70882cc243df89ad036210d871c7427dc164b31500dc11 \
|
||||
--hash=sha256:e74671e43ed4569fbd7989e5eecc7d06dc134b571872ab1d5a88f4a123814e9f \
|
||||
--hash=sha256:eb9b92f456ff3ec746cd4935b73c1117538d6124b8617bc0fe6fda0b3816e345 \
|
||||
--hash=sha256:ebb4e637a1fb861c34e48a00d03cffa9234f42bef923aec44e5625ffb9a8e8f9 \
|
||||
--hash=sha256:ef739fe89e7f43fb6494a43b1878a36273e5924869ba1d866f752c5812ae8d58 \
|
||||
--hash=sha256:f40db0e02a8157d2b90857c24d89b6310f9b6c3642369852cdc3b5ac49b92afc \
|
||||
--hash=sha256:f68bf937f113b88c866d090fea0bc52a098695173fc613b055a17ff0cf9683b6 \
|
||||
--hash=sha256:fb55c182a3f7b84c1a2d6de5fa7b1a05d4660d866b91dbf8d74549c57a1499e8
|
||||
zope.proxy==4.3.5 \
|
||||
--hash=sha256:00573dfa755d0703ab84bb23cb6ecf97bb683c34b340d4df76651f97b0bab068 \
|
||||
--hash=sha256:092049280f2848d2ba1b57b71fe04881762a220a97b65288bcb0968bb199ec30 \
|
||||
--hash=sha256:0cbd27b4d3718b5ec74fc65ffa53c78d34c65c6fd9411b8352d2a4f855220cf1 \
|
||||
--hash=sha256:17fc7e16d0c81f833a138818a30f366696653d521febc8e892858041c4d88785 \
|
||||
--hash=sha256:19577dfeb70e8a67249ba92c8ad20589a1a2d86a8d693647fa8385408a4c17b0 \
|
||||
--hash=sha256:207aa914576b1181597a1516e1b90599dc690c095343ae281b0772e44945e6a4 \
|
||||
--hash=sha256:219a7db5ed53e523eb4a4769f13105118b6d5b04ed169a283c9775af221e231f \
|
||||
--hash=sha256:2b50ea79849e46b5f4f2b0247a3687505d32d161eeb16a75f6f7e6cd81936e43 \
|
||||
--hash=sha256:5903d38362b6c716e66bbe470f190579c530a5baf03dbc8500e5c2357aa569a5 \
|
||||
--hash=sha256:5c24903675e271bd688c6e9e7df5775ac6b168feb87dbe0e4bcc90805f21b28f \
|
||||
--hash=sha256:5ef6bc5ed98139e084f4e91100f2b098a0cd3493d4e76f9d6b3f7b95d7ad0f06 \
|
||||
--hash=sha256:61b55ae3c23a126a788b33ffb18f37d6668e79a05e756588d9e4d4be7246ab1c \
|
||||
--hash=sha256:63ddb992931a5e616c87d3d89f5a58db086e617548005c7f9059fac68c03a5cc \
|
||||
--hash=sha256:6943da9c09870490dcfd50c4909c0cc19f434fa6948f61282dc9cb07bcf08160 \
|
||||
--hash=sha256:6ad40f85c1207803d581d5d75e9ea25327cd524925699a83dfc03bf8e4ba72b7 \
|
||||
--hash=sha256:6b44433a79bdd7af0e3337bd7bbcf53dd1f9b0fa66bf21bcb756060ce32a96c1 \
|
||||
--hash=sha256:6bbaa245015d933a4172395baad7874373f162955d73612f0b66b6c2c33b6366 \
|
||||
--hash=sha256:7007227f4ea85b40a2f5e5a244479f6a6dfcf906db9b55e812a814a8f0e2c28d \
|
||||
--hash=sha256:74884a0aec1f1609190ec8b34b5d58fb3b5353cf22b96161e13e0e835f13518f \
|
||||
--hash=sha256:7d25fe5571ddb16369054f54cdd883f23de9941476d97f2b92eb6d7d83afe22d \
|
||||
--hash=sha256:7e162bdc5e3baad26b2262240be7d2bab36991d85a6a556e48b9dfb402370261 \
|
||||
--hash=sha256:814d62678dc3a30f4aa081982d830b7c342cf230ffc9d030b020cb154eeebf9e \
|
||||
--hash=sha256:8878a34c5313ee52e20aa50b03138af8d472bae465710fb954d133a9bfd3c38d \
|
||||
--hash=sha256:a66a0d94e5b081d5d695e66d6667e91e74d79e273eee95c1747717ba9cb70792 \
|
||||
--hash=sha256:a69f5cbf4addcfdf03dda564a671040127a6b7c34cf9fe4973582e68441b63fa \
|
||||
--hash=sha256:b00f9f0c334d07709d3f73a7cb8ae63c6ca1a90c790a63b5e7effa666ef96021 \
|
||||
--hash=sha256:b6ed71e4a7b4690447b626f499d978aa13197a0e592950e5d7020308f6054698 \
|
||||
--hash=sha256:bdf5041e5851526e885af579d2f455348dba68d74f14a32781933569a327fddf \
|
||||
--hash=sha256:be034360dd34e62608419f86e799c97d389c10a0e677a25f236a971b2f40dac9 \
|
||||
--hash=sha256:cc8f590a5eed30b314ae6b0232d925519ade433f663de79cc3783e4b10d662ba \
|
||||
--hash=sha256:cd7a318a15fe6cc4584bf3c4426f092ed08c0fd012cf2a9173114234fe193e11 \
|
||||
--hash=sha256:cf19b5f63a59c20306e034e691402b02055c8f4e38bf6792c23cad489162a642 \
|
||||
--hash=sha256:cfc781ce442ec407c841e9aa51d0e1024f72b6ec34caa8fdb6ef9576d549acf2 \
|
||||
--hash=sha256:dea9f6f8633571e18bc20cad83603072e697103a567f4b0738d52dd0211b4527 \
|
||||
--hash=sha256:e4a86a1d5eb2cce83c5972b3930c7c1eac81ab3508464345e2b8e54f119d5505 \
|
||||
--hash=sha256:e7106374d4a74ed9ff00c46cc00f0a9f06a0775f8868e423f85d4464d2333679 \
|
||||
--hash=sha256:e98a8a585b5668aa9e34d10f7785abf9545fe72663b4bfc16c99a115185ae6a5 \
|
||||
--hash=sha256:f64840e68483316eb58d82c376ad3585ca995e69e33b230436de0cdddf7363f9 \
|
||||
--hash=sha256:f8f4b0a9e6683e43889852130595c8854d8ae237f2324a053cdd884de936aa9b \
|
||||
--hash=sha256:fc45a53219ed30a7f670a6d8c98527af0020e6fd4ee4c0a8fb59f147f06d816c
|
||||
|
||||
# Contains the requirements for the letsencrypt package.
|
||||
#
|
||||
@@ -1533,18 +1475,18 @@ letsencrypt==0.7.0 \
|
||||
--hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
|
||||
--hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9
|
||||
|
||||
certbot==1.5.0 \
|
||||
--hash=sha256:ec1f01af06b52a6f079f5b02cb70e88f0671a7b13ecb3e45b040563e32c6e53a \
|
||||
--hash=sha256:c52017a4f84137e1312c898d6ae69c5f7977d79d2bd4c2df013cbbf39b6539bf
|
||||
acme==1.5.0 \
|
||||
--hash=sha256:66de67b394bb7606f97f2c21507e6eb6a88936db2a940f5c4893025f87e3852a \
|
||||
--hash=sha256:b051ff7dd3935b2032c2f8c8386e905d9b658eba9f3455e352650d85bea9c8f0
|
||||
certbot-apache==1.5.0 \
|
||||
--hash=sha256:d2c28be6dcd6c56a8040c8c733e72c1341238b1b47fb59f544eb832b9d5c81ba \
|
||||
--hash=sha256:3eec5a49ae4fcf86213f962eb1e11d8a725b65e7dcee18f9b92c7aa73f821764
|
||||
certbot-nginx==1.5.0 \
|
||||
--hash=sha256:3d27fd02ebe15b07ce5fa9525ceeda82aa5fdc45aa064729434faff0442d1f91 \
|
||||
--hash=sha256:b38f101588af6d2b8ea7c2e3334f249afbe14461a85add2f1420091d860df983
|
||||
certbot==1.11.0 \
|
||||
--hash=sha256:b7faa66c40a1ce5a31bfc8668d8feb5d2db6f7af9e791079a6d95c77b6593bf4 \
|
||||
--hash=sha256:6b0ce04e55379aff0a47f873fa05c084538ad0f4a9b79f33108dbb0a7a668b43
|
||||
acme==1.11.0 \
|
||||
--hash=sha256:77d6ce61b155315d7d7031489bbd245c0ea42c0453a04d4304393414e741a56d \
|
||||
--hash=sha256:092eb09a074a935da4c10f66cb8634ffb2cc2d2cc1035d2998d608996efab924
|
||||
certbot-apache==1.11.0 \
|
||||
--hash=sha256:ea7ac88733aad91a89c700289effda2a0c0658778da1ae2c54a0aefaee351285 \
|
||||
--hash=sha256:3ed001427ec0b49324f2b9af7170fa6e6e88948fa51c3678b07bf17f8138863d
|
||||
certbot-nginx==1.11.0 \
|
||||
--hash=sha256:79de69782a1199e577787ff9790dee02a44aac17dbecd6a7287593030842a306 \
|
||||
--hash=sha256:9afe611f99a78b8898941b8ad7bdcf7f3c2b6e0fce27125268f7c713e64b34ee
|
||||
|
||||
UNLIKELY_EOF
# -------------------------------------------------------------------------
@@ -1618,6 +1560,11 @@ maybe_argparse = (
    if sys.version_info < (2, 7, 0) else [])


# Be careful when updating the pinned versions here, in particular for pip.
# Indeed starting from 10.0, pip will build dependencies in isolation if the
# related projects are compliant with PEP 517. This is not something we want
# as of now, so the isolation build will need to be disabled wherever
# pipstrap is used (see https://github.com/certbot/certbot/issues/8256).
PACKAGES = maybe_argparse + [
    # Pip has no dependencies, as it vendors everything:
    ('11/b6/abcb525026a4be042b486df43905d6893fb04f05aac21c32c638e939e447/'

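The pipstrap excerpt above only hints at how build isolation gets turned off. As a rough, illustrative sketch (not the project's actual pipstrap code; the requirements path is a placeholder), driving pip from Python with isolation disabled looks like this:

import subprocess
import sys

def install_pinned(requirements_path):
    """Install hash-pinned requirements with PEP 517 build isolation turned off."""
    subprocess.check_call([
        sys.executable, '-m', 'pip', 'install',
        '--no-build-isolation',   # disable the isolated builds pip performs since 10.0
        '--require-hashes',       # matches hash-pinned files like the ones shown above
        '-r', requirements_path,
    ])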
@@ -0,0 +1,60 @@
options {
    directory "/var/cache/bind";

    // Running inside Docker. Bind address on Docker host is 127.0.0.1.
    listen-on { any; };
    listen-on-v6 { any; };

    // We are allowing BIND to service recursive queries, but only in an extremely limited sense
    // where it is entirely disconnected from public DNS:
    // - Iterative queries are disabled. Only forwarding to a non-existent forwarder.
    // - The only recursive answers we can get (that will not be a SERVFAIL) will come from the
    //   RPZ "mock-recursion" zone. Effectively this means we are mocking out the entirety of
    //   public DNS.
    allow-recursion { any; }; // BIND will only answer using RPZ if recursion is enabled
    forwarders { 192.0.2.254; }; // Nobody is listening, this is TEST-NET-1
    forward only; // Do NOT perform iterative queries from the root zone
    dnssec-validation no; // Do not bother fetching the root DNSKEY set (performance)
    response-policy { // All recursive queries will be served from here.
        zone "mock-recursion"
        log yes;
    } recursive-only no // Allow RPZs to affect authoritative zones too.
      qname-wait-recurse no // No real recursion.
      nsip-wait-recurse no; // No real recursion.

    allow-transfer { none; };
    allow-update { none; };
};

key "default-key." {
    algorithm hmac-sha512;
    secret "91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==";
};

zone "mock-recursion" {
    type primary;
    file "/var/lib/bind/rpz.mock-recursion";
    allow-query {
        none;
    };
};

zone "example.com." {
    type primary;
    file "/var/lib/bind/db.example.com";
    journal "/var/cache/bind/db.example.com.jnl";

    update-policy {
        grant default-key zonesub TXT;
    };
};

zone "sub.example.com." {
    type primary;
    file "/var/lib/bind/db.sub.example.com";
    journal "/var/cache/bind/db.sub.example.com.jnl";

    update-policy {
        grant default-key zonesub TXT;
    };
};
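The TSIG secret above is a fixed test value. For reference, an equivalent secret (64 random bytes, base64-encoded, the usual size for HMAC-SHA512 TSIG keys) can be generated with a one-liner like the following; BIND's tsig-keygen produces the same shape of output:

import base64
import os

# 64 random bytes, base64-encoded: the conventional secret length for an HMAC-SHA512 TSIG key.
print(base64.b64encode(os.urandom(64)).decode('ascii'))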
@@ -0,0 +1,10 @@
# Target DNS server
dns_rfc2136_server = {server_address}
# Target DNS port
dns_rfc2136_port = {server_port}
# TSIG key name
dns_rfc2136_name = default-key.
# TSIG key secret
dns_rfc2136_secret = 91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==
# TSIG key algorithm
dns_rfc2136_algorithm = HMAC-SHA512
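These settings feed certbot's dns-rfc2136 plugin, which performs TSIG-signed dynamic updates against the BIND instance configured above. A minimal sketch of such an update with dnspython follows; the target address and port are placeholders standing in for {server_address} and {server_port}:

import dns.query
import dns.tsig
import dns.tsigkeyring
import dns.update

# TSIG key matching the "default-key." stanza in named.conf above.
keyring = dns.tsigkeyring.from_text({
    'default-key.': '91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==',
})

# Add an ACME DNS-01 TXT record under example.com via a signed dynamic update.
update = dns.update.Update('example.com', keyring=keyring, keyalgorithm=dns.tsig.HMAC_SHA512)
update.add('_acme-challenge', 300, 'TXT', 'dummy-validation-token')

# 127.0.0.1:53 is a placeholder for the templated server address and port.
response = dns.query.tcp(update, '127.0.0.1', port=53)
print(response.rcode())  # 0 (NOERROR) means BIND accepted the update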
@@ -0,0 +1,11 @@
$ORIGIN example.com.
$TTL 3600
example.com. IN SOA ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )

example.com. IN NS ns1
example.com. IN NS ns2

ns1 IN A 192.0.2.2
ns2 IN A 192.0.2.3

@ IN A 192.0.2.1
@@ -0,0 +1,9 @@
$ORIGIN sub.example.com.
$TTL 3600
sub.example.com. IN SOA ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )

sub.example.com. IN NS ns1
sub.example.com. IN NS ns2

ns1 IN A 192.0.2.2
ns2 IN A 192.0.2.3
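The `update-policy { grant default-key zonesub TXT; }` statements in named.conf allow exactly these TXT updates, which is what lets certbot answer DNS-01 challenges for example.com and sub.example.com. As an illustrative sketch only (paths, domain, and ACME server URL are placeholders, and the integration-test harness adds many more flags), the corresponding certbot call looks roughly like:

import subprocess

subprocess.check_call([
    'certbot', 'certonly',
    '--dns-rfc2136',
    '--dns-rfc2136-credentials', '/tmp/rfc2136-credentials.ini',  # a file like the one shown above
    '--dns-rfc2136-propagation-seconds', '5',
    '-d', 'example.com',
    '--server', 'https://localhost:14000/dir',  # e.g. a local Pebble instance
    '--non-interactive', '--agree-tos', '--register-unsafely-without-email',
])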
@@ -0,0 +1,6 @@
$TTL 3600

@   SOA ns1.example.test. dummy.example.test. 1 12h 15m 3w 2h
    NS ns1.example.test.

_acme-challenge.aliased.example IN CNAME _acme-challenge.example.com.
@@ -0,0 +1,14 @@
This directory contains your keys and certificates.

`privkey.pem` : the private key for your certificate.
`fullchain.pem`: the certificate file used in most server software.
`chain.pem` : used for OCSP stapling in Nginx >=1.3.7.
`cert.pem` : will break many server configurations, and should not be used
without reading further documentation (see link below).

WARNING: DO NOT MOVE OR RENAME THESE FILES!
Certbot expects these files to remain in this location in order
to function properly!

We recommend not moving these files. For more information, see the Certbot
User Guide at https://certbot.eff.org/docs/using.html#where-are-my-certificates.
@@ -0,0 +1,18 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIC2zCCAcOgAwIBAgIIBvrEnbPRYu8wDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
|
||||
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxMjZjNGIwHhcNMjAxMDEyMjEwNzQw
|
||||
WhcNMjUxMDEyMjEwNzQwWjAjMSEwHwYDVQQDExhjLmVuY3J5cHRpb24tZXhhbXBs
|
||||
ZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAARjMhuW0ENPPC33PjB5XsYU
|
||||
CRw640kPQENIDatcTJaENZIZdqKd6rI6jc+lpbmXot7Zi52clJlSJS+V6oDAt2Lh
|
||||
o4HYMIHVMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB
|
||||
BQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUj7Kd3ENqxlPf8B2bIGhsjydX
|
||||
mPswHwYDVR0jBBgwFoAUEiGxlkRsi+VvcogH5dVD3h1laAcwMQYIKwYBBQUHAQEE
|
||||
JTAjMCEGCCsGAQUFBzABhhVodHRwOi8vMTI3LjAuMC4xOjQwMDIwIwYDVR0RBBww
|
||||
GoIYYy5lbmNyeXB0aW9uLWV4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQCl
|
||||
k0JXsa8y7fg41WWMDhw60bPW77O0FtOmTcnhdI5daYNemQVk+Q5EMaBLQ/oGjgXd
|
||||
9QXFzXH1PL904YEnSLt+iTpXn++7rQSNzQsdYqw0neWk4f5pEBiN+WORpb6mwobV
|
||||
ifMtBOkNEHvrJ2Pkci9U1lLwtKD/DSew6QtJU5DSkmH1XdGuMJiubygEIvELtvgq
|
||||
cP9S368ZvPmPGmKaJQXBiuaR8MTjY/Bkr79aXQMjKbf+mpn7h0POCcePk1DY/rm6
|
||||
Da+X16lf0hHyQhSUa7Vgyim6rK1/hlw+Z00i+sQCKD9Ih7kXuuGqfSDC33cfO8Tj
|
||||
o/MXO8lcxkrem5zU5QWP
|
||||
-----END CERTIFICATE-----
|
||||
@@ -0,0 +1,20 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDUDCCAjigAwIBAgIIbi787yVrcMAwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE
|
||||
AxMVUGViYmxlIFJvb3QgQ0EgMGM1MjI1MCAXDTIwMTAxMjIwMjI0NloYDzIwNTAx
|
||||
MDEyMjEyMjQ2WjAoMSYwJAYDVQQDEx1QZWJibGUgSW50ZXJtZWRpYXRlIENBIDEy
|
||||
NmM0YjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALGeVk1BMJraeqRq
|
||||
mJ2+hgso8VOAv2s2CVxUJjIVcn7f2adE8NyTsSQ1brlsnKCUYUw7yLTQH0izLQRB
|
||||
qKVIDFkUqo5/FuTJ2QlfA2EwBL8J7s/7L7vj3L0DiVpwgxPSyFEwdl/Y5y7ofsX5
|
||||
CIhCFcaMAmTIuKLiSfCJjGwkbEMuolm+lO8Mikxxc/JtDVUC479ugU7PU9O09bMH
|
||||
nm+sD6Bgd+KMoPkCCCoeShJS9X3Ziq9HGc7Z6nhM/zirFARt2XkonEdAZ8br01zY
|
||||
MRiY9txhlWQ7mUkOtzOSoEuYJNoUbvMUf0+tNzto26WRyF7dJmh7lTBsYrvAwUTx
|
||||
PzNyst0CAwEAAaOBgzCBgDAOBgNVHQ8BAf8EBAMCAoQwHQYDVR0lBBYwFAYIKwYB
|
||||
BQUHAwEGCCsGAQUFBwMCMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFBIhsZZE
|
||||
bIvlb3KIB+XVQ94dZWgHMB8GA1UdIwQYMBaAFOaKTaXg37vKgRt7d79YOjAoAtJT
|
||||
MA0GCSqGSIb3DQEBCwUAA4IBAQAU2mZii7PH2pkw2lNM0QqPbcW/UYyvFoUeM8Aq
|
||||
uCtsI2s+oxCJTqzfLsA0N8NY4nHLQ5wAlNJfJekngni8hbmJTKU4JFTMe7kLQO8P
|
||||
fJbk0pTzhhHVQw7CVwB6Pwq3u2m/JV+d6xDIDc+AVkuEl19ZJU0rTWyooClfFLZV
|
||||
EdZmEiUtA3PGlxoYwYhoGHYlhFxsoFONhCsBEdN7k7FKtFGVxN7oc5SKmKp0YZTW
|
||||
fcrEtrdNThATO4ymhCC2zh33NI/MT1O74fpaAc2k6LcTl57MKiLfTYX4LTL6v9JG
|
||||
9tlNqjFVRRmzEbtXTPcCb+w9g1VqoOGok7mGXYLTYtShCuvE
|
||||
-----END CERTIFICATE-----
|
||||
@@ -0,0 +1,38 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIC2zCCAcOgAwIBAgIILlmGtZhUFEwwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
|
||||
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxMjZjNGIwHhcNMjAxMDEyMjA1MDM0
|
||||
WhcNMjUxMDEyMjA1MDM0WjAjMSEwHwYDVQQDExhjLmVuY3J5cHRpb24tZXhhbXBs
|
||||
ZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAARHEzR8JPWrEmpmgM+F2bk5
|
||||
9mT0u6CjzmJG0QpbaqprLiG5NGpW84VQ5TFCrmC4KxYfigCfMhfHRNfFYvNUK3V/
|
||||
o4HYMIHVMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB
|
||||
BQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1CsVL+bPnzaxxQ5jUENmQJIO
|
||||
lKwwHwYDVR0jBBgwFoAUEiGxlkRsi+VvcogH5dVD3h1laAcwMQYIKwYBBQUHAQEE
|
||||
JTAjMCEGCCsGAQUFBzABhhVodHRwOi8vMTI3LjAuMC4xOjQwMDIwIwYDVR0RBBww
|
||||
GoIYYy5lbmNyeXB0aW9uLWV4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQBn
|
||||
2D8loC7pfk28JYpFLr5lmFKJWWmtLGlpsWDj61fVjtTfGKLziJz+MM6il4Y3hIz5
|
||||
58qiFK0ue0M63dIBJ33N+XxSEXon4Q0gy/zRWfH9jtPJ3FwfjkU/RT9PAUClYi0G
|
||||
ptNWnTmgQkNzousbcAtRNXuuShH3856vhUnwkX+xM+cbIDi1JVmFjcGrEEQJ0rUF
|
||||
mv2ZTyfbWbUs3v4rReETi2NVzr1Ql6J+ByNcMvHODzFy3t0L6yelAw2ca1I+c9HU
|
||||
+Z0tnp/ykR7eXNuVLivok8UBf5OC413lh8ZO5g+Bgzh/LdtkUuavg1MYtEX0H6mX
|
||||
9U7y3nVI8WEbPGf+HDeu
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDUDCCAjigAwIBAgIIbi787yVrcMAwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE
|
||||
AxMVUGViYmxlIFJvb3QgQ0EgMGM1MjI1MCAXDTIwMTAxMjIwMjI0NloYDzIwNTAx
|
||||
MDEyMjEyMjQ2WjAoMSYwJAYDVQQDEx1QZWJibGUgSW50ZXJtZWRpYXRlIENBIDEy
|
||||
NmM0YjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALGeVk1BMJraeqRq
|
||||
mJ2+hgso8VOAv2s2CVxUJjIVcn7f2adE8NyTsSQ1brlsnKCUYUw7yLTQH0izLQRB
|
||||
qKVIDFkUqo5/FuTJ2QlfA2EwBL8J7s/7L7vj3L0DiVpwgxPSyFEwdl/Y5y7ofsX5
|
||||
CIhCFcaMAmTIuKLiSfCJjGwkbEMuolm+lO8Mikxxc/JtDVUC479ugU7PU9O09bMH
|
||||
nm+sD6Bgd+KMoPkCCCoeShJS9X3Ziq9HGc7Z6nhM/zirFARt2XkonEdAZ8br01zY
|
||||
MRiY9txhlWQ7mUkOtzOSoEuYJNoUbvMUf0+tNzto26WRyF7dJmh7lTBsYrvAwUTx
|
||||
PzNyst0CAwEAAaOBgzCBgDAOBgNVHQ8BAf8EBAMCAoQwHQYDVR0lBBYwFAYIKwYB
|
||||
BQUHAwEGCCsGAQUFBwMCMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFBIhsZZE
|
||||
bIvlb3KIB+XVQ94dZWgHMB8GA1UdIwQYMBaAFOaKTaXg37vKgRt7d79YOjAoAtJT
|
||||
MA0GCSqGSIb3DQEBCwUAA4IBAQAU2mZii7PH2pkw2lNM0QqPbcW/UYyvFoUeM8Aq
|
||||
uCtsI2s+oxCJTqzfLsA0N8NY4nHLQ5wAlNJfJekngni8hbmJTKU4JFTMe7kLQO8P
|
||||
fJbk0pTzhhHVQw7CVwB6Pwq3u2m/JV+d6xDIDc+AVkuEl19ZJU0rTWyooClfFLZV
|
||||
EdZmEiUtA3PGlxoYwYhoGHYlhFxsoFONhCsBEdN7k7FKtFGVxN7oc5SKmKp0YZTW
|
||||
fcrEtrdNThATO4ymhCC2zh33NI/MT1O74fpaAc2k6LcTl57MKiLfTYX4LTL6v9JG
|
||||
9tlNqjFVRRmzEbtXTPcCb+w9g1VqoOGok7mGXYLTYtShCuvE
|
||||
-----END CERTIFICATE-----
|
||||
@@ -0,0 +1,5 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgNgefv2dad4U1VYEi
|
||||
0WkdHuqywi5QXAe30OwNTTGjhbihRANCAARHEzR8JPWrEmpmgM+F2bk59mT0u6Cj
|
||||
zmJG0QpbaqprLiG5NGpW84VQ5TFCrmC4KxYfigCfMhfHRNfFYvNUK3V/
|
||||
-----END PRIVATE KEY-----
|
||||
@@ -0,0 +1,14 @@
This directory contains your keys and certificates.

`privkey.pem` : the private key for your certificate.
`fullchain.pem`: the certificate file used in most server software.
`chain.pem` : used for OCSP stapling in Nginx >=1.3.7.
`cert.pem` : will break many server configurations, and should not be used
without reading further documentation (see link below).

WARNING: DO NOT MOVE OR RENAME THESE FILES!
Certbot expects these files to remain in this location in order
to function properly!

We recommend not moving these files. For more information, see the Certbot
User Guide at https://certbot.eff.org/docs/using.html#where-are-my-certificates.
@@ -0,0 +1 @@
../../archive/c.encryption-example.com/cert.pem
@@ -0,0 +1 @@
../../archive/c.encryption-example.com/chain.pem
@@ -0,0 +1 @@
../../archive/c.encryption-example.com/fullchain.pem
@@ -0,0 +1 @@
../../archive/c.encryption-example.com/privkey.pem
@@ -0,0 +1,17 @@
# renew_before_expiry = 30 days
version = 1.10.0.dev0
archive_dir = sample-config/archive/c.encryption-example.com
cert = sample-config/live/c.encryption-example.com/cert.pem
privkey = sample-config/live/c.encryption-example.com/privkey.pem
chain = sample-config/live/c.encryption-example.com/chain.pem
fullchain = sample-config/live/c.encryption-example.com/fullchain.pem

# Options used in the renewal process
[renewalparams]
authenticator = apache
installer = apache
account = 48d6b9e8d767eccf7e4d877d6ffa81e3
key_type = ecdsa
config_dir = sample-config-ec
elliptic_curve = secp256r1
manual_public_ip_logging_ok = True
@@ -1,3 +1,4 @@
# pylint: disable=missing-module-docstring
import pytest

# Custom assertions defined in the following package need to be registered to be properly

@@ -2,6 +2,11 @@
import io
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.serialization import load_pem_private_key

try:
    import grp
    POSIX_MODE = True
@@ -16,6 +21,33 @@ SYSTEM_SID = 'S-1-5-18'
ADMINS_SID = 'S-1-5-32-544'


def assert_elliptic_key(key, curve):
    """
    Asserts that the key at the given path is an EC key using the given curve.
    :param key: path to key
    :param curve: name of the expected elliptic curve
    """
    with open(key, 'rb') as file:
        privkey1 = file.read()

    key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())

    assert isinstance(key, EllipticCurvePrivateKey)
    assert isinstance(key.curve, curve)


def assert_rsa_key(key):
    """
    Asserts that the key at the given path is an RSA key.
    :param key: path to key
    """
    with open(key, 'rb') as file:
        privkey1 = file.read()

    key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())
    assert isinstance(key, RSAPrivateKey)


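assert_elliptic_key and assert_rsa_key only inspect keys that already exist on disk. As a hedged illustration of the inputs they accept (file names are arbitrary), matching keys can be produced with the same cryptography library:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec, rsa

def write_test_key(path, private_key):
    """Serialize a private key to an unencrypted PKCS#8 PEM file."""
    pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )
    with open(path, 'wb') as key_file:
        key_file.write(pem)

# An EC key on secp384r1 satisfies assert_elliptic_key(path, SECP384R1) ...
write_test_key('privkey-ec.pem', ec.generate_private_key(ec.SECP384R1(), default_backend()))
# ... and an RSA key satisfies assert_rsa_key(path).
write_test_key('privkey-rsa.pem',
               rsa.generate_private_key(public_exponent=65537, key_size=2048,
                                        backend=default_backend()))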
def assert_hook_execution(probe_path, probe_content):
    """
    Assert that a certbot hook has been executed

@@ -77,6 +77,6 @@ class IntegrationTestsContext(object):
        appending the pytest worker id to the subdomain, using this pattern:
        {subdomain}.{worker_id}.wtf
        :param subdomain: the subdomain to use in the generated domain (default 'le')
        :return: the well-formed domain suitable for redirection on
        :return: the well-formed domain suitable for redirection on
        """
        return '{0}.{1}.wtf'.format(subdomain, self.worker_id)

@@ -9,10 +9,15 @@ import shutil
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
from cryptography.hazmat.primitives.asymmetric.ec import SECP256R1, SECP384R1, SECP521R1
|
||||
from cryptography.x509 import NameOID
|
||||
|
||||
import pytest
|
||||
|
||||
from certbot_integration_tests.certbot_tests import context as certbot_context
|
||||
from certbot_integration_tests.certbot_tests.assertions import assert_cert_count_for_lineage
|
||||
from certbot_integration_tests.certbot_tests.assertions import assert_elliptic_key
|
||||
from certbot_integration_tests.certbot_tests.assertions import assert_rsa_key
|
||||
from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_owner
|
||||
from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_permissions
|
||||
from certbot_integration_tests.certbot_tests.assertions import assert_equals_world_read_permissions
|
||||
@@ -24,8 +29,9 @@ from certbot_integration_tests.certbot_tests.assertions import EVERYBODY_SID
|
||||
from certbot_integration_tests.utils import misc
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def context(request):
|
||||
@pytest.fixture(name='context')
|
||||
def test_context(request):
|
||||
# pylint: disable=missing-function-docstring
|
||||
# Fixture request is a built-in pytest fixture describing current test request.
|
||||
integration_test_context = certbot_context.IntegrationTestsContext(request)
|
||||
try:
|
||||
@@ -142,6 +148,17 @@ def test_certonly(context):
|
||||
"""Test the certonly verb on certbot."""
|
||||
context.certbot(['certonly', '--cert-name', 'newname', '-d', context.get_domain('newname')])
|
||||
|
||||
assert_cert_count_for_lineage(context.config_dir, 'newname', 1)
|
||||
|
||||
|
||||
def test_certonly_webroot(context):
|
||||
"""Test the certonly verb with webroot plugin"""
|
||||
with misc.create_http_server(context.http_01_port) as webroot:
|
||||
certname = context.get_domain('webroot')
|
||||
context.certbot(['certonly', '-a', 'webroot', '--webroot-path', webroot, '-d', certname])
|
||||
|
||||
assert_cert_count_for_lineage(context.config_dir, certname, 1)
|
||||
|
||||
|
||||
def test_auth_and_install_with_csr(context):
|
||||
"""Test certificate issuance and install using an existing CSR."""
|
||||
@@ -217,14 +234,16 @@ def test_renew_files_propagate_permissions(context):
|
||||
if os.name != 'nt':
|
||||
os.chmod(privkey1, 0o444)
|
||||
else:
|
||||
import win32security
|
||||
import ntsecuritycon
|
||||
import win32security # pylint: disable=import-error
|
||||
import ntsecuritycon # pylint: disable=import-error
|
||||
# Get the current DACL of the private key
|
||||
security = win32security.GetFileSecurity(privkey1, win32security.DACL_SECURITY_INFORMATION)
|
||||
dacl = security.GetSecurityDescriptorDacl()
|
||||
# Create a read permission for Everybody group
|
||||
everybody = win32security.ConvertStringSidToSid(EVERYBODY_SID)
|
||||
dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ, everybody)
|
||||
dacl.AddAccessAllowedAce(
|
||||
win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ, everybody
|
||||
)
|
||||
# Apply the updated DACL to the private key
|
||||
security.SetSecurityDescriptorDacl(1, dacl, 0)
|
||||
win32security.SetFileSecurity(privkey1, win32security.DACL_SECURITY_INFORMATION, security)
|
||||
@@ -233,12 +252,14 @@ def test_renew_files_propagate_permissions(context):
|
||||
|
||||
assert_cert_count_for_lineage(context.config_dir, certname, 2)
|
||||
if os.name != 'nt':
|
||||
# On Linux, read world permissions + all group permissions will be copied from the previous private key
|
||||
# On Linux, read world permissions + all group permissions
|
||||
# will be copied from the previous private key
|
||||
assert_world_read_permissions(privkey2)
|
||||
assert_equals_world_read_permissions(privkey1, privkey2)
|
||||
assert_equals_group_permissions(privkey1, privkey2)
|
||||
else:
|
||||
# On Windows, world will never have any permissions, and group permission is irrelevant for this platform
|
||||
# On Windows, world will never have any permissions, and
|
||||
# group permission is irrelevant for this platform
|
||||
assert_world_no_permissions(privkey2)
|
||||
|
||||
|
||||
@@ -287,7 +308,7 @@ def test_renew_with_changed_private_key_complexity(context):
|
||||
assert_cert_count_for_lineage(context.config_dir, certname, 1)
|
||||
|
||||
context.certbot(['renew'])
|
||||
|
||||
|
||||
assert_cert_count_for_lineage(context.config_dir, certname, 2)
|
||||
key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
|
||||
assert os.stat(key2).st_size > 3000
|
||||
@@ -419,20 +440,115 @@ def test_reuse_key(context):
|
||||
assert len({cert1, cert2, cert3}) == 3
|
||||
|
||||
|
||||
def test_incorrect_key_type(context):
|
||||
with pytest.raises(subprocess.CalledProcessError):
|
||||
context.certbot(['--key-type="failwhale"'])
|
||||
|
||||
|
||||
def test_ecdsa(context):
|
||||
"""Test certificate issuance with ECDSA key."""
|
||||
"""Test issuance for ECDSA CSR based request (legacy supported mode)."""
|
||||
key_path = join(context.workspace, 'privkey-p384.pem')
|
||||
csr_path = join(context.workspace, 'csr-p384.der')
|
||||
cert_path = join(context.workspace, 'cert-p384.pem')
|
||||
chain_path = join(context.workspace, 'chain-p384.pem')
|
||||
|
||||
misc.generate_csr([context.get_domain('ecdsa')], key_path, csr_path, key_type=misc.ECDSA_KEY_TYPE)
|
||||
context.certbot(['auth', '--csr', csr_path, '--cert-path', cert_path, '--chain-path', chain_path])
|
||||
misc.generate_csr(
|
||||
[context.get_domain('ecdsa')],
|
||||
key_path, csr_path,
|
||||
key_type=misc.ECDSA_KEY_TYPE
|
||||
)
|
||||
context.certbot([
|
||||
'auth', '--csr', csr_path, '--cert-path', cert_path,
|
||||
'--chain-path', chain_path,
|
||||
])
|
||||
|
||||
certificate = misc.read_certificate(cert_path)
|
||||
assert 'ASN1 OID: secp384r1' in certificate
|
||||
|
||||
|
||||
def test_default_key_type(context):
|
||||
"""Test default key type is RSA"""
|
||||
certname = context.get_domain('renew')
|
||||
context.certbot([
|
||||
'certonly',
|
||||
'--cert-name', certname, '-d', certname
|
||||
])
|
||||
filename = join(context.config_dir, 'archive/{0}/privkey1.pem').format(certname)
|
||||
assert_rsa_key(filename)
|
||||
|
||||
|
||||
def test_default_curve_type(context):
|
||||
"""test that the curve used when not specifying any is secp256r1"""
|
||||
certname = context.get_domain('renew')
|
||||
context.certbot([
|
||||
'--key-type', 'ecdsa', '--cert-name', certname, '-d', certname
|
||||
])
|
||||
key1 = join(context.config_dir, 'archive/{0}/privkey1.pem'.format(certname))
|
||||
assert_elliptic_key(key1, SECP256R1)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('curve,curve_cls,skip_servers', [
|
||||
# Curve name, Curve class, ACME servers to skip
|
||||
('secp256r1', SECP256R1, []),
|
||||
('secp384r1', SECP384R1, []),
|
||||
('secp521r1', SECP521R1, ['boulder-v1', 'boulder-v2'])]
|
||||
)
|
||||
def test_ecdsa_curves(context, curve, curve_cls, skip_servers):
|
||||
"""Test issuance for each supported ECDSA curve"""
|
||||
if context.acme_server in skip_servers:
|
||||
pytest.skip('ACME server {} does not support ECDSA curve {}'
|
||||
.format(context.acme_server, curve))
|
||||
|
||||
domain = context.get_domain('curve')
|
||||
context.certbot([
|
||||
'certonly',
|
||||
'--key-type', 'ecdsa', '--elliptic-curve', curve,
|
||||
'--force-renewal', '-d', domain,
|
||||
])
|
||||
key = join(context.config_dir, "live", domain, 'privkey.pem')
|
||||
assert_elliptic_key(key, curve_cls)
|
||||
|
||||
|
||||
def test_renew_with_ec_keys(context):
|
||||
"""Test proper renew with updated private key complexity."""
|
||||
certname = context.get_domain('renew')
|
||||
context.certbot([
|
||||
'certonly',
|
||||
'--cert-name', certname,
|
||||
'--key-type', 'ecdsa', '--elliptic-curve', 'secp256r1',
|
||||
'--force-renewal', '-d', certname,
|
||||
])
|
||||
|
||||
key1 = join(context.config_dir, "archive", certname, 'privkey1.pem')
|
||||
assert 200 < os.stat(key1).st_size < 250 # ec keys of 256 bits are ~225 bytes
|
||||
assert_elliptic_key(key1, SECP256R1)
|
||||
assert_cert_count_for_lineage(context.config_dir, certname, 1)
|
||||
|
||||
context.certbot(['renew', '--elliptic-curve', 'secp384r1'])
|
||||
|
||||
assert_cert_count_for_lineage(context.config_dir, certname, 2)
|
||||
key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
|
||||
assert_elliptic_key(key2, SECP384R1)
|
||||
assert 280 < os.stat(key2).st_size < 320 # ec keys of 384 bits are ~310 bytes
|
||||
|
||||
# We expect here that the command will fail because without --key-type specified,
|
||||
# Certbot must error out to prevent changing an existing certificate key type,
|
||||
# without explicit user consent (by specifying both --cert-name and --key-type).
|
||||
with pytest.raises(subprocess.CalledProcessError):
|
||||
context.certbot([
|
||||
'certonly',
|
||||
'--force-renewal',
|
||||
'-d', certname
|
||||
])
|
||||
|
||||
# We expect that the previous behavior of requiring both --cert-name and
|
||||
# --key-type to be set to not apply to the renew subcommand.
|
||||
context.certbot(['renew', '--force-renewal', '--key-type', 'rsa'])
|
||||
assert_cert_count_for_lineage(context.config_dir, certname, 3)
|
||||
key3 = join(context.config_dir, 'archive', certname, 'privkey3.pem')
|
||||
assert_rsa_key(key3)
|
||||
|
||||
|
||||
def test_ocsp_must_staple(context):
|
||||
"""Test that OCSP Must-Staple is correctly set in the generated certificate."""
|
||||
if context.acme_server == 'pebble':
|
||||
@@ -531,18 +647,22 @@ def test_revoke_multiple_lineages(context):
|
||||
with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'r') as file:
|
||||
data = file.read()
|
||||
|
||||
data = re.sub('archive_dir = .*\n',
|
||||
'archive_dir = {0}\n'.format(join(context.config_dir, 'archive', cert1).replace('\\', '\\\\')),
|
||||
data)
|
||||
data = re.sub(
|
||||
'archive_dir = .*\n',
|
||||
'archive_dir = {0}\n'.format(
|
||||
join(context.config_dir, 'archive', cert1).replace('\\', '\\\\')
|
||||
), data
|
||||
)
|
||||
|
||||
with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'w') as file:
|
||||
file.write(data)
|
||||
|
||||
output = context.certbot([
|
||||
context.certbot([
|
||||
'revoke', '--cert-path', join(context.config_dir, 'live', cert1, 'cert.pem')
|
||||
])
|
||||
|
||||
assert 'Not deleting revoked certs due to overlapping archive dirs' in output
|
||||
with open(join(context.workspace, 'logs', 'letsencrypt.log'), 'r') as f:
|
||||
assert 'Not deleting revoked certificates due to overlapping archive dirs' in f.read()
|
||||
|
||||
|
||||
def test_wildcard_certificates(context):
|
||||
@@ -628,3 +748,31 @@ def test_dry_run_deactivate_authzs(context):
|
||||
context.certbot(args)
|
||||
with open(join(context.workspace, 'logs', 'letsencrypt.log'), 'r') as f:
|
||||
assert log_line in f.read(), 'Second order should have been recreated due to authz reuse'
|
||||
|
||||
|
||||
def test_preferred_chain(context):
|
||||
"""Test that --preferred-chain results in the correct chain.pem being produced"""
|
||||
try:
|
||||
issuers = misc.get_acme_issuers(context)
|
||||
except NotImplementedError:
|
||||
pytest.skip('This ACME server does not support alternative issuers.')
|
||||
|
||||
names = [i.issuer.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value \
|
||||
for i in issuers]
|
||||
|
||||
domain = context.get_domain('preferred-chain')
|
||||
cert_path = join(context.config_dir, 'live', domain, 'chain.pem')
|
||||
conf_path = join(context.config_dir, 'renewal', '{}.conf'.format(domain))
|
||||
|
||||
for (requested, expected) in [(n, n) for n in names] + [('nonexistent', names[0])]:
|
||||
args = ['certonly', '--cert-name', domain, '-d', domain,
|
||||
'--preferred-chain', requested, '--force-renewal']
|
||||
context.certbot(args)
|
||||
|
||||
dumped = misc.read_certificate(cert_path)
|
||||
assert 'Issuer: CN={}'.format(expected) in dumped, \
|
||||
'Expected chain issuer to be {} when preferring {}'.format(expected, requested)
|
||||
|
||||
with open(conf_path, 'r') as f:
|
||||
assert 'preferred_chain = {}'.format(requested) in f.read(), \
|
||||
'Expected preferred_chain to be set in renewal config'
|
||||
|
||||
@@ -12,6 +12,7 @@ import subprocess
|
||||
import sys
|
||||
|
||||
from certbot_integration_tests.utils import acme_server as acme_lib
|
||||
from certbot_integration_tests.utils import dns_server as dns_lib
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
@@ -23,6 +24,10 @@ def pytest_addoption(parser):
|
||||
choices=['boulder-v1', 'boulder-v2', 'pebble'],
|
||||
help='select the ACME server to use (boulder-v1, boulder-v2, '
|
||||
'pebble), defaulting to pebble')
|
||||
parser.addoption('--dns-server', default='challtestsrv',
|
||||
choices=['bind', 'challtestsrv'],
|
||||
help='select the DNS server to use (bind, challtestsrv), '
|
||||
'defaulting to challtestsrv')
|
||||
|
||||
|
||||
def pytest_configure(config):
|
||||
@@ -32,7 +37,7 @@ def pytest_configure(config):
|
||||
"""
|
||||
if not hasattr(config, 'slaveinput'): # If true, this is the primary node
|
||||
with _print_on_err():
|
||||
config.acme_xdist = _setup_primary_node(config)
|
||||
_setup_primary_node(config)
|
||||
|
||||
|
||||
def pytest_configure_node(node):
|
||||
@@ -41,6 +46,7 @@ def pytest_configure_node(node):
|
||||
:param node: current worker node
|
||||
"""
|
||||
node.slaveinput['acme_xdist'] = node.config.acme_xdist
|
||||
node.slaveinput['dns_xdist'] = node.config.dns_xdist
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
@@ -61,12 +67,18 @@ def _print_on_err():
|
||||
def _setup_primary_node(config):
|
||||
"""
|
||||
Setup the environment for integration tests.
|
||||
Will:
|
||||
|
||||
This function will:
|
||||
- check runtime compatibility (Docker, docker-compose, Nginx)
|
||||
- create a temporary workspace and the persistent GIT repositories space
|
||||
- configure and start a DNS server using Docker, if configured
|
||||
- configure and start paralleled ACME CA servers using Docker
|
||||
- transfer ACME CA servers configurations to pytest nodes using env variables
|
||||
:param config: Configuration of the pytest primary node
|
||||
- transfer ACME CA and DNS servers configurations to pytest nodes using env variables
|
||||
|
||||
This function modifies `config` by injecting the ACME CA and DNS server configurations,
|
||||
in addition to cleanup functions for those servers.
|
||||
|
||||
:param config: Configuration of the pytest primary node. Is modified by this function.
|
||||
"""
|
||||
# Check for runtime compatibility: some tools are required to be available in PATH
|
||||
if 'boulder' in config.option.acme_server:
|
||||
@@ -79,18 +91,35 @@ def _setup_primary_node(config):
|
||||
try:
|
||||
subprocess.check_output(['docker-compose', '-v'], stderr=subprocess.STDOUT)
|
||||
except (subprocess.CalledProcessError, OSError):
|
||||
raise ValueError('Error: docker-compose is required in PATH to launch the integration tests, '
|
||||
'but is not installed or not available for current user.')
|
||||
raise ValueError(
|
||||
'Error: docker-compose is required in PATH to launch the integration tests, '
|
||||
'but is not installed or not available for current user.'
|
||||
)
|
||||
|
||||
# Parameter numprocesses is added to option by pytest-xdist
|
||||
workers = ['primary'] if not config.option.numprocesses\
|
||||
else ['gw{0}'.format(i) for i in range(config.option.numprocesses)]
|
||||
|
||||
# If a non-default DNS server is configured, start it and feed it to the ACME server
|
||||
dns_server = None
|
||||
acme_dns_server = None
|
||||
if config.option.dns_server == 'bind':
|
||||
dns_server = dns_lib.DNSServer(workers)
|
||||
config.add_cleanup(dns_server.stop)
|
||||
print('DNS xdist config:\n{0}'.format(dns_server.dns_xdist))
|
||||
dns_server.start()
|
||||
acme_dns_server = '{}:{}'.format(
|
||||
dns_server.dns_xdist['address'],
|
||||
dns_server.dns_xdist['port']
|
||||
)
|
||||
|
||||
# By calling setup_acme_server we ensure that all necessary acme server instances will be
|
||||
# fully started. This runtime is reflected by the acme_xdist returned.
|
||||
acme_server = acme_lib.ACMEServer(config.option.acme_server, workers)
|
||||
acme_server = acme_lib.ACMEServer(config.option.acme_server, workers,
|
||||
dns_server=acme_dns_server)
|
||||
config.add_cleanup(acme_server.stop)
|
||||
print('ACME xdist config:\n{0}'.format(acme_server.acme_xdist))
|
||||
acme_server.start()
|
||||
|
||||
return acme_server.acme_xdist
|
||||
config.acme_xdist = acme_server.acme_xdist
|
||||
config.dns_xdist = dns_server.dns_xdist if dns_server else None
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
"""Module to handle the context of nginx integration tests."""
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""General purpose nginx test configuration generator."""
|
||||
import getpass
|
||||
|
||||
@@ -42,6 +43,8 @@ events {{
|
||||
worker_connections 1024;
|
||||
}}
|
||||
|
||||
# “This comment contains valid Unicode”.
|
||||
|
||||
http {{
|
||||
# Set an array of temp, cache and log file options that will otherwise default to
|
||||
# restricted locations accessible only to root.
|
||||
@@ -51,61 +54,61 @@ http {{
|
||||
#scgi_temp_path {nginx_root}/scgi_temp;
|
||||
#uwsgi_temp_path {nginx_root}/uwsgi_temp;
|
||||
access_log {nginx_root}/error.log;
|
||||
|
||||
|
||||
# This should be turned off in a Virtualbox VM, as it can cause some
|
||||
# interesting issues with data corruption in delivered files.
|
||||
sendfile off;
|
||||
|
||||
|
||||
tcp_nopush on;
|
||||
tcp_nodelay on;
|
||||
keepalive_timeout 65;
|
||||
types_hash_max_size 2048;
|
||||
|
||||
|
||||
#include /etc/nginx/mime.types;
|
||||
index index.html index.htm index.php;
|
||||
|
||||
|
||||
log_format main '$remote_addr - $remote_user [$time_local] $status '
|
||||
'"$request" $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
|
||||
|
||||
default_type application/octet-stream;
|
||||
|
||||
|
||||
server {{
|
||||
# IPv4.
|
||||
listen {http_port} {default_server};
|
||||
# IPv6.
|
||||
listen [::]:{http_port} {default_server};
|
||||
server_name nginx.{wtf_prefix}.wtf nginx2.{wtf_prefix}.wtf;
|
||||
|
||||
|
||||
root {nginx_webroot};
|
||||
|
||||
|
||||
location / {{
|
||||
# First attempt to serve request as file, then as directory, then fall
|
||||
# back to index.html.
|
||||
try_files $uri $uri/ /index.html;
|
||||
}}
|
||||
}}
|
||||
|
||||
|
||||
server {{
|
||||
listen {http_port};
|
||||
listen [::]:{http_port};
|
||||
server_name nginx3.{wtf_prefix}.wtf;
|
||||
|
||||
|
||||
root {nginx_webroot};
|
||||
|
||||
|
||||
location /.well-known/ {{
|
||||
return 404;
|
||||
}}
|
||||
|
||||
|
||||
return 301 https://$host$request_uri;
|
||||
}}
|
||||
|
||||
|
||||
server {{
|
||||
listen {other_port};
|
||||
listen [::]:{other_port};
|
||||
server_name nginx4.{wtf_prefix}.wtf nginx5.{wtf_prefix}.wtf;
|
||||
}}
|
||||
|
||||
|
||||
server {{
|
||||
listen {http_port};
|
||||
listen [::]:{http_port};
|
||||
|
||||
@@ -2,13 +2,14 @@
|
||||
import os
|
||||
import ssl
|
||||
|
||||
from typing import List
|
||||
import pytest
|
||||
|
||||
from certbot_integration_tests.nginx_tests import context as nginx_context
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def context(request):
|
||||
@pytest.fixture(name='context')
|
||||
def test_context(request):
|
||||
# Fixture request is a built-in pytest fixture describing current test request.
|
||||
integration_test_context = nginx_context.IntegrationTestsContext(request)
|
||||
try:
|
||||
@@ -27,10 +28,12 @@ def context(request):
|
||||
# No matching server block; default_server does not exist
|
||||
('nginx5.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': False}),
|
||||
# Multiple domains, mix of matching and not
|
||||
('nginx6.{0}.wtf,nginx7.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': False}),
|
||||
('nginx6.{0}.wtf,nginx7.{0}.wtf', [
|
||||
'--preferred-challenges', 'http'
|
||||
], {'default_server': False}),
|
||||
], indirect=['context'])
|
||||
def test_certificate_deployment(certname_pattern, params, context):
|
||||
# type: (str, list, nginx_context.IntegrationTestsContext) -> None
|
||||
# type: (str, List[str], nginx_context.IntegrationTestsContext) -> None
|
||||
"""
|
||||
Test various scenarios to deploy a certificate to nginx using certbot.
|
||||
"""
|
||||
@@ -41,7 +44,9 @@ def test_certificate_deployment(certname_pattern, params, context):
|
||||
|
||||
lineage = domains.split(',')[0]
|
||||
server_cert = ssl.get_server_certificate(('localhost', context.tls_alpn_01_port))
|
||||
with open(os.path.join(context.workspace, 'conf/live/{0}/cert.pem'.format(lineage)), 'r') as file:
|
||||
with open(os.path.join(
|
||||
context.workspace, 'conf/live/{0}/cert.pem'.format(lineage)), 'r'
|
||||
) as file:
|
||||
certbot_cert = file.read()
|
||||
|
||||
assert server_cert == certbot_cert
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
"""Module to handle the context of RFC2136 integration tests."""
|
||||
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
|
||||
from pkg_resources import resource_filename
|
||||
from pytest import skip
|
||||
|
||||
from certbot_integration_tests.certbot_tests import context as certbot_context
|
||||
from certbot_integration_tests.utils import certbot_call
|
||||
|
||||
|
||||
class IntegrationTestsContext(certbot_context.IntegrationTestsContext):
|
||||
"""Integration test context for certbot-dns-rfc2136"""
|
||||
def __init__(self, request):
|
||||
super(IntegrationTestsContext, self).__init__(request)
|
||||
|
||||
self.request = request
|
||||
|
||||
self._dns_xdist = None
|
||||
if hasattr(request.config, 'slaveinput'): # Worker node
|
||||
self._dns_xdist = request.config.slaveinput['dns_xdist']
|
||||
else: # Primary node
|
||||
self._dns_xdist = request.config.dns_xdist
|
||||
|
||||
def certbot_test_rfc2136(self, args):
|
||||
"""
|
||||
Main command to execute certbot using the RFC2136 DNS authenticator.
|
||||
:param list args: list of arguments to pass to Certbot
|
||||
"""
|
||||
command = ['--authenticator', 'dns-rfc2136', '--dns-rfc2136-propagation-seconds', '2']
|
||||
command.extend(args)
|
||||
return certbot_call.certbot_test(
|
||||
command, self.directory_url, self.http_01_port, self.tls_alpn_01_port,
|
||||
self.config_dir, self.workspace, force_renew=True)
|
||||
|
||||
@contextmanager
|
||||
def rfc2136_credentials(self, label='default'):
|
||||
"""
|
||||
Produces the contents of a certbot-dns-rfc2136 credentials file.
|
||||
:param str label: which RFC2136 credential to use
|
||||
:yields: Path to credentials file
|
||||
:rtype: str
|
||||
"""
|
||||
src_file = resource_filename('certbot_integration_tests',
|
||||
'assets/bind-config/rfc2136-credentials-{}.ini.tpl'
|
||||
.format(label))
|
||||
contents = None
|
||||
|
||||
with open(src_file, 'r') as f:
|
||||
contents = f.read().format(
|
||||
server_address=self._dns_xdist['address'],
|
||||
server_port=self._dns_xdist['port']
|
||||
)
|
||||
|
||||
with tempfile.NamedTemporaryFile('w+', prefix='rfc2136-creds-{}'.format(label),
|
||||
suffix='.ini', dir=self.workspace) as fp:
|
||||
fp.write(contents)
|
||||
fp.flush()
|
||||
yield fp.name
|
||||
|
||||
def skip_if_no_bind9_server(self):
|
||||
"""Skips the test if there was no RFC2136-capable DNS server configured
|
||||
in the test environment"""
|
||||
if not self._dns_xdist:
|
||||
skip('No RFC2136-capable DNS server is configured')
|
||||
@@ -0,0 +1,26 @@
|
||||
"""Module executing integration tests against Certbot with the RFC2136 DNS authenticator."""
|
||||
import pytest
|
||||
|
||||
from certbot_integration_tests.rfc2136_tests import context as rfc2136_context
|
||||
|
||||
|
||||
@pytest.fixture(name="context")
|
||||
def pytest_context(request):
|
||||
# pylint: disable=missing-function-docstring
|
||||
# Fixture request is a built-in pytest fixture describing current test request.
|
||||
integration_test_context = rfc2136_context.IntegrationTestsContext(request)
|
||||
try:
|
||||
yield integration_test_context
|
||||
finally:
|
||||
integration_test_context.cleanup()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('domain', [('example.com'), ('sub.example.com')])
|
||||
def test_get_certificate(domain, context):
|
||||
context.skip_if_no_bind9_server()
|
||||
|
||||
with context.rfc2136_credentials() as creds:
|
||||
context.certbot_test_rfc2136([
|
||||
'certonly', '--dns-rfc2136-credentials', creds,
|
||||
'-d', domain, '-d', '*.{}'.format(domain)
|
||||
])
|
||||
@@ -2,6 +2,7 @@
|
||||
"""Module to setup an ACME CA server environment able to run multiple tests in parallel"""
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import errno
|
||||
import json
|
||||
import os
|
||||
@@ -12,11 +13,13 @@ import sys
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
from typing import List
|
||||
import requests
|
||||
|
||||
from certbot_integration_tests.utils import misc
|
||||
from certbot_integration_tests.utils import pebble_artifacts
|
||||
from certbot_integration_tests.utils import proxy
|
||||
# pylint: disable=wildcard-import,unused-wildcard-import
|
||||
from certbot_integration_tests.utils.constants import *
|
||||
|
||||
|
||||
@@ -29,24 +32,34 @@ class ACMEServer(object):
|
||||
ACMEServer gives access to the acme_xdist parameter, listing the ports and directory URL to use
for each pytest node. It also exposes start and stop methods in order to start the stack, and
stop it with proper resource cleanup.
|
||||
ACMEServer is also a context manager, and so can be used to ensure ACME server is started/stopped
|
||||
upon context enter/exit.
|
||||
ACMEServer is also a context manager, and so can be used to ensure ACME server is
|
||||
started/stopped upon context enter/exit.
|
||||
"""
|
||||
def __init__(self, acme_server, nodes, http_proxy=True, stdout=False):
|
||||
def __init__(self, acme_server, nodes, http_proxy=True, stdout=False,
|
||||
dns_server=None, http_01_port=DEFAULT_HTTP_01_PORT):
|
||||
"""
|
||||
Create an ACMEServer instance.
|
||||
:param str acme_server: the type of acme server used (boulder-v1, boulder-v2 or pebble)
|
||||
:param list nodes: list of node names that will be setup by pytest xdist
|
||||
:param bool http_proxy: if False do not start the HTTP proxy
|
||||
:param bool stdout: if True stream all subprocesses stdout to standard stdout
|
||||
:param str dns_server: if set, Pebble/Boulder will use it to resolve domains
|
||||
:param int http_01_port: port to use for http-01 validation; currently
|
||||
only supported for pebble without an HTTP proxy
|
||||
"""
|
||||
self._construct_acme_xdist(acme_server, nodes)
|
||||
|
||||
self._acme_type = 'pebble' if acme_server == 'pebble' else 'boulder'
|
||||
self._proxy = http_proxy
|
||||
self._workspace = tempfile.mkdtemp()
|
||||
self._processes = []
|
||||
self._processes = [] # type: List[subprocess.Popen]
|
||||
self._stdout = sys.stdout if stdout else open(os.devnull, 'w')
|
||||
self._dns_server = dns_server
|
||||
self._http_01_port = http_01_port
|
||||
if http_01_port != DEFAULT_HTTP_01_PORT:
|
||||
if self._acme_type != 'pebble' or self._proxy:
|
||||
raise ValueError('setting http_01_port is not currently supported '
|
||||
'with boulder or the HTTP proxy')
|
||||
|
||||
def start(self):
|
||||
"""Start the test stack"""
|
||||
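Since the docstring and constructor above describe ACMEServer as a context manager whose __enter__ yields acme_xdist (as main() later in this file relies on), a minimal standalone usage sketch looks like this; the module path and the DNS address are assumptions based on the file locations and constants shown elsewhere in this diff:

from certbot_integration_tests.utils.acme_server import ACMEServer

server = ACMEServer('pebble', ['gw0', 'gw1'], http_proxy=False, stdout=True,
                    dns_server='127.0.0.1:45953')
with server as acme_xdist:
    # acme_xdist maps each node name to its assigned ports, plus the directory URL.
    print(acme_xdist['directory_url'])
    print(acme_xdist['http_port']['gw0'])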
@@ -103,26 +116,34 @@ class ACMEServer(object):
|
||||
"""Generate and return the acme_xdist dict"""
|
||||
acme_xdist = {'acme_server': acme_server, 'challtestsrv_port': CHALLTESTSRV_PORT}
|
||||
|
||||
# Directory and ACME port are set implicitly in the docker-compose.yml files of Boulder/Pebble.
|
||||
# Directory and ACME port are set implicitly in the docker-compose.yml
|
||||
# files of Boulder/Pebble.
|
||||
if acme_server == 'pebble':
|
||||
acme_xdist['directory_url'] = PEBBLE_DIRECTORY_URL
|
||||
else: # boulder
|
||||
acme_xdist['directory_url'] = BOULDER_V2_DIRECTORY_URL \
|
||||
if acme_server == 'boulder-v2' else BOULDER_V1_DIRECTORY_URL
|
||||
|
||||
acme_xdist['http_port'] = {node: port for (node, port)
|
||||
in zip(nodes, range(5200, 5200 + len(nodes)))}
|
||||
acme_xdist['https_port'] = {node: port for (node, port)
|
||||
in zip(nodes, range(5100, 5100 + len(nodes)))}
|
||||
acme_xdist['other_port'] = {node: port for (node, port)
|
||||
in zip(nodes, range(5300, 5300 + len(nodes)))}
|
||||
acme_xdist['http_port'] = {
|
||||
node: port for (node, port) in # pylint: disable=unnecessary-comprehension
|
||||
zip(nodes, range(5200, 5200 + len(nodes)))
|
||||
}
|
||||
acme_xdist['https_port'] = {
|
||||
node: port for (node, port) in # pylint: disable=unnecessary-comprehension
|
||||
zip(nodes, range(5100, 5100 + len(nodes)))
|
||||
}
|
||||
acme_xdist['other_port'] = {
|
||||
node: port for (node, port) in # pylint: disable=unnecessary-comprehension
|
||||
zip(nodes, range(5300, 5300 + len(nodes)))
|
||||
}
|
||||
|
||||
self.acme_xdist = acme_xdist
|
||||
|
||||
def _prepare_pebble_server(self):
|
||||
"""Configure and launch the Pebble server"""
|
||||
print('=> Starting pebble instance deployment...')
|
||||
pebble_path, challtestsrv_path, pebble_config_path = pebble_artifacts.fetch(self._workspace)
|
||||
pebble_artifacts_rv = pebble_artifacts.fetch(self._workspace, self._http_01_port)
|
||||
pebble_path, challtestsrv_path, pebble_config_path = pebble_artifacts_rv
|
||||
|
||||
# Configure Pebble at full speed (PEBBLE_VA_NOSLEEP=1) and not randomly refusing valid
|
||||
# nonce (PEBBLE_WFE_NONCEREJECT=0) to have a stable test environment.
|
||||
@@ -130,19 +151,25 @@ class ACMEServer(object):
|
||||
environ['PEBBLE_VA_NOSLEEP'] = '1'
|
||||
environ['PEBBLE_WFE_NONCEREJECT'] = '0'
|
||||
environ['PEBBLE_AUTHZREUSE'] = '100'
|
||||
environ['PEBBLE_ALTERNATE_ROOTS'] = str(PEBBLE_ALTERNATE_ROOTS)
|
||||
|
||||
if self._dns_server:
|
||||
dns_server = self._dns_server
|
||||
else:
|
||||
dns_server = '127.0.0.1:8053'
|
||||
self._launch_process(
|
||||
[challtestsrv_path, '-management', ':{0}'.format(CHALLTESTSRV_PORT),
|
||||
'-defaultIPv6', '""', '-defaultIPv4', '127.0.0.1', '-http01', '""',
|
||||
'-tlsalpn01', '""', '-https01', '""'])
|
||||
|
||||
self._launch_process(
|
||||
[pebble_path, '-config', pebble_config_path, '-dnsserver', '127.0.0.1:8053', '-strict'],
|
||||
[pebble_path, '-config', pebble_config_path, '-dnsserver', dns_server, '-strict'],
|
||||
env=environ)
|
||||
|
||||
self._launch_process(
|
||||
[challtestsrv_path, '-management', ':{0}'.format(CHALLTESTSRV_PORT), '-defaultIPv6', '""',
|
||||
'-defaultIPv4', '127.0.0.1', '-http01', '""', '-tlsalpn01', '""', '-https01', '""'])
|
||||
|
||||
# pebble_ocsp_server is imported here and not at the top of module in order to avoid a useless
|
||||
# ImportError, in the case where cryptography dependency is too old to support ocsp, but
|
||||
# Boulder is used instead of Pebble, so pebble_ocsp_server is not used. This is the typical
|
||||
# situation of integration-certbot-oldest tox testenv.
|
||||
# pebble_ocsp_server is imported here and not at the top of module in order to avoid a
|
||||
# useless ImportError, in the case where cryptography dependency is too old to support
|
||||
# ocsp, but Boulder is used instead of Pebble, so pebble_ocsp_server is not used. This is
|
||||
# the typical situation of integration-certbot-oldest tox testenv.
|
||||
from certbot_integration_tests.utils import pebble_ocsp_server
|
||||
self._launch_process([sys.executable, pebble_ocsp_server.__file__])
|
||||
|
||||
@@ -166,6 +193,15 @@ class ACMEServer(object):
|
||||
os.rename(join(instance_path, 'test/rate-limit-policies-b.yml'),
|
||||
join(instance_path, 'test/rate-limit-policies.yml'))
|
||||
|
||||
if self._dns_server:
|
||||
# Change Boulder config to use the provided DNS server
|
||||
for suffix in ["", "-remote-a", "-remote-b"]:
|
||||
with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'r') as f:
|
||||
config = json.loads(f.read())
|
||||
config['va']['dnsResolvers'] = [self._dns_server]
|
||||
with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'w') as f:
|
||||
f.write(json.dumps(config, indent=2, separators=(',', ': ')))
|
||||
|
||||
try:
|
||||
# Launch the Boulder server
|
||||
self._launch_process(['docker-compose', 'up', '--force-recreate'], cwd=instance_path)
|
||||
@@ -174,14 +210,18 @@ class ACMEServer(object):
|
||||
print('=> Waiting for boulder instance to respond...')
|
||||
misc.check_until_timeout(self.acme_xdist['directory_url'], attempts=300)
|
||||
|
||||
# Configure challtestsrv to answer any A record request with ip of the docker host.
|
||||
response = requests.post('http://localhost:{0}/set-default-ipv4'.format(CHALLTESTSRV_PORT),
|
||||
json={'ip': '10.77.77.1'})
|
||||
response.raise_for_status()
|
||||
if not self._dns_server:
|
||||
# Configure challtestsrv to answer any A record request with ip of the docker host.
|
||||
response = requests.post('http://localhost:{0}/set-default-ipv4'.format(
|
||||
CHALLTESTSRV_PORT), json={'ip': '10.77.77.1'}
|
||||
)
|
||||
response.raise_for_status()
|
||||
except BaseException:
|
||||
# If we failed to set up boulder, print its logs.
|
||||
print('=> Boulder setup failed. Boulder logs are:')
|
||||
process = self._launch_process(['docker-compose', 'logs'], cwd=instance_path, force_stderr=True)
|
||||
process = self._launch_process([
|
||||
'docker-compose', 'logs'], cwd=instance_path, force_stderr=True
|
||||
)
|
||||
process.wait()
|
||||
raise
|
||||
|
||||
@@ -192,7 +232,7 @@ class ACMEServer(object):
|
||||
print('=> Configuring the HTTP proxy...')
|
||||
mapping = {r'.+\.{0}\.wtf'.format(node): 'http://127.0.0.1:{0}'.format(port)
|
||||
for node, port in self.acme_xdist['http_port'].items()}
|
||||
command = [sys.executable, proxy.__file__, str(HTTP_01_PORT), json.dumps(mapping)]
|
||||
command = [sys.executable, proxy.__file__, str(DEFAULT_HTTP_01_PORT), json.dumps(mapping)]
|
||||
self._launch_process(command)
|
||||
print('=> Finished configuring the HTTP proxy.')
|
||||
|
||||
@@ -201,20 +241,34 @@ class ACMEServer(object):
|
||||
if not env:
|
||||
env = os.environ
|
||||
stdout = sys.stderr if force_stderr else self._stdout
|
||||
process = subprocess.Popen(command, stdout=stdout, stderr=subprocess.STDOUT, cwd=cwd, env=env)
|
||||
process = subprocess.Popen(
|
||||
command, stdout=stdout, stderr=subprocess.STDOUT, cwd=cwd, env=env
|
||||
)
|
||||
self._processes.append(process)
|
||||
return process
|
||||
|
||||
|
||||
def main():
|
||||
args = sys.argv[1:]
|
||||
server_type = args[0] if args else 'pebble'
|
||||
possible_values = ('pebble', 'boulder-v1', 'boulder-v2')
|
||||
if server_type not in possible_values:
|
||||
raise ValueError('Invalid server value {0}, should be one of {1}'
|
||||
.format(server_type, possible_values))
|
||||
# pylint: disable=missing-function-docstring
|
||||
parser = argparse.ArgumentParser(
|
||||
description='CLI tool to start a local instance of Pebble or Boulder CA server.')
|
||||
parser.add_argument('--server-type', '-s',
|
||||
choices=['pebble', 'boulder-v1', 'boulder-v2'], default='pebble',
|
||||
help='type of CA server to start: can be Pebble or Boulder '
|
||||
'(in ACMEv1 or ACMEv2 mode), Pebble is used if not set.')
|
||||
parser.add_argument('--dns-server', '-d',
|
||||
help='specify the DNS server as `IP:PORT` to be used by '
|
||||
'Pebble; if not specified, a local mock DNS server will be used to '
|
||||
'resolve domains to localhost.')
|
||||
parser.add_argument('--http-01-port', type=int, default=DEFAULT_HTTP_01_PORT,
|
||||
help='specify the port to use for http-01 validation; '
|
||||
'this is currently only supported for Pebble.')
|
||||
args = parser.parse_args()
|
||||
|
||||
acme_server = ACMEServer(server_type, [], http_proxy=False, stdout=True)
|
||||
acme_server = ACMEServer(
|
||||
args.server_type, [], http_proxy=False, stdout=True,
|
||||
dns_server=args.dns_server, http_01_port=args.http_01_port,
|
||||
)
|
||||
|
||||
try:
|
||||
with acme_server as acme_xdist:
|
||||
|
||||
@@ -2,12 +2,13 @@
|
||||
"""Module to call certbot in test mode"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
import certbot_integration_tests
|
||||
# pylint: disable=wildcard-import,unused-wildcard-import
|
||||
from certbot_integration_tests.utils.constants import *
|
||||
|
||||
|
||||
@@ -35,6 +36,8 @@ def certbot_test(certbot_args, directory_url, http_01_port, tls_alpn_01_port,
|
||||
|
||||
|
||||
def _prepare_environ(workspace):
|
||||
# pylint: disable=missing-function-docstring
|
||||
|
||||
new_environ = os.environ.copy()
|
||||
new_environ['TMPDIR'] = workspace
|
||||
|
||||
@@ -58,8 +61,13 @@ def _prepare_environ(workspace):
|
||||
# certbot_integration_tests.__file__ is:
|
||||
# '/path/to/certbot/certbot-ci/certbot_integration_tests/__init__.pyc'
|
||||
# ... and we want '/path/to/certbot'
|
||||
certbot_root = os.path.dirname(os.path.dirname(os.path.dirname(certbot_integration_tests.__file__)))
|
||||
python_paths = [path for path in new_environ['PYTHONPATH'].split(':') if path != certbot_root]
|
||||
certbot_root = os.path.dirname(os.path.dirname(
|
||||
os.path.dirname(certbot_integration_tests.__file__))
|
||||
)
|
||||
python_paths = [
|
||||
path for path in new_environ['PYTHONPATH'].split(':')
|
||||
if path != certbot_root
|
||||
]
|
||||
new_environ['PYTHONPATH'] = ':'.join(python_paths)
|
||||
|
||||
return new_environ
|
||||
@@ -70,7 +78,8 @@ def _compute_additional_args(workspace, environ, force_renew):
|
||||
output = subprocess.check_output(['certbot', '--version'],
|
||||
universal_newlines=True, stderr=subprocess.STDOUT,
|
||||
cwd=workspace, env=environ)
|
||||
version_str = output.split(' ')[1].strip() # Typical response is: output = 'certbot 0.31.0.dev0'
|
||||
# Typical response is: output = 'certbot 0.31.0.dev0'
|
||||
version_str = output.split(' ')[1].strip()
|
||||
if LooseVersion(version_str) >= LooseVersion('0.30.0'):
|
||||
additional_args.append('--no-random-sleep-on-renew')
|
||||
|
||||
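A worked example of the version check above, using the sample output quoted in the code comment:

from distutils.version import LooseVersion

output = 'certbot 0.31.0.dev0'              # sample output quoted in the comment above
version_str = output.split(' ')[1].strip()  # -> '0.31.0.dev0'
assert LooseVersion(version_str) >= LooseVersion('0.30.0')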
@@ -113,11 +122,12 @@ def _prepare_args_env(certbot_args, directory_url, http_01_port, tls_alpn_01_por
|
||||
|
||||
|
||||
def main():
|
||||
# pylint: disable=missing-function-docstring
|
||||
args = sys.argv[1:]
|
||||
|
||||
# Default config is pebble
|
||||
directory_url = os.environ.get('SERVER', PEBBLE_DIRECTORY_URL)
|
||||
http_01_port = int(os.environ.get('HTTP_01_PORT', HTTP_01_PORT))
|
||||
http_01_port = int(os.environ.get('HTTP_01_PORT', DEFAULT_HTTP_01_PORT))
|
||||
tls_alpn_01_port = int(os.environ.get('TLS_ALPN_01_PORT', TLS_ALPN_01_PORT))
|
||||
|
||||
# Execution of certbot in a self-contained workspace
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
"""Some useful constants to use throughout certbot-ci integration tests"""
|
||||
HTTP_01_PORT = 5002
|
||||
DEFAULT_HTTP_01_PORT = 5002
|
||||
TLS_ALPN_01_PORT = 5001
|
||||
CHALLTESTSRV_PORT = 8055
|
||||
BOULDER_V1_DIRECTORY_URL = 'http://localhost:4000/directory'
|
||||
@@ -7,3 +7,4 @@ BOULDER_V2_DIRECTORY_URL = 'http://localhost:4001/directory'
|
||||
PEBBLE_DIRECTORY_URL = 'https://localhost:14000/dir'
|
||||
PEBBLE_MANAGEMENT_URL = 'https://localhost:15000'
|
||||
MOCK_OCSP_SERVER_PORT = 4002
|
||||
PEBBLE_ALTERNATE_ROOTS = 2
|
||||
|
||||
certbot-ci/certbot_integration_tests/utils/dns_server.py (new file, 155 lines)
@@ -0,0 +1,155 @@
|
||||
#!/usr/bin/env python
|
||||
"""Module to setup an RFC2136-capable DNS server"""
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
from pkg_resources import resource_filename
|
||||
|
||||
BIND_DOCKER_IMAGE = "internetsystemsconsortium/bind9:9.16"
|
||||
BIND_BIND_ADDRESS = ("127.0.0.1", 45953)
|
||||
|
||||
# A TCP DNS message which is a query for '. CH A' transaction ID 0xcb37. This is used
|
||||
# by _wait_until_ready to check that BIND is responding without depending on dnspython.
|
||||
BIND_TEST_QUERY = bytearray.fromhex("0011cb37000000010000000000000000010003")
|
||||
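To make the hard-coded bytes above easier to audit, here is a short sketch of how they decode under RFC 1035 TCP framing; only the transaction ID is actually compared by _wait_until_ready():

import struct

query = bytearray.fromhex("0011cb37000000010000000000000000010003")
(msg_len,) = struct.unpack("!H", query[0:2])   # TCP length prefix: 17 bytes follow
(txn_id,) = struct.unpack("!H", query[2:4])    # transaction ID 0xcb37, echoed in the reply
(qdcount,) = struct.unpack("!H", query[6:8])   # exactly one question ('. CH A')
print(msg_len, hex(txn_id), qdcount)           # 17 0xcb37 1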
|
||||
|
||||
class DNSServer(object):
|
||||
"""
|
||||
DNSServer configures and handles the lifetime of an RFC2136-capable server.
|
||||
DNSServer provides access to the dns_xdist parameter, listing the address and port
|
||||
to use for each pytest node.
|
||||
|
||||
At this time, DNSServer should only be used with a single node, but may be expanded in
|
||||
future to support parallelization (https://github.com/certbot/certbot/issues/8455).
|
||||
"""
|
||||
|
||||
def __init__(self, unused_nodes, show_output=False):
|
||||
"""
|
||||
Create a DNSServer instance.
|
||||
:param list nodes: list of node names that will be setup by pytest xdist
|
||||
:param bool show_output: if True, print the output of the DNS server
|
||||
"""
|
||||
|
||||
self.bind_root = tempfile.mkdtemp()
|
||||
|
||||
self.process = None # type: subprocess.Popen
|
||||
|
||||
self.dns_xdist = {"address": BIND_BIND_ADDRESS[0], "port": BIND_BIND_ADDRESS[1]}
|
||||
|
||||
# Unfortunately the BIND9 image forces everything to stderr with -g and we can't
|
||||
# modify the verbosity.
|
||||
self._output = sys.stderr if show_output else open(os.devnull, "w")
|
||||
|
||||
def start(self):
|
||||
"""Start the DNS server"""
|
||||
try:
|
||||
self._configure_bind()
|
||||
self._start_bind()
|
||||
except:
|
||||
self.stop()
|
||||
raise
|
||||
|
||||
def stop(self):
|
||||
"""Stop the DNS server, and clean its resources"""
|
||||
if self.process:
|
||||
try:
|
||||
self.process.terminate()
|
||||
self.process.wait()
|
||||
except BaseException as e:
|
||||
print("BIND9 did not stop cleanly: {}".format(e), file=sys.stderr)
|
||||
|
||||
shutil.rmtree(self.bind_root, ignore_errors=True)
|
||||
|
||||
if self._output != sys.stderr:
|
||||
self._output.close()
|
||||
|
||||
def _configure_bind(self):
|
||||
"""Configure the BIND9 server based on the prebaked configuration"""
|
||||
bind_conf_src = resource_filename(
|
||||
"certbot_integration_tests", "assets/bind-config"
|
||||
)
|
||||
for directory in ("conf", "zones"):
|
||||
shutil.copytree(
|
||||
os.path.join(bind_conf_src, directory), os.path.join(self.bind_root, directory)
|
||||
)
|
||||
|
||||
def _start_bind(self):
|
||||
"""Launch the BIND9 server as a Docker container"""
|
||||
addr_str = "{}:{}".format(BIND_BIND_ADDRESS[0], BIND_BIND_ADDRESS[1])
|
||||
self.process = subprocess.Popen(
|
||||
[
|
||||
"docker",
|
||||
"run",
|
||||
"--rm",
|
||||
"-p",
|
||||
"{}:53/udp".format(addr_str),
|
||||
"-p",
|
||||
"{}:53/tcp".format(addr_str),
|
||||
"-v",
|
||||
"{}/conf:/etc/bind".format(self.bind_root),
|
||||
"-v",
|
||||
"{}/zones:/var/lib/bind".format(self.bind_root),
|
||||
BIND_DOCKER_IMAGE,
|
||||
],
|
||||
stdout=self._output,
|
||||
stderr=self._output,
|
||||
)
|
||||
|
||||
if self.process.poll():
|
||||
raise ValueError("BIND9 server stopped unexpectedly")
|
||||
|
||||
try:
|
||||
self._wait_until_ready()
|
||||
except:
|
||||
# The container might be running even if we think it isn't
|
||||
self.stop()
|
||||
raise
|
||||
|
||||
def _wait_until_ready(self, attempts=30):
|
||||
# type: (int) -> None
|
||||
"""
|
||||
Polls the DNS server over TCP until it gets a response, or until
|
||||
it runs out of attempts and raises a ValueError.
|
||||
The DNS response message must match the txn_id of the DNS query message,
|
||||
but otherwise the contents are ignored.
|
||||
:param int attempts: The number of attempts to make.
|
||||
"""
|
||||
for _ in range(attempts):
|
||||
if self.process.poll():
|
||||
raise ValueError("BIND9 server stopped unexpectedly")
|
||||
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
sock.settimeout(5.0)
|
||||
try:
|
||||
sock.connect(BIND_BIND_ADDRESS)
|
||||
sock.sendall(BIND_TEST_QUERY)
|
||||
buf = sock.recv(1024)
|
||||
# We should receive a DNS message with the same tx_id
|
||||
if buf and len(buf) > 4 and buf[2:4] == BIND_TEST_QUERY[2:4]:
|
||||
return
|
||||
# If we got a response but it wasn't the one we wanted, wait a little
|
||||
time.sleep(1)
|
||||
except: # pylint: disable=bare-except
|
||||
# If there was a network error, wait a little
|
||||
time.sleep(1)
|
||||
finally:
|
||||
sock.close()
|
||||
|
||||
raise ValueError(
|
||||
"Gave up waiting for DNS server {} to respond".format(BIND_BIND_ADDRESS)
|
||||
)
|
||||
|
||||
def __enter__(self):
|
||||
self.start()
|
||||
return self.dns_xdist
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.stop()
|
||||
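For completeness, the __enter__/__exit__ pair above also makes DNSServer usable on its own as a context manager; a minimal sketch (requires Docker, since the server runs the BIND9 image named at the top of this file):

from certbot_integration_tests.utils.dns_server import DNSServer

with DNSServer([], show_output=True) as dns_xdist:
    # dns_xdist carries the address/port later handed to Pebble/Boulder and to
    # the RFC2136 credentials templates.
    print('{address}:{port}'.format(**dns_xdist))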
@@ -19,16 +19,31 @@ from cryptography.hazmat.primitives.asymmetric import ec
|
||||
from cryptography.hazmat.primitives.serialization import Encoding
|
||||
from cryptography.hazmat.primitives.serialization import NoEncryption
|
||||
from cryptography.hazmat.primitives.serialization import PrivateFormat
|
||||
from cryptography.x509 import load_pem_x509_certificate
|
||||
from OpenSSL import crypto
|
||||
import pkg_resources
|
||||
import requests
|
||||
from six.moves import SimpleHTTPServer
|
||||
from six.moves import socketserver
|
||||
|
||||
from certbot_integration_tests.utils.constants import \
|
||||
PEBBLE_ALTERNATE_ROOTS, PEBBLE_MANAGEMENT_URL
|
||||
|
||||
RSA_KEY_TYPE = 'rsa'
|
||||
ECDSA_KEY_TYPE = 'ecdsa'
|
||||
|
||||
|
||||
def _suppress_x509_verification_warnings():
|
||||
try:
|
||||
import urllib3
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
except ImportError:
|
||||
# Handle old versions of request with vendorized urllib3
|
||||
# pylint: disable=no-member
|
||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
||||
|
||||
|
||||
def check_until_timeout(url, attempts=30):
|
||||
"""
|
||||
Wait and block until given url responds with status 200, or raise an exception
|
||||
@@ -37,14 +52,7 @@ def check_until_timeout(url, attempts=30):
|
||||
:param int attempts: the number of times to try to connect to the URL
|
||||
:raise ValueError: exception raised if unable to reach the URL
|
||||
"""
|
||||
try:
|
||||
import urllib3
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
except ImportError:
|
||||
# Handle old versions of request with vendorized urllib3
|
||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
||||
|
||||
_suppress_x509_verification_warnings()
|
||||
for _ in range(attempts):
|
||||
time.sleep(1)
|
||||
try:
|
||||
@@ -249,7 +257,8 @@ def generate_csr(domains, key_path, csr_path, key_type=RSA_KEY_TYPE):
|
||||
|
||||
def read_certificate(cert_path):
|
||||
"""
|
||||
Load the certificate from the provided path, and return a human readable version of it (TEXT mode).
|
||||
Load the certificate from the provided path, and return a human readable version
|
||||
of it (TEXT mode).
|
||||
:param str cert_path: the path to the certificate
|
||||
:returns: the TEXT version of the certificate, as it would be displayed by openssl binary
|
||||
"""
|
||||
@@ -272,16 +281,21 @@ def load_sample_data_path(workspace):
|
||||
shutil.copytree(original, copied, symlinks=True)
|
||||
|
||||
if os.name == 'nt':
|
||||
# Fix the symlinks on Windows since GIT is not creating them upon checkout
|
||||
for lineage in ['a.encryption-example.com', 'b.encryption-example.com']:
|
||||
# Fix the symlinks on Windows if GIT is not configured to create them upon checkout
|
||||
for lineage in [
|
||||
'a.encryption-example.com',
|
||||
'b.encryption-example.com',
|
||||
'c.encryption-example.com',
|
||||
]:
|
||||
current_live = os.path.join(copied, 'live', lineage)
|
||||
for name in os.listdir(current_live):
|
||||
if name != 'README':
|
||||
current_file = os.path.join(current_live, name)
|
||||
with open(current_file) as file_h:
|
||||
src = file_h.read()
|
||||
os.unlink(current_file)
|
||||
os.symlink(os.path.join(current_live, src), current_file)
|
||||
if not os.path.islink(current_file):
|
||||
with open(current_file) as file_h:
|
||||
src = file_h.read()
|
||||
os.unlink(current_file)
|
||||
os.symlink(os.path.join(current_live, src), current_file)
|
||||
|
||||
return copied
|
||||
|
||||
@@ -299,3 +313,23 @@ def echo(keyword, path=None):
|
||||
.format(keyword))
|
||||
return '{0} -c "from __future__ import print_function; print(\'{1}\')"{2}'.format(
|
||||
os.path.basename(sys.executable), keyword, ' >> "{0}"'.format(path) if path else '')
|
||||
|
||||
|
||||
def get_acme_issuers(context):
|
||||
"""Gets the list of one or more issuer certificates from the ACME server used by the
|
||||
context.
|
||||
:param context: the testing context.
|
||||
:return: the `list of x509.Certificate` representing the list of issuers.
|
||||
"""
|
||||
# TODO: in fact, Boulder has alternate chains in config-next/, just not yet in config/.
|
||||
if context.acme_server != "pebble":
|
||||
raise NotImplementedError()
|
||||
|
||||
_suppress_x509_verification_warnings()
|
||||
|
||||
issuers = []
|
||||
for i in range(PEBBLE_ALTERNATE_ROOTS + 1):
|
||||
request = requests.get(PEBBLE_MANAGEMENT_URL + '/intermediates/{}'.format(i), verify=False)
|
||||
issuers.append(load_pem_x509_certificate(request.content, default_backend()))
|
||||
|
||||
return issuers
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
# pylint: disable=missing-module-docstring
|
||||
|
||||
import json
|
||||
import os
|
||||
import stat
|
||||
@@ -5,18 +7,19 @@ import stat
|
||||
import pkg_resources
|
||||
import requests
|
||||
|
||||
from certbot_integration_tests.utils.constants import MOCK_OCSP_SERVER_PORT
|
||||
from certbot_integration_tests.utils.constants import DEFAULT_HTTP_01_PORT, MOCK_OCSP_SERVER_PORT
|
||||
|
||||
PEBBLE_VERSION = 'v2.3.0'
|
||||
ASSETS_PATH = pkg_resources.resource_filename('certbot_integration_tests', 'assets')
|
||||
|
||||
|
||||
def fetch(workspace):
|
||||
def fetch(workspace, http_01_port=DEFAULT_HTTP_01_PORT):
|
||||
# pylint: disable=missing-function-docstring
|
||||
suffix = 'linux-amd64' if os.name != 'nt' else 'windows-amd64.exe'
|
||||
|
||||
pebble_path = _fetch_asset('pebble', suffix)
|
||||
challtestsrv_path = _fetch_asset('pebble-challtestsrv', suffix)
|
||||
pebble_config_path = _build_pebble_config(workspace)
|
||||
pebble_config_path = _build_pebble_config(workspace, http_01_port)
|
||||
|
||||
return pebble_path, challtestsrv_path, pebble_config_path
|
||||
|
||||
@@ -35,7 +38,7 @@ def _fetch_asset(asset, suffix):
|
||||
return asset_path
|
||||
|
||||
|
||||
def _build_pebble_config(workspace):
|
||||
def _build_pebble_config(workspace, http_01_port):
|
||||
config_path = os.path.join(workspace, 'pebble-config.json')
|
||||
with open(config_path, 'w') as file_h:
|
||||
file_h.write(json.dumps({
|
||||
@@ -44,7 +47,7 @@ def _build_pebble_config(workspace):
|
||||
'managementListenAddress': '0.0.0.0:15000',
|
||||
'certificate': os.path.join(ASSETS_PATH, 'cert.pem'),
|
||||
'privateKey': os.path.join(ASSETS_PATH, 'key.pem'),
|
||||
'httpPort': 5002,
|
||||
'httpPort': http_01_port,
|
||||
'tlsPort': 5001,
|
||||
'ocspResponderURL': 'http://127.0.0.1:{0}'.format(MOCK_OCSP_SERVER_PORT),
|
||||
},
|
||||
|
||||
@@ -21,6 +21,7 @@ from certbot_integration_tests.utils.misc import GracefulTCPServer
|
||||
|
||||
|
||||
class _ProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
# pylint: disable=missing-function-docstring
|
||||
def do_POST(self):
|
||||
request = requests.get(PEBBLE_MANAGEMENT_URL + '/intermediate-keys/0', verify=False)
|
||||
issuer_key = serialization.load_pem_private_key(request.content, None, default_backend())
|
||||
@@ -35,20 +36,28 @@ class _ProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
|
||||
ocsp_request = ocsp.load_der_ocsp_request(self.rfile.read(content_len))
|
||||
response = requests.get('{0}/cert-status-by-serial/{1}'.format(
|
||||
PEBBLE_MANAGEMENT_URL, str(hex(ocsp_request.serial_number)).replace('0x', '')), verify=False)
|
||||
PEBBLE_MANAGEMENT_URL, str(hex(ocsp_request.serial_number)).replace('0x', '')),
|
||||
verify=False
|
||||
)
|
||||
|
||||
if not response.ok:
|
||||
ocsp_response = ocsp.OCSPResponseBuilder.build_unsuccessful(ocsp.OCSPResponseStatus.UNAUTHORIZED)
|
||||
ocsp_response = ocsp.OCSPResponseBuilder.build_unsuccessful(
|
||||
ocsp.OCSPResponseStatus.UNAUTHORIZED
|
||||
)
|
||||
else:
|
||||
data = response.json()
|
||||
|
||||
now = datetime.datetime.utcnow()
|
||||
cert = x509.load_pem_x509_certificate(data['Certificate'].encode(), default_backend())
|
||||
if data['Status'] != 'Revoked':
|
||||
ocsp_status, revocation_time, revocation_reason = ocsp.OCSPCertStatus.GOOD, None, None
|
||||
ocsp_status = ocsp.OCSPCertStatus.GOOD
|
||||
revocation_time = None
|
||||
revocation_reason = None
|
||||
else:
|
||||
ocsp_status, revocation_reason = ocsp.OCSPCertStatus.REVOKED, x509.ReasonFlags.unspecified
|
||||
revoked_at = re.sub(r'( \+\d{4}).*$', r'\1', data['RevokedAt']) # "... +0000 UTC" => "+0000"
|
||||
ocsp_status = ocsp.OCSPCertStatus.REVOKED
|
||||
revocation_reason = x509.ReasonFlags.unspecified
|
||||
# "... +0000 UTC" => "+0000"
|
||||
revoked_at = re.sub(r'( \+\d{4}).*$', r'\1', data['RevokedAt'])
|
||||
revocation_time = parser.parse(revoked_at)
|
||||
|
||||
ocsp_response = ocsp.OCSPResponseBuilder().add_response(
|
||||
|
||||
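A short sketch of the RevokedAt normalisation above; the sample timestamp is illustrative, and only the "... +0000 UTC" shape is taken from the comment in the code:

import re
from dateutil import parser

revoked_at_raw = '2020-08-19 23:39:27 +0000 UTC'            # illustrative sample
revoked_at = re.sub(r'( \+\d{4}).*$', r'\1', revoked_at_raw)
print(revoked_at)                 # '2020-08-19 23:39:27 +0000'
print(parser.parse(revoked_at))   # 2020-08-19 23:39:27+00:00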
@@ -1,4 +1,6 @@
|
||||
#!/usr/bin/env python
|
||||
# pylint: disable=missing-module-docstring
|
||||
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
@@ -10,7 +12,9 @@ from certbot_integration_tests.utils.misc import GracefulTCPServer
|
||||
|
||||
|
||||
def _create_proxy(mapping):
|
||||
# pylint: disable=missing-function-docstring
|
||||
class ProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
# pylint: disable=missing-class-docstring
|
||||
def do_GET(self):
|
||||
headers = {key.lower(): value for key, value in self.headers.items()}
|
||||
backend = [backend for pattern, backend in mapping.items()
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from distutils.version import StrictVersion
|
||||
from distutils.version import LooseVersion
|
||||
import sys
|
||||
|
||||
from setuptools import __version__ as setuptools_version
|
||||
@@ -18,7 +18,7 @@ install_requires = [
|
||||
'python-dateutil',
|
||||
'pyyaml',
|
||||
'requests',
|
||||
'six',
|
||||
'six'
|
||||
]
|
||||
|
||||
# Add pywin32 on Windows platforms to handle low-level system calls.
|
||||
@@ -26,7 +26,7 @@ install_requires = [
|
||||
# However environment markers are supported only with setuptools >= 36.2.
|
||||
# So this dependency is not added for old Linux distributions with old setuptools,
|
||||
# in order to allow these systems to build certbot from sources.
|
||||
if StrictVersion(setuptools_version) >= StrictVersion('36.2'):
|
||||
if LooseVersion(setuptools_version) >= LooseVersion('36.2'):
|
||||
install_requires.append("pywin32>=224 ; sys_platform == 'win32'")
|
||||
elif 'bdist_wheel' in sys.argv[1:]:
|
||||
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
|
||||
@@ -40,19 +40,17 @@ setup(
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
|
||||
python_requires='>=3.6',
|
||||
classifiers=[
|
||||
'Development Status :: 3 - Alpha',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
],
|
||||
|
||||
certbot-ci/snap_integration_tests/__init__.py (new file, 0 lines)
certbot-ci/snap_integration_tests/conftest.py (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
"""
|
||||
General conftest for pytest execution of all integration tests lying
in the snap_integration_tests package.
As stated by the pytest documentation, a conftest module is used to set
a directory-specific configuration using built-in pytest hooks.
|
||||
|
||||
See https://docs.pytest.org/en/latest/reference.html#hook-reference
|
||||
"""
|
||||
import glob
|
||||
import os
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
"""
|
||||
Standard pytest hook to add options to the pytest parser.
|
||||
:param parser: current pytest parser that will be used on the CLI
|
||||
"""
|
||||
parser.addoption('--snap-folder', required=True,
|
||||
help='set the folder path where snaps to test are located')
|
||||
parser.addoption('--snap-arch', default='amd64',
|
||||
help='set the architecture to test (default: amd64)')
|
||||
parser.addoption('--allow-persistent-changes', action='store_true',
|
||||
help='must be set to confirm that the test will make persistent changes on this machine')
|
||||
|
||||
|
||||
def pytest_configure(config):
|
||||
"""
|
||||
Standard pytest hook used to add a configuration logic for each node of a pytest run.
|
||||
:param config: the current pytest configuration
|
||||
"""
|
||||
if not config.option.allow_persistent_changes:
|
||||
raise RuntimeError('This integration test would install the Certbot snap on your machine. '
|
||||
'Please run it again with the `--allow-persistent-changes` flag set to acknowledge.')
|
||||
|
||||
|
||||
def pytest_generate_tests(metafunc):
|
||||
"""
|
||||
Generate (multiple) parametrized calls to a test function.
|
||||
"""
|
||||
if "dns_snap_path" in metafunc.fixturenames:
|
||||
snap_arch = metafunc.config.getoption('snap_arch')
|
||||
snap_folder = metafunc.config.getoption('snap_folder')
|
||||
snap_dns_path_list = glob.glob(os.path.join(snap_folder,
|
||||
'certbot-dns-*_{0}.snap'.format(snap_arch)))
|
||||
metafunc.parametrize("dns_snap_path", snap_dns_path_list)
|
||||
certbot-ci/snap_integration_tests/dns_tests/test_main.py (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env python3
|
||||
import pytest
|
||||
import subprocess
|
||||
import glob
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True, scope="module")
|
||||
def install_certbot_snap(request):
|
||||
with pytest.raises(Exception):
|
||||
subprocess.check_call(['certbot', '--version'])
|
||||
try:
|
||||
snap_folder = request.config.getoption("snap_folder")
|
||||
snap_arch = request.config.getoption("snap_arch")
|
||||
snap_path = glob.glob(os.path.join(snap_folder, 'certbot_*_{0}.snap'.format(snap_arch)))[0]
|
||||
subprocess.check_call(['snap', 'install', '--classic', '--dangerous', snap_path])
|
||||
subprocess.check_call(['certbot', '--version'])
|
||||
yield
|
||||
finally:
|
||||
subprocess.call(['snap', 'remove', 'certbot'])
|
||||
|
||||
|
||||
def test_dns_plugin_install(dns_snap_path):
|
||||
"""
|
||||
Test that each DNS plugin Certbot snap can be installed
|
||||
and is usable with the Certbot snap.
|
||||
"""
|
||||
plugin_name = re.match(r'^certbot-(dns-\w+)_.*\.snap$',
|
||||
os.path.basename(dns_snap_path)).group(1)
|
||||
snap_name = 'certbot-{0}'.format(plugin_name)
|
||||
assert plugin_name not in subprocess.check_output(['certbot', 'plugins', '--prepare'],
|
||||
universal_newlines=True)
|
||||
|
||||
try:
|
||||
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
|
||||
subprocess.check_call(['snap', 'set', 'certbot', 'trust-plugin-with-root=ok'])
|
||||
subprocess.check_call(['snap', 'connect', 'certbot:plugin', snap_name])
|
||||
|
||||
assert plugin_name in subprocess.check_output(['certbot', 'plugins', '--prepare'],
|
||||
universal_newlines=True)
|
||||
subprocess.check_call(['snap', 'connect', snap_name + ':certbot-metadata',
|
||||
'certbot:certbot-metadata'])
|
||||
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
|
||||
finally:
|
||||
subprocess.call(['snap', 'remove', snap_name])
|
||||
@@ -9,8 +9,6 @@ See https://docs.pytest.org/en/latest/reference.html#hook-reference
|
||||
from __future__ import print_function
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
|
||||
|
||||
|
||||
|
||||
@@ -1,47 +1,18 @@
|
||||
FROM debian:stretch
|
||||
FROM debian:buster
|
||||
MAINTAINER Brad Warren <bmw@eff.org>
|
||||
|
||||
# no need to mkdir anything:
|
||||
# https://docs.docker.com/reference/builder/#copy
|
||||
# If <dest> doesn't exist, it is created along with all missing
|
||||
# directories in its path.
|
||||
RUN apt-get update && \
|
||||
apt install python3-dev python3-venv gcc libaugeas0 libssl-dev \
|
||||
libffi-dev ca-certificates openssl -y
|
||||
|
||||
# TODO: Install non-default Python versions for tox.
|
||||
# TODO: Install Apache/Nginx for plugin development.
|
||||
COPY letsencrypt-auto-source /opt/certbot/src/letsencrypt-auto-source
|
||||
RUN /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto --os-packages-only
|
||||
WORKDIR /opt/certbot/src
|
||||
|
||||
# the above is not likely to change, so by putting it further up the
|
||||
# Dockerfile we make sure we cache as much as possible
|
||||
# We copy all contents of the build directory to allow us to easily use
|
||||
# things like tools/venv3.py which expects all of our packages to be available.
|
||||
COPY . .
|
||||
|
||||
COPY certbot/setup.py certbot/README.rst certbot/CHANGELOG.md certbot/MANIFEST.in linter_plugin.py tox.cover.py tox.ini .pylintrc /opt/certbot/src/
|
||||
|
||||
# all above files are necessary for setup.py, however, package source
|
||||
# code directory has to be copied separately to a subdirectory...
|
||||
# https://docs.docker.com/reference/builder/#copy: "If <src> is a
|
||||
# directory, the entire contents of the directory are copied,
|
||||
# including filesystem metadata. Note: The directory itself is not
|
||||
# copied, just its contents." Order again matters, three files are far
|
||||
# more likely to be cached than the whole project directory
|
||||
|
||||
COPY certbot /opt/certbot/src/certbot/
|
||||
COPY acme /opt/certbot/src/acme/
|
||||
COPY certbot-apache /opt/certbot/src/certbot-apache/
|
||||
COPY certbot-nginx /opt/certbot/src/certbot-nginx/
|
||||
COPY certbot-compatibility-test /opt/certbot/src/certbot-compatibility-test/
|
||||
COPY tools /opt/certbot/src/tools
|
||||
|
||||
RUN VIRTUALENV_NO_DOWNLOAD=1 virtualenv -p python2 /opt/certbot/venv && \
|
||||
/opt/certbot/venv/bin/pip install -U setuptools && \
|
||||
/opt/certbot/venv/bin/pip install -U pip
|
||||
ENV PATH /opt/certbot/venv/bin:$PATH
|
||||
RUN /opt/certbot/venv/bin/python \
|
||||
/opt/certbot/src/tools/pip_install_editable.py \
|
||||
/opt/certbot/src/acme \
|
||||
/opt/certbot/src/certbot \
|
||||
/opt/certbot/src/certbot-apache \
|
||||
/opt/certbot/src/certbot-nginx \
|
||||
/opt/certbot/src/certbot-compatibility-test
|
||||
RUN tools/venv3.py
|
||||
ENV PATH /opt/certbot/src/venv3/bin:$PATH
|
||||
|
||||
# install in editable mode (-e) to save space: it's not possible to
|
||||
# "rm -rf /opt/certbot/src" (it's stays in the underlaying image);
|
||||
|
||||
@@ -57,7 +57,7 @@ class Proxy(configurators_common.Proxy):
|
||||
|
||||
def _prepare_configurator(self):
|
||||
"""Prepares the Apache plugin for testing"""
|
||||
for k in entrypoint.ENTRYPOINT.OS_DEFAULTS.keys():
|
||||
for k in entrypoint.ENTRYPOINT.OS_DEFAULTS:
|
||||
setattr(self.le_config, "apache_" + k,
|
||||
entrypoint.ENTRYPOINT.OS_DEFAULTS[k])
|
||||
|
||||
|
||||
@@ -69,11 +69,10 @@ class Proxy(object):
|
||||
shutil.copy(cert_path, cert)
|
||||
key = os.path.join(cert_and_key_dir, "key")
|
||||
shutil.copy(key_path, key)
|
||||
chain = None
|
||||
if chain_path:
|
||||
chain = os.path.join(cert_and_key_dir, "chain")
|
||||
shutil.copy(chain_path, chain)
|
||||
else:
|
||||
chain = None
|
||||
|
||||
return cert, key, chain
|
||||
|
||||
|
||||
@@ -102,8 +102,10 @@ def _create_achalls(plugin):
|
||||
prefs = plugin.get_chall_pref(domain)
|
||||
for chall_type in prefs:
|
||||
if chall_type == challenges.HTTP01:
|
||||
# challenges.HTTP01.TOKEN_SIZE is a float but os.urandom
|
||||
# expects an integer.
|
||||
chall = challenges.HTTP01(
|
||||
token=os.urandom(challenges.HTTP01.TOKEN_SIZE))
|
||||
token=os.urandom(int(challenges.HTTP01.TOKEN_SIZE)))
|
||||
challb = acme_util.chall_to_challb(
|
||||
chall, messages.STATUS_PENDING)
|
||||
achall = achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
@@ -137,7 +139,7 @@ def test_deploy_cert(plugin, temp_dir, domains):
|
||||
"""Tests deploy_cert returning True if the tests are successful"""
|
||||
cert = crypto_util.gen_ss_cert(util.KEY, domains)
|
||||
cert_path = os.path.join(temp_dir, "cert.pem")
|
||||
with open(cert_path, "w") as f:
|
||||
with open(cert_path, "wb") as f:
|
||||
f.write(OpenSSL.crypto.dump_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM, cert))
|
||||
|
||||
@@ -273,7 +275,7 @@ def _dirs_are_unequal(dir1, dir2):
|
||||
logger.error(str(dircmp.diff_files))
|
||||
return True
|
||||
|
||||
for subdir in dircmp.subdirs.itervalues():
|
||||
for subdir in dircmp.subdirs.values():
|
||||
dircmps.append(subdir)
|
||||
|
||||
return False
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICATCCAWoCCQCvMbKu4FHZ6zANBgkqhkiG9w0BAQsFADBFMQswCQYDVQQGEwJB
|
||||
VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0
|
||||
cyBQdHkgTHRkMB4XDTE1MDcyMzIzMjc1MFoXDTE2MDcyMjIzMjc1MFowRTELMAkG
|
||||
A1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0
|
||||
IFdpZGdpdHMgUHR5IEx0ZDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAws3o
|
||||
y46PMLM9Gr68pbex0MhdPr7Cq4rRe9BBpnOuHFdF35Ak0aPrzFwVzLlGOir94U11
|
||||
e5JYJDWJi+4FwLBRkOAfanjJ5GJ9BnEHSOdbtO+sv9uhbt+7iYOOUOngKSiJyUrM
|
||||
i1THAE+B1CenxZ1KHRQCke708zkK8jVuxLeIAOMCAwEAATANBgkqhkiG9w0BAQsF
|
||||
AAOBgQCC3LUP3MHk+IBmwHHZAZCX+6p4lop9SP6y6rDpWgnqEEeb9oFleHi2Rvzq
|
||||
7gxl6nS5AsaSzfAygJ3zWKTwVAZyU4GOQ8QTK+nHk3+LO1X4cDbUlQfm5+YuwKDa
|
||||
4LFKeovmrK6BiMLIc1J+MxUjLfCeVHYSdkZULTVXue0zif0BUA==
|
||||
MIICqDCCAZACCQCRC1UKg2WfRTANBgkqhkiG9w0BAQsFADAWMRQwEgYDVQQDDAtl
|
||||
eGFtcGxlLmNvbTAeFw0yMDA4MTkyMzM5MjdaFw0yMDA5MTgyMzM5MjdaMBYxFDAS
|
||||
BgNVBAMMC2V4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
|
||||
AQEA5tViHnJx4y+BbCb8Qz9uxsnqp1ynONR7ET/XL+M/jQ4xPeJg4L2uZ3YnogPc
|
||||
WdEoey17WXBg3KRqKfg+7PqIdGqVeonSCfXhD1HoGJRsThSUJ2fK3uoQ+zGgJTWR
|
||||
FYWa8Cb6xsuq0xaYtw2jaJBp+697Np60PWs4pY5FkadT50wZ0TYDnYt3NSAdn+Pt
|
||||
j3cpI4ocZZ2FLiOFn+UFOaRcetGtpnU1QwvmygD9tiL7kJ55B4CWGEv6DMRQk/UE
|
||||
eMUETzse1NkVlaxQ1TCd5iAfBTluiV30EpmmWa+OsXJWxCK+EEOkXD1r3CdXAldY
|
||||
nRYxJrn4udrFe69QX95wiRZNXwIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCJvtDC
|
||||
875CK7SKNf006gSciXsNPNSVORGPjc/5OQ23baK4iPhxftI4LGZN8773N14jWp3E
|
||||
QnQLL1gZ9/G+98SlI5lm97a4m4XZyNaULbmQwRKgI22H0F1AWbvsG0SppjnhVlJ+
|
||||
93ZUqSQBXgbXelFHSsNfk1AB6Kvo6+UvS8s0vkz7SfkPOZGx0b+3RJSJZnZHvYih
|
||||
ggudN/jJggSgRrb+F6lpaelJE9pZsznJFb9R7mFI33AGBpQWV4r3p1ZbM1vGMqGc
|
||||
4PGBzDzi28BhLBplSOPZZxqRiINQzGiQ5T2SfN06usr7EafFr6+7YKNhgrCdlVjU
|
||||
thzJ5MgHZgALNXsh
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
Binary file not shown.
Some files were not shown because too many files have changed in this diff.