Compare commits
326 Commits
test-uploa
...
test-inval
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a86d19b066 | ||
|
|
3666f8c384 | ||
|
|
a0cc5f5c64 | ||
|
|
f2905a1474 | ||
|
|
2e367e6591 | ||
|
|
1b39d3dc47 | ||
|
|
2324c1bb7a | ||
|
|
bc892e04c4 | ||
|
|
0962b0fc83 | ||
|
|
dd6f2f565e | ||
|
|
f2d8c81e9b | ||
|
|
67b65bb2c0 | ||
|
|
76895457c9 | ||
|
|
c02b2d30f2 | ||
|
|
94dc6936e7 | ||
|
|
a3abcc001a | ||
|
|
9643e85b4c | ||
|
|
9d97be3a84 | ||
|
|
4d6db0eb71 | ||
|
|
92a66454b6 | ||
|
|
976068b5a0 | ||
|
|
1e30723003 | ||
|
|
496a4ced25 | ||
|
|
fab9bfd878 | ||
|
|
d3ca6af982 | ||
|
|
540fd6db93 | ||
|
|
b0e35c694e | ||
|
|
67c2b27af7 | ||
|
|
135187f03e | ||
|
|
e742cfaa21 | ||
|
|
f71298f661 | ||
|
|
025eb16c7a | ||
|
|
ae3ed200c0 | ||
|
|
c3d6fca3eb | ||
|
|
c43f4fe518 | ||
|
|
0f3f07b5cb | ||
|
|
ef265eccaf | ||
|
|
c0eccdd358 | ||
|
|
c59775c3c0 | ||
|
|
cf062f4c6d | ||
|
|
3d0dad8718 | ||
|
|
edad9bd82b | ||
|
|
2a16aa16c3 | ||
|
|
711cc95dc4 | ||
|
|
c2ee0d2938 | ||
|
|
c668172ef0 | ||
|
|
666ee35e29 | ||
|
|
13af3f7ec2 | ||
|
|
5ad0c254ca | ||
|
|
236062c2d2 | ||
|
|
2bcd8c59db | ||
|
|
57cba3690d | ||
|
|
786a130b7d | ||
|
|
df866b907b | ||
|
|
f0b32783f0 | ||
|
|
534af33a50 | ||
|
|
2e33aec8a8 | ||
|
|
bdfb9f19c4 | ||
|
|
b4e955a60e | ||
|
|
7399807ff2 | ||
|
|
00235d3807 | ||
|
|
adb7e5e62f | ||
|
|
261b5a76d8 | ||
|
|
2fca48caaa | ||
|
|
c0917a0302 | ||
|
|
13d4a99251 | ||
|
|
b9de48e93e | ||
|
|
7a02deeeba | ||
|
|
42f20455cd | ||
|
|
434ca1985f | ||
|
|
4a9748ace5 | ||
|
|
fb8cd063eb | ||
|
|
e602736bda | ||
|
|
ccde1eef64 | ||
|
|
c44a5a7701 | ||
|
|
6e1d042f76 | ||
|
|
daf989fc21 | ||
|
|
5c3fd7d9ee | ||
|
|
fc6c238bf9 | ||
|
|
a49b84d64e | ||
|
|
7567e8d8db | ||
|
|
02a5d000cb | ||
|
|
98fb9d2d93 | ||
|
|
32fb89df7e | ||
|
|
d3b82a4e8e | ||
|
|
18faf4f7ab | ||
|
|
a7c3c0b90c | ||
|
|
421e8b6270 | ||
|
|
8e7353900c | ||
|
|
1146f35519 | ||
|
|
198f7d66e6 | ||
|
|
e9bdfcc94b | ||
|
|
a8b6a1c98d | ||
|
|
d714ccec05 | ||
|
|
0465643d0a | ||
|
|
cbf42ffae1 | ||
|
|
fcdfed9c2c | ||
|
|
96a05d946c | ||
|
|
d38766e05c | ||
|
|
c5a0b1ae5d | ||
|
|
fcc8b38c02 | ||
|
|
7febc18bb0 | ||
|
|
5151e2afee | ||
|
|
3889311557 | ||
|
|
6d71378c05 | ||
|
|
e9a96f5e2a | ||
|
|
878c3e396f | ||
|
|
148246b85b | ||
|
|
9045c03949 | ||
|
|
447b6ffaef | ||
|
|
38017473c5 | ||
|
|
dc3ac13750 | ||
|
|
5871de0c07 | ||
|
|
356e8d84d6 | ||
|
|
d476aa4389 | ||
|
|
22cf94f930 | ||
|
|
d3166d7072 | ||
|
|
67fecbe1e0 | ||
|
|
1dfac955c7 | ||
|
|
38f3d3d185 | ||
|
|
64543d4970 | ||
|
|
4c896fd87c | ||
|
|
a71e22678f | ||
|
|
45e48b565d | ||
|
|
5f73274390 | ||
|
|
87386769f7 | ||
|
|
7497c51f34 | ||
|
|
1a3c96a955 | ||
|
|
d1e7404358 | ||
|
|
e5113d5815 | ||
|
|
ff3a07dca3 | ||
|
|
31b5f1310e | ||
|
|
faa8d230c7 | ||
|
|
baab69e653 | ||
|
|
7b687611a4 | ||
|
|
adacc4ab6d | ||
|
|
43ee2993f1 | ||
|
|
f5a88ade54 | ||
|
|
aea416f654 | ||
|
|
9a4e95e25a | ||
|
|
9ca7f76505 | ||
|
|
a8cede6ae1 | ||
|
|
be3d0d872f | ||
|
|
5a85825493 | ||
|
|
e8139e80be | ||
|
|
7ba35b4407 | ||
|
|
90557921e3 | ||
|
|
78edb2889e | ||
|
|
553d3279c6 | ||
|
|
b742b60c4d | ||
|
|
2132cf7f04 | ||
|
|
f15f4f9838 | ||
|
|
2a118f3e83 | ||
|
|
8f5787008d | ||
|
|
db2ffea351 | ||
|
|
bf20f39ceb | ||
|
|
11a4882128 | ||
|
|
c102ca66c3 | ||
|
|
75365f1d4e | ||
|
|
198f5a99bc | ||
|
|
47c1045f6d | ||
|
|
e570e8ad32 | ||
|
|
df138d0027 | ||
|
|
9567352002 | ||
|
|
6c7b99f7e0 | ||
|
|
3673ca77a5 | ||
|
|
bb45c9aa41 | ||
|
|
4c347f5576 | ||
|
|
bf07ec20b0 | ||
|
|
fc864543a7 | ||
|
|
4fa1df3075 | ||
|
|
cfd0a6ff1f | ||
|
|
00ed56afd6 | ||
|
|
b6e3a3ad02 | ||
|
|
c250957ab0 | ||
|
|
4eb0b560c5 | ||
|
|
cb916a0682 | ||
|
|
88386e8c82 | ||
|
|
a64e1f0129 | ||
|
|
fea176449c | ||
|
|
ff03e34c70 | ||
|
|
6fc832677e | ||
|
|
725870d558 | ||
|
|
631c88b209 | ||
|
|
6a093bd35a | ||
|
|
afb07cf50d | ||
|
|
aa61e6ad4e | ||
|
|
8a3aed0476 | ||
|
|
afc5baad4a | ||
|
|
eff761ab1e | ||
|
|
5f040a8e32 | ||
|
|
5173ab6b90 | ||
|
|
448fd9145a | ||
|
|
ac8798e818 | ||
|
|
34694251dd | ||
|
|
cc76906712 | ||
|
|
ef8c481634 | ||
|
|
c12404451d | ||
|
|
e378931eda | ||
|
|
160b209394 | ||
|
|
cac9d8f75e | ||
|
|
7f0fa18c57 | ||
|
|
fca7ec896a | ||
|
|
e066766cc9 | ||
|
|
be6c890874 | ||
|
|
feca125437 | ||
|
|
1be005289a | ||
|
|
79297ef5cb | ||
|
|
5ec29ca60b | ||
|
|
9a72db5b9b | ||
|
|
14cbf67d65 | ||
|
|
b20aaff661 | ||
|
|
a66f4e1150 | ||
|
|
501df0dc4e | ||
|
|
b551b6ee73 | ||
|
|
71d9dfa86e | ||
|
|
6628bc0e9b | ||
|
|
f43fa12fc0 | ||
|
|
2b425110dc | ||
|
|
55d411f1eb | ||
|
|
7ddd327f63 | ||
|
|
3a615176c5 | ||
|
|
e79af1b1de | ||
|
|
c8828dab30 | ||
|
|
f85b738e2f | ||
|
|
95a6b61cdc | ||
|
|
21b320ef42 | ||
|
|
8c81a1aaf8 | ||
|
|
ec147740ee | ||
|
|
b7b0ec321e | ||
|
|
7fe7a965f5 | ||
|
|
9f243c768f | ||
|
|
b841f0f307 | ||
|
|
8e736479f7 | ||
|
|
2ceabadb81 | ||
|
|
a2951b4db1 | ||
|
|
98615564ed | ||
|
|
3ce87d1fcb | ||
|
|
d62d853ea4 | ||
|
|
70731dd75b | ||
|
|
ae7b4a1755 | ||
|
|
f66a592e37 | ||
|
|
e8518bf206 | ||
|
|
2a047eb526 | ||
|
|
bc137103a3 | ||
|
|
085967ad29 | ||
|
|
4e9d3afcc4 | ||
|
|
acb6d34c5f | ||
|
|
63ec74276c | ||
|
|
e8a232297d | ||
|
|
575092d603 | ||
|
|
2d62dec7ec | ||
|
|
f93b90f87a | ||
|
|
f40e5bdefe | ||
|
|
9bbcc0046c | ||
|
|
b3dd2c09ba | ||
|
|
8574313841 | ||
|
|
a677534462 | ||
|
|
22730dc0ac | ||
|
|
086e6c46b6 | ||
|
|
bc0ed3cb01 | ||
|
|
220cc07239 | ||
|
|
271be07267 | ||
|
|
48a0cc0c42 | ||
|
|
5415fc201c | ||
|
|
b08fdc7dfb | ||
|
|
6eb5954f0e | ||
|
|
6ec83d52b5 | ||
|
|
403ded5c58 | ||
|
|
4d3f6c23be | ||
|
|
6d73b21dcf | ||
|
|
072c070c0c | ||
|
|
df1ca726f9 | ||
|
|
086c8b1b3e | ||
|
|
09ab4aea01 | ||
|
|
a6f2061ff7 | ||
|
|
02c1339753 | ||
|
|
a1cd909247 | ||
|
|
9ee4831f78 | ||
|
|
14dfbdbea5 | ||
|
|
270b5535e2 | ||
|
|
74b0340a13 | ||
|
|
b13dfc6437 | ||
|
|
c5bab9b07c | ||
|
|
b6964cae2e | ||
|
|
ebf1349b15 | ||
|
|
9d2e0ac013 | ||
|
|
05dbda4b51 | ||
|
|
40a2a5b99f | ||
|
|
68b3b048b9 | ||
|
|
d434b92945 | ||
|
|
1697d66ba7 | ||
|
|
a6a998d11b | ||
|
|
f82e2cc714 | ||
|
|
d64bb81864 | ||
|
|
88e183e69e | ||
|
|
8192e3eb85 | ||
|
|
d8e9f558c2 | ||
|
|
3a997a5631 | ||
|
|
361d1f732e | ||
|
|
9483b33ec1 | ||
|
|
bc5b079b2a | ||
|
|
bca73f9932 | ||
|
|
a180d5d5c9 | ||
|
|
78624a2b8c | ||
|
|
695107bc98 | ||
|
|
fb323e083a | ||
|
|
5713decf23 | ||
|
|
c194381f04 | ||
|
|
b92eb6f620 | ||
|
|
ea44834c41 | ||
|
|
a730b00a36 | ||
|
|
5e01467e2c | ||
|
|
e9a9a180bb | ||
|
|
67fddae90d | ||
|
|
7337f64180 | ||
|
|
d296ef2dcd | ||
|
|
f64386c73c | ||
|
|
1666e85118 | ||
|
|
db522aa155 | ||
|
|
d0d7521215 | ||
|
|
2fc6f6e619 | ||
|
|
d8ab321894 | ||
|
|
62b054f265 | ||
|
|
1d1c096067 | ||
|
|
bcffaab602 |
@@ -1,13 +1,14 @@
|
|||||||
# Advanced pipeline for running our full test suite on demand and for release branches.
|
# Advanced pipeline for running our full test suite on demand.
|
||||||
trigger:
|
trigger:
|
||||||
- '*.x'
|
|
||||||
# When changing these triggers, please ensure the documentation under
|
# When changing these triggers, please ensure the documentation under
|
||||||
# "Running tests in CI" is still correct.
|
# "Running tests in CI" is still correct.
|
||||||
- test-*
|
- test-*
|
||||||
pr: none
|
pr: none
|
||||||
|
|
||||||
|
variables:
|
||||||
|
# We don't publish our Docker images in this pipeline, but when building them
|
||||||
|
# for testing, let's use the nightly tag.
|
||||||
|
dockerTag: nightly
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
- template: templates/stages/test-and-package-stage.yml
|
- template: templates/stages/test-and-package-stage.yml
|
||||||
# Notify failures only for release branches.
|
|
||||||
- ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
|
||||||
- template: templates/stages/notify-failure-stage.yml
|
|
||||||
|
|||||||
@@ -5,3 +5,4 @@ pr:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/jobs/standard-tests-jobs.yml
|
- template: templates/jobs/standard-tests-jobs.yml
|
||||||
|
|
||||||
|
|||||||
@@ -9,6 +9,9 @@ schedules:
|
|||||||
- master
|
- master
|
||||||
always: true
|
always: true
|
||||||
|
|
||||||
|
variables:
|
||||||
|
dockerTag: nightly
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
- template: templates/stages/test-and-package-stage.yml
|
- template: templates/stages/test-and-package-stage.yml
|
||||||
- template: templates/stages/deploy-stage.yml
|
- template: templates/stages/deploy-stage.yml
|
||||||
|
|||||||
@@ -1,12 +1,18 @@
|
|||||||
# Release pipeline to build and deploy Certbot for Windows for GitHub release tags
|
# Release pipeline to run our full test suite, build artifacts, and deploy them
|
||||||
|
# for GitHub release tags.
|
||||||
trigger:
|
trigger:
|
||||||
tags:
|
tags:
|
||||||
include:
|
include:
|
||||||
- v*
|
- v*
|
||||||
pr: none
|
pr: none
|
||||||
|
|
||||||
|
variables:
|
||||||
|
dockerTag: ${{variables['Build.SourceBranchName']}}
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
- template: templates/stages/test-and-package-stage.yml
|
- template: templates/stages/test-and-package-stage.yml
|
||||||
- template: templates/stages/changelog-stage.yml
|
- template: templates/stages/changelog-stage.yml
|
||||||
- template: templates/stages/deploy-stage.yml
|
- template: templates/stages/deploy-stage.yml
|
||||||
|
parameters:
|
||||||
|
snapReleaseChannel: beta
|
||||||
- template: templates/stages/notify-failure-stage.yml
|
- template: templates/stages/notify-failure-stage.yml
|
||||||
|
|||||||
@@ -3,6 +3,8 @@ jobs:
|
|||||||
variables:
|
variables:
|
||||||
- name: IMAGE_NAME
|
- name: IMAGE_NAME
|
||||||
value: ubuntu-18.04
|
value: ubuntu-18.04
|
||||||
|
- name: PYTHON_VERSION
|
||||||
|
value: 3.9
|
||||||
- group: certbot-common
|
- group: certbot-common
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
@@ -12,38 +14,31 @@ jobs:
|
|||||||
linux-py37:
|
linux-py37:
|
||||||
PYTHON_VERSION: 3.7
|
PYTHON_VERSION: 3.7
|
||||||
TOXENV: py37
|
TOXENV: py37
|
||||||
|
linux-py38:
|
||||||
|
PYTHON_VERSION: 3.8
|
||||||
|
TOXENV: py38
|
||||||
linux-py37-nopin:
|
linux-py37-nopin:
|
||||||
PYTHON_VERSION: 3.7
|
PYTHON_VERSION: 3.7
|
||||||
TOXENV: py37
|
TOXENV: py37
|
||||||
CERTBOT_NO_PIN: 1
|
CERTBOT_NO_PIN: 1
|
||||||
|
linux-external-mock:
|
||||||
|
TOXENV: external-mock
|
||||||
linux-boulder-v1-integration-certbot-oldest:
|
linux-boulder-v1-integration-certbot-oldest:
|
||||||
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: integration-certbot-oldest
|
TOXENV: integration-certbot-oldest
|
||||||
ACME_SERVER: boulder-v1
|
ACME_SERVER: boulder-v1
|
||||||
linux-boulder-v2-integration-certbot-oldest:
|
linux-boulder-v2-integration-certbot-oldest:
|
||||||
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: integration-certbot-oldest
|
TOXENV: integration-certbot-oldest
|
||||||
ACME_SERVER: boulder-v2
|
ACME_SERVER: boulder-v2
|
||||||
linux-boulder-v1-integration-nginx-oldest:
|
linux-boulder-v1-integration-nginx-oldest:
|
||||||
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: integration-nginx-oldest
|
TOXENV: integration-nginx-oldest
|
||||||
ACME_SERVER: boulder-v1
|
ACME_SERVER: boulder-v1
|
||||||
linux-boulder-v2-integration-nginx-oldest:
|
linux-boulder-v2-integration-nginx-oldest:
|
||||||
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: integration-nginx-oldest
|
TOXENV: integration-nginx-oldest
|
||||||
ACME_SERVER: boulder-v2
|
ACME_SERVER: boulder-v2
|
||||||
linux-boulder-v1-py27-integration:
|
|
||||||
PYTHON_VERSION: 2.7
|
|
||||||
TOXENV: integration
|
|
||||||
ACME_SERVER: boulder-v1
|
|
||||||
linux-boulder-v2-py27-integration:
|
|
||||||
PYTHON_VERSION: 2.7
|
|
||||||
TOXENV: integration
|
|
||||||
ACME_SERVER: boulder-v2
|
|
||||||
linux-boulder-v1-py35-integration:
|
|
||||||
PYTHON_VERSION: 3.5
|
|
||||||
TOXENV: integration
|
|
||||||
ACME_SERVER: boulder-v1
|
|
||||||
linux-boulder-v2-py35-integration:
|
|
||||||
PYTHON_VERSION: 3.5
|
|
||||||
TOXENV: integration
|
|
||||||
ACME_SERVER: boulder-v2
|
|
||||||
linux-boulder-v1-py36-integration:
|
linux-boulder-v1-py36-integration:
|
||||||
PYTHON_VERSION: 3.6
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: integration
|
TOXENV: integration
|
||||||
@@ -68,18 +63,27 @@ jobs:
|
|||||||
PYTHON_VERSION: 3.8
|
PYTHON_VERSION: 3.8
|
||||||
TOXENV: integration
|
TOXENV: integration
|
||||||
ACME_SERVER: boulder-v2
|
ACME_SERVER: boulder-v2
|
||||||
|
linux-boulder-v1-py39-integration:
|
||||||
|
PYTHON_VERSION: 3.9
|
||||||
|
TOXENV: integration
|
||||||
|
ACME_SERVER: boulder-v1
|
||||||
|
linux-boulder-v2-py39-integration:
|
||||||
|
PYTHON_VERSION: 3.9
|
||||||
|
TOXENV: integration
|
||||||
|
ACME_SERVER: boulder-v2
|
||||||
nginx-compat:
|
nginx-compat:
|
||||||
TOXENV: nginx_compat
|
TOXENV: nginx_compat
|
||||||
le-auto-jessie:
|
linux-integration-rfc2136:
|
||||||
TOXENV: le_auto_jessie
|
IMAGE_NAME: ubuntu-18.04
|
||||||
le-auto-centos6:
|
PYTHON_VERSION: 3.8
|
||||||
TOXENV: le_auto_centos6
|
TOXENV: integration-dns-rfc2136
|
||||||
le-auto-oraclelinux6:
|
|
||||||
TOXENV: le_auto_oraclelinux6
|
|
||||||
docker-dev:
|
docker-dev:
|
||||||
TOXENV: docker_dev
|
TOXENV: docker_dev
|
||||||
farmtest-apache2:
|
macos-farmtest-apache2:
|
||||||
PYTHON_VERSION: 3.7
|
# We run one of these test farm tests on macOS to help ensure the
|
||||||
|
# tests continue to work on the platform.
|
||||||
|
IMAGE_NAME: macOS-10.15
|
||||||
|
PYTHON_VERSION: 3.8
|
||||||
TOXENV: test-farm-apache2
|
TOXENV: test-farm-apache2
|
||||||
farmtest-leauto-upgrades:
|
farmtest-leauto-upgrades:
|
||||||
PYTHON_VERSION: 3.7
|
PYTHON_VERSION: 3.7
|
||||||
|
|||||||
@@ -1,102 +1,217 @@
|
|||||||
jobs:
|
jobs:
|
||||||
- job: installer_build
|
# - job: docker_build
|
||||||
|
# pool:
|
||||||
|
# vmImage: ubuntu-18.04
|
||||||
|
# strategy:
|
||||||
|
# matrix:
|
||||||
|
# amd64:
|
||||||
|
# DOCKER_ARCH: amd64
|
||||||
|
# # Do not run the heavy non-amd64 builds for test branches
|
||||||
|
# ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
||||||
|
# arm32v6:
|
||||||
|
# DOCKER_ARCH: arm32v6
|
||||||
|
# arm64v8:
|
||||||
|
# DOCKER_ARCH: arm64v8
|
||||||
|
# steps:
|
||||||
|
# - bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
|
||||||
|
# displayName: Build the Docker images
|
||||||
|
# # We don't filter for the Docker Hub organization to continue to allow
|
||||||
|
# # easy testing of these scripts on forks.
|
||||||
|
# - bash: |
|
||||||
|
# set -e
|
||||||
|
# DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
|
||||||
|
# docker save --output images.tar $DOCKER_IMAGES
|
||||||
|
# displayName: Save the Docker images
|
||||||
|
# # If the name of the tar file or artifact changes, the deploy stage will
|
||||||
|
# # also need to be updated.
|
||||||
|
# - bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
|
||||||
|
# displayName: Prepare Docker artifact
|
||||||
|
# - task: PublishPipelineArtifact@1
|
||||||
|
# inputs:
|
||||||
|
# path: $(Build.ArtifactStagingDirectory)
|
||||||
|
# artifact: docker_$(DOCKER_ARCH)
|
||||||
|
# displayName: Store Docker artifact
|
||||||
|
# - job: docker_run
|
||||||
|
# dependsOn: docker_build
|
||||||
|
# pool:
|
||||||
|
# vmImage: ubuntu-18.04
|
||||||
|
# steps:
|
||||||
|
# - task: DownloadPipelineArtifact@2
|
||||||
|
# inputs:
|
||||||
|
# artifact: docker_amd64
|
||||||
|
# path: $(Build.SourcesDirectory)
|
||||||
|
# displayName: Retrieve Docker images
|
||||||
|
# - bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
|
||||||
|
# displayName: Load Docker images
|
||||||
|
# - bash: |
|
||||||
|
# set -ex
|
||||||
|
# DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
|
||||||
|
# for DOCKER_IMAGE in ${DOCKER_IMAGES}
|
||||||
|
# do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
|
||||||
|
# done
|
||||||
|
# displayName: Run integration tests for Docker images
|
||||||
|
# - job: installer_build
|
||||||
|
# pool:
|
||||||
|
# vmImage: vs2017-win2016
|
||||||
|
# steps:
|
||||||
|
# - task: UsePythonVersion@0
|
||||||
|
# inputs:
|
||||||
|
# versionSpec: 3.8
|
||||||
|
# architecture: x86
|
||||||
|
# addToPath: true
|
||||||
|
# - script: python windows-installer/construct.py
|
||||||
|
# displayName: Build Certbot installer
|
||||||
|
# - task: CopyFiles@2
|
||||||
|
# inputs:
|
||||||
|
# sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
|
||||||
|
# contents: '*.exe'
|
||||||
|
# targetFolder: $(Build.ArtifactStagingDirectory)
|
||||||
|
# - task: PublishPipelineArtifact@1
|
||||||
|
# inputs:
|
||||||
|
# path: $(Build.ArtifactStagingDirectory)
|
||||||
|
# # If we change the artifact's name, it should also be changed in tools/create_github_release.py
|
||||||
|
# artifact: windows-installer
|
||||||
|
# displayName: Publish Windows installer
|
||||||
|
# - job: installer_run
|
||||||
|
# dependsOn: installer_build
|
||||||
|
# strategy:
|
||||||
|
# matrix:
|
||||||
|
# win2019:
|
||||||
|
# imageName: windows-2019
|
||||||
|
# win2016:
|
||||||
|
# imageName: vs2017-win2016
|
||||||
|
# pool:
|
||||||
|
# vmImage: $(imageName)
|
||||||
|
# steps:
|
||||||
|
# - powershell: |
|
||||||
|
# if ($PSVersionTable.PSVersion.Major -ne 5) {
|
||||||
|
# throw "Powershell version is not 5.x"
|
||||||
|
# }
|
||||||
|
# condition: eq(variables['imageName'], 'vs2017-win2016')
|
||||||
|
# displayName: Check Powershell 5.x is used in vs2017-win2016
|
||||||
|
# - task: UsePythonVersion@0
|
||||||
|
# inputs:
|
||||||
|
# versionSpec: 3.8
|
||||||
|
# addToPath: true
|
||||||
|
# - task: DownloadPipelineArtifact@2
|
||||||
|
# inputs:
|
||||||
|
# artifact: windows-installer
|
||||||
|
# path: $(Build.SourcesDirectory)/bin
|
||||||
|
# displayName: Retrieve Windows installer
|
||||||
|
# - script: |
|
||||||
|
# python -m venv venv
|
||||||
|
# venv\Scripts\python tools\pipstrap.py
|
||||||
|
# venv\Scripts\python tools\pip_install.py -e certbot-ci
|
||||||
|
# env:
|
||||||
|
# PIP_NO_BUILD_ISOLATION: no
|
||||||
|
# displayName: Prepare Certbot-CI
|
||||||
|
# - script: |
|
||||||
|
# set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
||||||
|
# venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
|
||||||
|
# displayName: Run windows installer integration tests
|
||||||
|
# - script: |
|
||||||
|
# set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
||||||
|
# venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
|
||||||
|
# displayName: Run certbot integration tests
|
||||||
|
- job: snaps_build
|
||||||
pool:
|
pool:
|
||||||
vmImage: vs2017-win2016
|
vmImage: ubuntu-18.04
|
||||||
|
timeoutInMinutes: 0
|
||||||
|
variables:
|
||||||
|
# Do not run the heavy non-amd64 builds for test branches
|
||||||
|
${{ if not(startsWith(variables['Build.SourceBranchName'], 'ignore-test-')) }}:
|
||||||
|
ARCHS: amd64 arm64 armhf
|
||||||
|
${{ if startsWith(variables['Build.SourceBranchName'], 'ignore-test-') }}:
|
||||||
|
ARCHS: amd64
|
||||||
steps:
|
steps:
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y --no-install-recommends snapd
|
||||||
|
sudo snap install --classic snapcraft
|
||||||
|
displayName: Install dependencies
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: 3.7
|
versionSpec: 3.8
|
||||||
architecture: x86
|
|
||||||
addToPath: true
|
addToPath: true
|
||||||
- script: python windows-installer/construct.py
|
- task: DownloadSecureFile@1
|
||||||
displayName: Build Certbot installer
|
name: credentials
|
||||||
- task: CopyFiles@2
|
|
||||||
inputs:
|
inputs:
|
||||||
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
|
secureFile: launchpad-credentials
|
||||||
contents: '*.exe'
|
- script: |
|
||||||
targetFolder: $(Build.ArtifactStagingDirectory)
|
set -e
|
||||||
|
git config --global user.email "$(Build.RequestedForEmail)"
|
||||||
|
git config --global user.name "$(Build.RequestedFor)"
|
||||||
|
mkdir -p ~/.local/share/snapcraft/provider/launchpad
|
||||||
|
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
|
||||||
|
python3 tools/snap/build_remote.py ALL --archs ${ARCHS} --timeout 19800
|
||||||
|
displayName: Build snaps
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
mv *.snap $(Build.ArtifactStagingDirectory)
|
||||||
|
mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
|
||||||
|
displayName: Prepare artifacts
|
||||||
- task: PublishPipelineArtifact@1
|
- task: PublishPipelineArtifact@1
|
||||||
inputs:
|
inputs:
|
||||||
path: $(Build.ArtifactStagingDirectory)
|
path: $(Build.ArtifactStagingDirectory)
|
||||||
artifact: windows-installer
|
artifact: snaps
|
||||||
displayName: Publish Windows installer
|
displayName: Store snaps artifacts
|
||||||
- job: installer_run
|
- job: snap_run
|
||||||
dependsOn: installer_build
|
dependsOn: snaps_build
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
win2019:
|
|
||||||
imageName: windows-2019
|
|
||||||
win2016:
|
|
||||||
imageName: vs2017-win2016
|
|
||||||
pool:
|
pool:
|
||||||
vmImage: $(imageName)
|
vmImage: ubuntu-18.04
|
||||||
steps:
|
steps:
|
||||||
- powershell: |
|
- task: UsePythonVersion@0
|
||||||
if ($PSVersionTable.PSVersion.Major -ne 5) {
|
inputs:
|
||||||
throw "Powershell version is not 5.x"
|
versionSpec: 3.8
|
||||||
}
|
addToPath: true
|
||||||
condition: eq(variables['imageName'], 'vs2017-win2016')
|
- script: |
|
||||||
displayName: Check Powershell 5.x is used in vs2017-win2016
|
set -e
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y --no-install-recommends nginx-light snapd
|
||||||
|
python3 -m venv venv
|
||||||
|
venv/bin/python tools/pipstrap.py
|
||||||
|
venv/bin/python tools/pip_install.py -U tox
|
||||||
|
displayName: Install dependencies
|
||||||
|
- task: DownloadPipelineArtifact@2
|
||||||
|
inputs:
|
||||||
|
artifact: snaps
|
||||||
|
path: $(Build.SourcesDirectory)/snap
|
||||||
|
displayName: Retrieve Certbot snaps
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
sudo snap install --dangerous --classic snap/certbot_*_amd64.snap
|
||||||
|
displayName: Install Certbot snap
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
|
||||||
|
displayName: Run tox
|
||||||
|
- job: snap_dns_run
|
||||||
|
dependsOn: snaps_build
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-18.04
|
||||||
|
steps:
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y --no-install-recommends snapd
|
||||||
|
displayName: Install dependencies
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: 3.8
|
versionSpec: 3.8
|
||||||
addToPath: true
|
addToPath: true
|
||||||
- task: DownloadPipelineArtifact@2
|
- task: DownloadPipelineArtifact@2
|
||||||
inputs:
|
inputs:
|
||||||
artifact: windows-installer
|
artifact: snaps
|
||||||
path: $(Build.SourcesDirectory)/bin
|
path: $(Build.SourcesDirectory)/snap
|
||||||
displayName: Retrieve Windows installer
|
displayName: Retrieve Certbot snaps
|
||||||
- script: |
|
- script: |
|
||||||
py -3 -m venv venv
|
set -e
|
||||||
venv\Scripts\python tools\pip_install.py -e certbot-ci
|
python3 -m venv venv
|
||||||
|
venv/bin/python tools/pipstrap.py
|
||||||
|
venv/bin/python tools/pip_install.py -e certbot-ci
|
||||||
displayName: Prepare Certbot-CI
|
displayName: Prepare Certbot-CI
|
||||||
- script: |
|
- script: |
|
||||||
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
set -e
|
||||||
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
|
sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
|
||||||
displayName: Run windows installer integration tests
|
displayName: Test DNS plugins snaps
|
||||||
- script: |
|
|
||||||
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
|
||||||
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
|
|
||||||
displayName: Run certbot integration tests
|
|
||||||
- job: snap_build
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
amd64:
|
|
||||||
ARCH: amd64
|
|
||||||
arm64:
|
|
||||||
ARCH: arm64
|
|
||||||
armhf:
|
|
||||||
ARCH: armhf
|
|
||||||
pool:
|
|
||||||
vmImage: ubuntu-18.04
|
|
||||||
steps:
|
|
||||||
- script: |
|
|
||||||
snap/local/build.sh ${ARCH}
|
|
||||||
mv *.snap $(Build.ArtifactStagingDirectory)
|
|
||||||
displayName: Build Certbot snap
|
|
||||||
- task: PublishPipelineArtifact@1
|
|
||||||
inputs:
|
|
||||||
path: $(Build.ArtifactStagingDirectory)
|
|
||||||
artifact: snap-$(arch)
|
|
||||||
displayName: Store snap artifact
|
|
||||||
- job: snap_run
|
|
||||||
dependsOn: snap_build
|
|
||||||
pool:
|
|
||||||
vmImage: ubuntu-18.04
|
|
||||||
steps:
|
|
||||||
- script: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y --no-install-recommends nginx-light snapd
|
|
||||||
python tools/pip_install.py -U tox
|
|
||||||
displayName: Install dependencies
|
|
||||||
- task: DownloadPipelineArtifact@2
|
|
||||||
inputs:
|
|
||||||
artifact: snap-amd64
|
|
||||||
path: $(Build.SourcesDirectory)/snap
|
|
||||||
displayName: Retrieve Certbot snap
|
|
||||||
- script: |
|
|
||||||
sudo snap install --dangerous --classic snap/*.snap
|
|
||||||
displayName: Install Certbot snap
|
|
||||||
- script: |
|
|
||||||
python -m tox -e integration-external,apacheconftest-external-with-pebble
|
|
||||||
displayName: Run tox
|
|
||||||
|
|||||||
@@ -1,73 +1,78 @@
|
|||||||
jobs:
|
jobs:
|
||||||
- job: test
|
- job: test
|
||||||
|
variables:
|
||||||
|
PYTHON_VERSION: 3.9
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
macos-py27:
|
macos-py36:
|
||||||
IMAGE_NAME: macOS-10.14
|
IMAGE_NAME: macOS-10.15
|
||||||
PYTHON_VERSION: 2.7
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: py27
|
TOXENV: py36
|
||||||
macos-py38:
|
macos-py39:
|
||||||
IMAGE_NAME: macOS-10.14
|
IMAGE_NAME: macOS-10.15
|
||||||
|
PYTHON_VERSION: 3.9
|
||||||
|
TOXENV: py39
|
||||||
|
windows-py36:
|
||||||
|
IMAGE_NAME: vs2017-win2016
|
||||||
|
PYTHON_VERSION: 3.6
|
||||||
|
TOXENV: py36
|
||||||
|
windows-py38-cover:
|
||||||
|
IMAGE_NAME: vs2017-win2016
|
||||||
PYTHON_VERSION: 3.8
|
PYTHON_VERSION: 3.8
|
||||||
TOXENV: py38
|
TOXENV: py38-cover
|
||||||
windows-py35:
|
|
||||||
IMAGE_NAME: vs2017-win2016
|
|
||||||
PYTHON_VERSION: 3.5
|
|
||||||
TOXENV: py35
|
|
||||||
windows-py37-cover:
|
|
||||||
IMAGE_NAME: vs2017-win2016
|
|
||||||
PYTHON_VERSION: 3.7
|
|
||||||
TOXENV: py37-cover
|
|
||||||
windows-integration-certbot:
|
windows-integration-certbot:
|
||||||
IMAGE_NAME: vs2017-win2016
|
IMAGE_NAME: vs2017-win2016
|
||||||
PYTHON_VERSION: 3.7
|
PYTHON_VERSION: 3.8
|
||||||
TOXENV: integration-certbot
|
TOXENV: integration-certbot
|
||||||
linux-oldest-tests-1:
|
linux-oldest-tests-1:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
TOXENV: py27-{acme,apache,apache-v2,certbot}-oldest
|
PYTHON_VERSION: 3.6
|
||||||
|
TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
|
||||||
linux-oldest-tests-2:
|
linux-oldest-tests-2:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
TOXENV: py27-{dns,nginx}-oldest
|
PYTHON_VERSION: 3.6
|
||||||
linux-py27:
|
TOXENV: '{dns,nginx}-oldest'
|
||||||
|
linux-py36:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
PYTHON_VERSION: 2.7
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: py27
|
TOXENV: py36
|
||||||
linux-py35:
|
linux-py39-cover:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
PYTHON_VERSION: 3.5
|
PYTHON_VERSION: 3.9
|
||||||
TOXENV: py35
|
TOXENV: py39-cover
|
||||||
linux-py38-cover:
|
|
||||||
IMAGE_NAME: ubuntu-18.04
|
|
||||||
PYTHON_VERSION: 3.8
|
|
||||||
TOXENV: py38-cover
|
|
||||||
linux-py37-lint:
|
linux-py37-lint:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
PYTHON_VERSION: 3.7
|
PYTHON_VERSION: 3.7
|
||||||
TOXENV: lint
|
TOXENV: lint
|
||||||
linux-py35-mypy:
|
linux-py36-mypy:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
PYTHON_VERSION: 3.5
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: mypy
|
TOXENV: mypy
|
||||||
linux-integration:
|
linux-integration:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
PYTHON_VERSION: 2.7
|
PYTHON_VERSION: 3.8
|
||||||
TOXENV: integration
|
TOXENV: integration
|
||||||
ACME_SERVER: pebble
|
ACME_SERVER: pebble
|
||||||
apache-compat:
|
apache-compat:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
TOXENV: apache_compat
|
TOXENV: apache_compat
|
||||||
le-auto-xenial:
|
le-modification:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
TOXENV: le_auto_xenial
|
TOXENV: modification
|
||||||
apacheconftest:
|
apacheconftest:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
PYTHON_VERSION: 2.7
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: apacheconftest-with-pebble
|
TOXENV: apacheconftest-with-pebble
|
||||||
nginxroundtrip:
|
nginxroundtrip:
|
||||||
IMAGE_NAME: ubuntu-18.04
|
IMAGE_NAME: ubuntu-18.04
|
||||||
PYTHON_VERSION: 2.7
|
PYTHON_VERSION: 3.6
|
||||||
TOXENV: nginxroundtrip
|
TOXENV: nginxroundtrip
|
||||||
pool:
|
pool:
|
||||||
vmImage: $(IMAGE_NAME)
|
vmImage: $(IMAGE_NAME)
|
||||||
steps:
|
steps:
|
||||||
- template: ../steps/tox-steps.yml
|
- template: ../steps/tox-steps.yml
|
||||||
|
- job: test_sphinx_builds
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-20.04
|
||||||
|
steps:
|
||||||
|
- template: ../steps/sphinx-steps.yml
|
||||||
|
|||||||
@@ -5,12 +5,15 @@ stages:
|
|||||||
pool:
|
pool:
|
||||||
vmImage: vs2017-win2016
|
vmImage: vs2017-win2016
|
||||||
steps:
|
steps:
|
||||||
|
# If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
|
||||||
- bash: |
|
- bash: |
|
||||||
|
set -e
|
||||||
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
|
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
|
||||||
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
|
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
|
||||||
displayName: Prepare changelog
|
displayName: Prepare changelog
|
||||||
- task: PublishPipelineArtifact@1
|
- task: PublishPipelineArtifact@1
|
||||||
inputs:
|
inputs:
|
||||||
path: $(Build.ArtifactStagingDirectory)
|
path: $(Build.ArtifactStagingDirectory)
|
||||||
|
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
|
||||||
artifact: changelog
|
artifact: changelog
|
||||||
displayName: Publish changelog
|
displayName: Publish changelog
|
||||||
|
|||||||
@@ -1,43 +1,99 @@
|
|||||||
|
parameters:
|
||||||
|
- name: snapReleaseChannel
|
||||||
|
type: string
|
||||||
|
default: edge
|
||||||
|
values:
|
||||||
|
- edge
|
||||||
|
- beta
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
- stage: Deploy
|
- stage: Deploy
|
||||||
jobs:
|
jobs:
|
||||||
# This job relies on a snapcraft.cfg preconfigured with credential,
|
# This job relies on credentials used to publish the Certbot snaps. This
|
||||||
# stored as a secure file in Azure Pipeline.
|
# credential file was created by running:
|
||||||
# This credential has a maximum lifetime of 1 year and the current
|
#
|
||||||
# credential will expire on 6/25/2021. The content of snapcraft.cfg
|
# snapcraft logout
|
||||||
# will need to be updated to use a new credential before then to
|
# snapcraft login (provide the shared snapcraft credentials when prompted)
|
||||||
# prevent automated deploys from breaking. Remembering to do this is
|
# snapcraft export-login --channels=beta,edge snapcraft.cfg
|
||||||
# also tracked by https://github.com/certbot/certbot/issues/7931.
|
#
|
||||||
|
# Then the file was added as a secure file in Azure pipelines
|
||||||
|
# with the name snapcraft.cfg by following the instructions at
|
||||||
|
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
|
||||||
|
# including authorizing the file in all pipelines as described at
|
||||||
|
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#how-do-i-authorize-a-secure-file-for-use-in-all-pipelines.
|
||||||
|
#
|
||||||
|
# This file has a maximum lifetime of one year and the current
|
||||||
|
# file will expire on 2021-07-28 which is also tracked by
|
||||||
|
# https://github.com/certbot/certbot/issues/7931. The file will
|
||||||
|
# need to be updated before then to prevent automated deploys
|
||||||
|
# from breaking.
|
||||||
|
#
|
||||||
|
# Revoking these credentials can be done by changing the password of the
|
||||||
|
# account used to generate the credentials. See
|
||||||
|
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
|
||||||
|
# more info.
|
||||||
- job: publish_snap
|
- job: publish_snap
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
amd64:
|
|
||||||
ARCH: amd64
|
|
||||||
arm64:
|
|
||||||
ARCH: arm64
|
|
||||||
armhf:
|
|
||||||
ARCH: armhf
|
|
||||||
pool:
|
pool:
|
||||||
vmImage: ubuntu-18.04
|
vmImage: ubuntu-18.04
|
||||||
variables:
|
variables:
|
||||||
- group: certbot-common
|
- group: certbot-common
|
||||||
steps:
|
steps:
|
||||||
- bash: |
|
- bash: |
|
||||||
|
set -e
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
sudo apt-get install -y --no-install-recommends snapd
|
sudo apt-get install -y --no-install-recommends snapd
|
||||||
sudo snap install --classic snapcraft
|
sudo snap install --classic snapcraft
|
||||||
displayName: Install dependencies
|
displayName: Install dependencies
|
||||||
- task: DownloadPipelineArtifact@2
|
- task: DownloadPipelineArtifact@2
|
||||||
inputs:
|
inputs:
|
||||||
artifact: snap-$(arch)
|
artifact: snaps
|
||||||
path: $(Build.SourcesDirectory)/snap
|
path: $(Build.SourcesDirectory)/snap
|
||||||
displayName: Retrieve Certbot snap
|
displayName: Retrieve Certbot snaps
|
||||||
- task: DownloadSecureFile@1
|
- task: DownloadSecureFile@1
|
||||||
name: snapcraftCfg
|
name: snapcraftCfg
|
||||||
inputs:
|
inputs:
|
||||||
secureFile: snapcraft.cfg
|
secureFile: snapcraft.cfg
|
||||||
- bash: |
|
- bash: |
|
||||||
|
set -e
|
||||||
mkdir -p .snapcraft
|
mkdir -p .snapcraft
|
||||||
ln -s $(snapcraftCfg.secureFilePath) .snapcraft/snapcraft.cfg
|
ln -s $(snapcraftCfg.secureFilePath) .snapcraft/snapcraft.cfg
|
||||||
snapcraft push --release=edge snap/*.snap
|
for SNAP_FILE in snap/*.snap; do
|
||||||
|
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
|
||||||
|
done
|
||||||
displayName: Publish to Snap store
|
displayName: Publish to Snap store
|
||||||
|
- job: publish_docker
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-18.04
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
amd64:
|
||||||
|
DOCKER_ARCH: amd64
|
||||||
|
arm32v6:
|
||||||
|
DOCKER_ARCH: arm32v6
|
||||||
|
arm64v8:
|
||||||
|
DOCKER_ARCH: arm64v8
|
||||||
|
steps:
|
||||||
|
- task: DownloadPipelineArtifact@2
|
||||||
|
inputs:
|
||||||
|
artifact: docker_$(DOCKER_ARCH)
|
||||||
|
path: $(Build.SourcesDirectory)
|
||||||
|
displayName: Retrieve Docker images
|
||||||
|
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
|
||||||
|
displayName: Load Docker images
|
||||||
|
- task: Docker@2
|
||||||
|
inputs:
|
||||||
|
command: login
|
||||||
|
# The credentials used here are for the shared certbotbot account
|
||||||
|
# on Docker Hub. The credentials are stored in a service account
|
||||||
|
# which was created by following the instructions at
|
||||||
|
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
|
||||||
|
# The name given to this service account must match the value
|
||||||
|
# given to containerRegistry below. "Grant access to all
|
||||||
|
# pipelines" should also be checked. To revoke these
|
||||||
|
# credentials, we can change the password on the certbotbot
|
||||||
|
# Docker Hub account or remove the account from the
|
||||||
|
# Certbot organization on Docker Hub.
|
||||||
|
containerRegistry: docker-hub
|
||||||
|
displayName: Login to Docker Hub
|
||||||
|
- bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
|
||||||
|
displayName: Deploy the Docker images
|
||||||
|
|||||||
@@ -5,9 +5,10 @@ stages:
|
|||||||
variables:
|
variables:
|
||||||
- group: certbot-common
|
- group: certbot-common
|
||||||
pool:
|
pool:
|
||||||
vmImage: ubuntu-latest
|
vmImage: ubuntu-20.04
|
||||||
steps:
|
steps:
|
||||||
- bash: |
|
- bash: |
|
||||||
|
set -e
|
||||||
MESSAGE="\
|
MESSAGE="\
|
||||||
---\n\
|
---\n\
|
||||||
##### Azure Pipeline
|
##### Azure Pipeline
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
stages:
|
stages:
|
||||||
- stage: TestAndPackage
|
- stage: TestAndPackage
|
||||||
jobs:
|
jobs:
|
||||||
- template: ../jobs/standard-tests-jobs.yml
|
# - template: ../jobs/standard-tests-jobs.yml
|
||||||
- template: ../jobs/extended-tests-jobs.yml
|
# - template: ../jobs/extended-tests-jobs.yml
|
||||||
- template: ../jobs/packaging-jobs.yml
|
- template: ../jobs/packaging-jobs.yml
|
||||||
|
|||||||
23
.azure-pipelines/templates/steps/sphinx-steps.yml
Normal file
23
.azure-pipelines/templates/steps/sphinx-steps.yml
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
steps:
|
||||||
|
- bash: |
|
||||||
|
FINAL_STATUS=0
|
||||||
|
declare -a FAILED_BUILDS
|
||||||
|
python3 -m venv .venv
|
||||||
|
source .venv/bin/activate
|
||||||
|
python tools/pipstrap.py
|
||||||
|
for doc_path in */docs
|
||||||
|
do
|
||||||
|
echo ""
|
||||||
|
echo "##[group]Building $doc_path"
|
||||||
|
pip install -q -e $doc_path/..[docs]
|
||||||
|
if ! sphinx-build -W --keep-going -b html $doc_path $doc_path/_build/html; then
|
||||||
|
FINAL_STATUS=1
|
||||||
|
FAILED_BUILDS[${#FAILED_BUILDS[@]}]="${doc_path%/docs}"
|
||||||
|
fi
|
||||||
|
echo "##[endgroup]"
|
||||||
|
done
|
||||||
|
if [[ $FINAL_STATUS -ne 0 ]]; then
|
||||||
|
echo "##[error]The following builds failed: ${FAILED_BUILDS[*]}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
displayName: Build Sphinx Documentation
|
||||||
@@ -1,9 +1,11 @@
|
|||||||
steps:
|
steps:
|
||||||
- bash: |
|
- bash: |
|
||||||
|
set -e
|
||||||
brew install augeas
|
brew install augeas
|
||||||
condition: startswith(variables['IMAGE_NAME'], 'macOS')
|
condition: startswith(variables['IMAGE_NAME'], 'macOS')
|
||||||
displayName: Install MacOS dependencies
|
displayName: Install MacOS dependencies
|
||||||
- bash: |
|
- bash: |
|
||||||
|
set -e
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
sudo apt-get install -y --no-install-recommends \
|
sudo apt-get install -y --no-install-recommends \
|
||||||
python-dev \
|
python-dev \
|
||||||
@@ -21,7 +23,6 @@ steps:
|
|||||||
inputs:
|
inputs:
|
||||||
versionSpec: $(PYTHON_VERSION)
|
versionSpec: $(PYTHON_VERSION)
|
||||||
addToPath: true
|
addToPath: true
|
||||||
condition: ne(variables['PYTHON_VERSION'], '')
|
|
||||||
# tools/pip_install.py is used to pin packages to a known working version
|
# tools/pip_install.py is used to pin packages to a known working version
|
||||||
# except in tests where the environment variable CERTBOT_NO_PIN is set.
|
# except in tests where the environment variable CERTBOT_NO_PIN is set.
|
||||||
# virtualenv is listed here explicitly to make sure it is upgraded when
|
# virtualenv is listed here explicitly to make sure it is upgraded when
|
||||||
@@ -30,6 +31,8 @@ steps:
|
|||||||
# set, pip updates dependencies it thinks are already satisfied to avoid some
|
# set, pip updates dependencies it thinks are already satisfied to avoid some
|
||||||
# problems with its lack of real dependency resolution.
|
# problems with its lack of real dependency resolution.
|
||||||
- bash: |
|
- bash: |
|
||||||
|
set -e
|
||||||
|
python tools/pipstrap.py
|
||||||
python tools/pip_install.py -I tox virtualenv
|
python tools/pip_install.py -I tox virtualenv
|
||||||
displayName: Install runtime dependencies
|
displayName: Install runtime dependencies
|
||||||
- task: DownloadSecureFile@1
|
- task: DownloadSecureFile@1
|
||||||
@@ -38,14 +41,11 @@ steps:
|
|||||||
secureFile: azure-test-farm.pem
|
secureFile: azure-test-farm.pem
|
||||||
condition: contains(variables['TOXENV'], 'test-farm')
|
condition: contains(variables['TOXENV'], 'test-farm')
|
||||||
- bash: |
|
- bash: |
|
||||||
|
set -e
|
||||||
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
|
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
|
||||||
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
|
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
|
||||||
env
|
env
|
||||||
if [[ "${TOXENV}" == *"oldest"* ]]; then
|
python -m tox
|
||||||
tools/run_oldest_tests.sh
|
|
||||||
else
|
|
||||||
python -m tox
|
|
||||||
fi
|
|
||||||
env:
|
env:
|
||||||
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
|
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
|
||||||
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
|
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
|
||||||
|
|||||||
@@ -8,5 +8,4 @@
|
|||||||
.git
|
.git
|
||||||
.tox
|
.tox
|
||||||
venv
|
venv
|
||||||
venv3
|
|
||||||
docs
|
docs
|
||||||
|
|||||||
18
.editorconfig
Normal file
18
.editorconfig
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# https://editorconfig.org/
|
||||||
|
|
||||||
|
root = true
|
||||||
|
|
||||||
|
[*]
|
||||||
|
insert_final_newline = true
|
||||||
|
trim_trailing_whitespace = true
|
||||||
|
end_of_line = lf
|
||||||
|
|
||||||
|
[*.py]
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 4
|
||||||
|
charset = utf-8
|
||||||
|
max_line_length = 100
|
||||||
|
|
||||||
|
[*.yaml]
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 2
|
||||||
12
.envrc
Normal file
12
.envrc
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# This file is just a nicety for developers who use direnv. When you cd under
|
||||||
|
# the Certbot repo, Certbot's virtual environment will be automatically
|
||||||
|
# activated and then deactivated when you cd elsewhere. Developers have to have
|
||||||
|
# direnv set up and run `direnv allow` to allow this file to execute on their
|
||||||
|
# system. You can find more information at https://direnv.net/.
|
||||||
|
. venv/bin/activate
|
||||||
|
# direnv doesn't support modifying PS1 so we unset it to squelch the error
|
||||||
|
# it'll otherwise print about this being done in the activate script. See
|
||||||
|
# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
|
||||||
|
# prompt to change like it normally does, see
|
||||||
|
# https://github.com/direnv/direnv/wiki/Python#restoring-the-ps1.
|
||||||
|
unset PS1
|
||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -60,3 +60,8 @@ stage
|
|||||||
*.snap
|
*.snap
|
||||||
snap-constraints.txt
|
snap-constraints.txt
|
||||||
qemu-*
|
qemu-*
|
||||||
|
certbot-dns*/certbot-dns*_amd64*.txt
|
||||||
|
certbot-dns*/certbot-dns*_arm*.txt
|
||||||
|
/certbot_amd64*.txt
|
||||||
|
/certbot_arm*.txt
|
||||||
|
certbot-dns*/snap
|
||||||
|
|||||||
@@ -254,7 +254,7 @@ ignore-mixin-members=yes
|
|||||||
# List of module names for which member attributes should not be checked
|
# List of module names for which member attributes should not be checked
|
||||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||||
# and thus existing member attributes cannot be deduced by static analysis
|
# and thus existing member attributes cannot be deduced by static analysis
|
||||||
ignored-modules=pkg_resources,confargparse,argparse,six.moves,six.moves.urllib
|
ignored-modules=pkg_resources,confargparse,argparse
|
||||||
# import errors ignored only in 1.4.4
|
# import errors ignored only in 1.4.4
|
||||||
# https://bitbucket.org/logilab/pylint/commits/cd000904c9e2
|
# https://bitbucket.org/logilab/pylint/commits/cd000904c9e2
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
Authors
|
Authors
|
||||||
=======
|
=======
|
||||||
|
|
||||||
|
* [Aaron Gable](https://github.com/aarongable)
|
||||||
* [Aaron Zirbes](https://github.com/aaronzirbes)
|
* [Aaron Zirbes](https://github.com/aaronzirbes)
|
||||||
* Aaron Zuehlke
|
* Aaron Zuehlke
|
||||||
* Ada Lovelace
|
* Ada Lovelace
|
||||||
@@ -60,7 +61,9 @@ Authors
|
|||||||
* [DanCld](https://github.com/DanCld)
|
* [DanCld](https://github.com/DanCld)
|
||||||
* [Daniel Albers](https://github.com/AID)
|
* [Daniel Albers](https://github.com/AID)
|
||||||
* [Daniel Aleksandersen](https://github.com/da2x)
|
* [Daniel Aleksandersen](https://github.com/da2x)
|
||||||
|
* [Daniel Almasi](https://github.com/almasen)
|
||||||
* [Daniel Convissor](https://github.com/convissor)
|
* [Daniel Convissor](https://github.com/convissor)
|
||||||
|
* [Daniel "Drex" Drexler](https://github.com/aeturnum)
|
||||||
* [Daniel Huang](https://github.com/dhuang)
|
* [Daniel Huang](https://github.com/dhuang)
|
||||||
* [Dave Guarino](https://github.com/daguar)
|
* [Dave Guarino](https://github.com/daguar)
|
||||||
* [David cz](https://github.com/dave-cz)
|
* [David cz](https://github.com/dave-cz)
|
||||||
@@ -148,11 +151,13 @@ Authors
|
|||||||
* [Lior Sabag](https://github.com/liorsbg)
|
* [Lior Sabag](https://github.com/liorsbg)
|
||||||
* [Lipis](https://github.com/lipis)
|
* [Lipis](https://github.com/lipis)
|
||||||
* [lord63](https://github.com/lord63)
|
* [lord63](https://github.com/lord63)
|
||||||
|
* [Lorenzo Fundaró](https://github.com/lfundaro)
|
||||||
* [Luca Beltrame](https://github.com/lbeltrame)
|
* [Luca Beltrame](https://github.com/lbeltrame)
|
||||||
* [Luca Ebach](https://github.com/lucebac)
|
* [Luca Ebach](https://github.com/lucebac)
|
||||||
* [Luca Olivetti](https://github.com/olivluca)
|
* [Luca Olivetti](https://github.com/olivluca)
|
||||||
* [Luke Rogers](https://github.com/lukeroge)
|
* [Luke Rogers](https://github.com/lukeroge)
|
||||||
* [Maarten](https://github.com/mrtndwrd)
|
* [Maarten](https://github.com/mrtndwrd)
|
||||||
|
* [Mads Jensen](https://github.com/atombrella)
|
||||||
* [Maikel Martens](https://github.com/krukas)
|
* [Maikel Martens](https://github.com/krukas)
|
||||||
* [Malte Janduda](https://github.com/MalteJ)
|
* [Malte Janduda](https://github.com/MalteJ)
|
||||||
* [Mantas Mikulėnas](https://github.com/grawity)
|
* [Mantas Mikulėnas](https://github.com/grawity)
|
||||||
@@ -212,6 +217,7 @@ Authors
|
|||||||
* [Richard Barnes](https://github.com/r-barnes)
|
* [Richard Barnes](https://github.com/r-barnes)
|
||||||
* [Richard Panek](https://github.com/kernelpanek)
|
* [Richard Panek](https://github.com/kernelpanek)
|
||||||
* [Robert Buchholz](https://github.com/rbu)
|
* [Robert Buchholz](https://github.com/rbu)
|
||||||
|
* [Robert Dailey](https://github.com/pahrohfit)
|
||||||
* [Robert Habermann](https://github.com/frennkie)
|
* [Robert Habermann](https://github.com/frennkie)
|
||||||
* [Robert Xiao](https://github.com/nneonneo)
|
* [Robert Xiao](https://github.com/nneonneo)
|
||||||
* [Roland Shoemaker](https://github.com/rolandshoemaker)
|
* [Roland Shoemaker](https://github.com/rolandshoemaker)
|
||||||
@@ -237,6 +243,7 @@ Authors
|
|||||||
* [Spencer Bliven](https://github.com/sbliven)
|
* [Spencer Bliven](https://github.com/sbliven)
|
||||||
* [Stacey Sheldon](https://github.com/solidgoldbomb)
|
* [Stacey Sheldon](https://github.com/solidgoldbomb)
|
||||||
* [Stavros Korokithakis](https://github.com/skorokithakis)
|
* [Stavros Korokithakis](https://github.com/skorokithakis)
|
||||||
|
* [Ștefan Talpalaru](https://github.com/stefantalpalaru)
|
||||||
* [Stefan Weil](https://github.com/stweil)
|
* [Stefan Weil](https://github.com/stweil)
|
||||||
* [Steve Desmond](https://github.com/stevedesmond-ca)
|
* [Steve Desmond](https://github.com/stevedesmond-ca)
|
||||||
* [sydneyli](https://github.com/sydneyli)
|
* [sydneyli](https://github.com/sydneyli)
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ to the Sphinx generated docs is provided below.
|
|||||||
|
|
||||||
|
|
||||||
[1] https://github.com/blog/1184-contributing-guidelines
|
[1] https://github.com/blog/1184-contributing-guidelines
|
||||||
[2] http://docutils.sourceforge.net/docs/user/rst/quickref.html#hyperlink-targets
|
[2] https://docutils.sourceforge.io/docs/user/rst/quickref.html#hyperlink-targets
|
||||||
|
|
||||||
-->
|
-->
|
||||||
|
|
||||||
|
|||||||
@@ -15,6 +15,6 @@ RUN apt-get update && \
|
|||||||
/tmp/* \
|
/tmp/* \
|
||||||
/var/tmp/*
|
/var/tmp/*
|
||||||
|
|
||||||
RUN VENV_NAME="../venv3" python3 tools/venv3.py
|
RUN VENV_NAME="../venv" python3 tools/venv.py
|
||||||
|
|
||||||
ENV PATH /opt/certbot/venv3/bin:$PATH
|
ENV PATH /opt/certbot/venv/bin:$PATH
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ This module is an implementation of the `ACME protocol`_.
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
import sys
|
import sys
|
||||||
import warnings
|
|
||||||
|
|
||||||
# This code exists to keep backwards compatibility with people using acme.jose
|
# This code exists to keep backwards compatibility with people using acme.jose
|
||||||
# before it became the standalone josepy package.
|
# before it became the standalone josepy package.
|
||||||
|
|||||||
@@ -8,15 +8,15 @@ import socket
|
|||||||
|
|
||||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||||
import josepy as jose
|
import josepy as jose
|
||||||
import requests
|
|
||||||
import six
|
|
||||||
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
|
||||||
from OpenSSL import crypto
|
from OpenSSL import crypto
|
||||||
|
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
||||||
|
import requests
|
||||||
|
|
||||||
from acme import crypto_util
|
from acme import crypto_util
|
||||||
from acme import errors
|
from acme import errors
|
||||||
from acme import fields
|
from acme import fields
|
||||||
from acme.mixins import ResourceMixin, TypeMixin
|
from acme.mixins import ResourceMixin
|
||||||
|
from acme.mixins import TypeMixin
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)
|
|||||||
class Challenge(jose.TypedJSONObjectWithFields):
|
class Challenge(jose.TypedJSONObjectWithFields):
|
||||||
# _fields_to_partial_json
|
# _fields_to_partial_json
|
||||||
"""ACME challenge."""
|
"""ACME challenge."""
|
||||||
TYPES = {} # type: dict
|
TYPES: dict = {}
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_json(cls, jobj):
|
def from_json(cls, jobj):
|
||||||
@@ -38,7 +38,7 @@ class Challenge(jose.TypedJSONObjectWithFields):
|
|||||||
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
|
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
|
||||||
# _fields_to_partial_json
|
# _fields_to_partial_json
|
||||||
"""ACME challenge response."""
|
"""ACME challenge response."""
|
||||||
TYPES = {} # type: dict
|
TYPES: dict = {}
|
||||||
resource_type = 'challenge'
|
resource_type = 'challenge'
|
||||||
resource = fields.Resource(resource_type)
|
resource = fields.Resource(resource_type)
|
||||||
|
|
||||||
@@ -145,12 +145,11 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
|||||||
return jobj
|
return jobj
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
|
||||||
class KeyAuthorizationChallenge(_TokenChallenge):
|
|
||||||
"""Challenge based on Key Authorization.
|
"""Challenge based on Key Authorization.
|
||||||
|
|
||||||
:param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
|
:param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
|
||||||
that will be used to generate `response`.
|
that will be used to generate ``response``.
|
||||||
:param str typ: type of the challenge
|
:param str typ: type of the challenge
|
||||||
"""
|
"""
|
||||||
typ = NotImplemented
|
typ = NotImplemented
|
||||||
|
|||||||
@@ -4,10 +4,14 @@ import collections
|
|||||||
import datetime
|
import datetime
|
||||||
from email.utils import parsedate_tz
|
from email.utils import parsedate_tz
|
||||||
import heapq
|
import heapq
|
||||||
|
import http.client as http_client
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
import sys
|
|
||||||
import time
|
import time
|
||||||
|
+from typing import Dict
+from typing import List
+from typing import Set
+from typing import Text
 
 import josepy as jose
 import OpenSSL
@@ -15,38 +19,21 @@ import requests
 from requests.adapters import HTTPAdapter
 from requests.utils import parse_header_links
 from requests_toolbelt.adapters.source import SourceAddressAdapter
-import six
-from six.moves import http_client
 
 from acme import crypto_util
 from acme import errors
 from acme import jws
 from acme import messages
-from acme.magic_typing import Dict
-from acme.magic_typing import List
-from acme.magic_typing import Set
-from acme.magic_typing import Text
 from acme.mixins import VersionedLEACMEMixin
 
 logger = logging.getLogger(__name__)
 
-# Prior to Python 2.7.9 the stdlib SSL module did not allow a user to configure
-# many important security related options. On these platforms we use PyOpenSSL
-# for SSL, which does allow these options to be configured.
-# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
-if sys.version_info < (2, 7, 9):  # pragma: no cover
-    try:
-        requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()  # type: ignore
-    except AttributeError:
-        import urllib3.contrib.pyopenssl
-        urllib3.contrib.pyopenssl.inject_into_urllib3()
 
 DEFAULT_NETWORK_TIMEOUT = 45
 
 DER_CONTENT_TYPE = 'application/pkix-cert'
 
 
-class ClientBase(object):
+class ClientBase:
     """ACME client base object.
 
     :ivar messages.Directory directory:
@@ -125,8 +112,9 @@ class ClientBase(object):
         """
        return self.update_registration(regr, update={'status': 'deactivated'})
 
-    def deactivate_authorization(self, authzr):
-        # type: (messages.AuthorizationResource) -> messages.AuthorizationResource
+    def deactivate_authorization(self,
+                                 authzr: messages.AuthorizationResource
+                                 ) -> messages.AuthorizationResource:
         """Deactivate authorization.
 
         :param messages.AuthorizationResource authzr: The Authorization resource
@@ -201,7 +189,7 @@ class ClientBase(object):
         when = parsedate_tz(retry_after)
         if when is not None:
             try:
-                tz_secs = datetime.timedelta(when[-1] if when[-1] else 0)
+                tz_secs = datetime.timedelta(when[-1] if when[-1] is not None else 0)
                 return datetime.datetime(*when[:7]) - tz_secs
             except (ValueError, OverflowError):
                 pass
@@ -260,7 +248,7 @@ class Client(ClientBase):
         if net is None:
             net = ClientNetwork(key, alg=alg, verify_ssl=verify_ssl)
 
-        if isinstance(directory, six.string_types):
+        if isinstance(directory, str):
             directory = messages.Directory.from_json(
                 net.get(directory).json())
         super(Client, self).__init__(directory=directory,
@@ -436,7 +424,7 @@ class Client(ClientBase):
 
         """
         assert max_attempts > 0
-        attempts = collections.defaultdict(int)  # type: Dict[messages.AuthorizationResource, int]
+        attempts: Dict[messages.AuthorizationResource, int] = collections.defaultdict(int)
         exhausted = set()
 
         # priority queue with datetime.datetime (based on Retry-After) as key,
@@ -448,7 +436,7 @@ class Client(ClientBase):
         heapq.heapify(waiting)
         # mapping between original Authorization Resource and the most
         # recently updated one
-        updated = dict((authzr, authzr) for authzr in authzrs)
+        updated = {authzr: authzr for authzr in authzrs}
 
         while waiting:
             # find the smallest Retry-After, and sleep if necessary
@@ -475,7 +463,7 @@ class Client(ClientBase):
                 exhausted.add(authzr)
 
         if exhausted or any(authzr.body.status == messages.STATUS_INVALID
-                            for authzr in six.itervalues(updated)):
+                            for authzr in updated.values()):
             raise errors.PollError(exhausted, updated)
 
         updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
@@ -549,7 +537,7 @@ class Client(ClientBase):
         :rtype: `list` of `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
 
         """
-        chain = []  # type: List[jose.ComparableX509]
+        chain: List[jose.ComparableX509] = []
         uri = certr.cert_chain_uri
         while uri is not None and len(chain) < max_length:
             response, cert = self._get_cert(uri)
@@ -801,14 +789,14 @@ class ClientV2(ClientBase):
         """
         # Can't use response.links directly because it drops multiple links
         # of the same relation type, which is possible in RFC8555 responses.
-        if not 'Link' in response.headers:
+        if 'Link' not in response.headers:
             return []
         links = parse_header_links(response.headers['Link'])
         return [l['url'] for l in links
                 if 'rel' in l and 'url' in l and l['rel'] == relation_type]
 
 
-class BackwardsCompatibleClientV2(object):
+class BackwardsCompatibleClientV2:
     """ACME client wrapper that tends towards V2-style calls, but
     supports V1 servers.
 
@@ -951,7 +939,7 @@ class BackwardsCompatibleClientV2(object):
         return self.client.external_account_required()
 
 
-class ClientNetwork(object):
+class ClientNetwork:
     """Wrapper around requests that signs POSTs for authentication.
 
     Also adds user agent, and handles Content-Type.
@@ -981,7 +969,7 @@ class ClientNetwork(object):
         self.account = account
         self.alg = alg
         self.verify_ssl = verify_ssl
-        self._nonces = set()  # type: Set[Text]
+        self._nonces: Set[Text] = set()
         self.user_agent = user_agent
         self.session = requests.Session()
         self._default_timeout = timeout
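Most of the client changes above follow one mechanical pattern: `# type:` comments become PEP 526 annotations and six helpers become their built-in equivalents. A minimal illustrative sketch (standalone, using a plain `str` key instead of the real `AuthorizationResource`):

    import collections
    from typing import Dict

    # Old spelling (Python 2 compatible): the type lives in a comment.
    attempts = collections.defaultdict(int)  # type: Dict[str, int]

    # New spelling (PEP 526): the same type as a real annotation.
    attempts_annotated: Dict[str, int] = collections.defaultdict(int)

    # six.itervalues(d) becomes d.values(), six.string_types becomes str.
    updated = {"a": 1, "b": 2}
    assert list(updated.values()) == [1, 2]
    assert isinstance("https://example.com/dir", str)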
@@ -5,15 +5,15 @@ import logging
 import os
 import re
 import socket
+from typing import Callable
+from typing import Tuple
+from typing import Union
 
 import josepy as jose
 from OpenSSL import crypto
 from OpenSSL import SSL  # type: ignore # https://github.com/python/typeshed/issues/2052
 
 from acme import errors
-from acme.magic_typing import Callable
-from acme.magic_typing import Tuple
-from acme.magic_typing import Union
 
 logger = logging.getLogger(__name__)
 
@@ -27,7 +27,7 @@ logger = logging.getLogger(__name__)
 _DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD  # type: ignore
 
 
-class _DefaultCertSelection(object):
+class _DefaultCertSelection:
     def __init__(self, certs):
         self.certs = certs
 
@@ -36,7 +36,7 @@ class _DefaultCertSelection(object):
         return self.certs.get(server_name, None)
 
 
-class SSLSocket(object):  # pylint: disable=too-few-public-methods
+class SSLSocket:  # pylint: disable=too-few-public-methods
     """SSL wrapper for sockets.
 
     :ivar socket sock: Original wrapped socket.
@@ -93,7 +93,7 @@ class SSLSocket(object):  # pylint: disable=too-few-public-methods
             new_context.set_alpn_select_callback(self.alpn_selection)
             connection.set_context(new_context)
 
-class FakeConnection(object):
+class FakeConnection:
     """Fake OpenSSL.SSL.Connection."""
 
     # pylint: disable=missing-function-docstring
@@ -166,9 +166,9 @@ def probe_sni(name, host, port=443, timeout=300,  # pylint: disable=too-many-argu
             " from {0}:{1}".format(
                 source_address[0],
                 source_address[1]
-            ) if socket_kwargs else ""
+            ) if any(source_address) else ""
         )
-        socket_tuple = (host, port)  # type: Tuple[str, int]
+        socket_tuple: Tuple[str, int] = (host, port)
         sock = socket.create_connection(socket_tuple, **socket_kwargs)  # type: ignore
     except socket.error as error:
         raise errors.Error(error)
@@ -186,6 +186,7 @@ def probe_sni(name, host, port=443, timeout=300,  # pylint: disable=too-many-argu
         raise errors.Error(error)
     return client_ssl.get_peer_certificate()
 
+
 def make_csr(private_key_pem, domains, must_staple=False):
     """Generate a CSR containing a list of domains as subjectAltNames.
 
@@ -217,6 +218,7 @@ def make_csr(private_key_pem, domains, must_staple=False):
     return crypto.dump_certificate_request(
         crypto.FILETYPE_PEM, csr)
 
+
 def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
     common_name = loaded_cert_or_req.get_subject().CN
     sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)
@@ -225,6 +227,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
         return sans
     return [common_name] + [d for d in sans if d != common_name]
 
+
 def _pyopenssl_cert_or_req_san(cert_or_req):
     """Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
 
@@ -253,7 +256,7 @@ def _pyopenssl_cert_or_req_san(cert_or_req):
 
     if isinstance(cert_or_req, crypto.X509):
         # pylint: disable=line-too-long
-        func = crypto.dump_certificate  # type: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]]
+        func: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]] = crypto.dump_certificate
     else:
         func = crypto.dump_certificate_request
     text = func(crypto.FILETYPE_TEXT, cert_or_req).decode("utf-8")
@@ -317,6 +320,7 @@ def gen_ss_cert(key, domains, not_before=None,
     cert.sign(key, "sha256")
     return cert
 
+
 def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
     """Dump certificate chain into a bundle.
 
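One behavioral fix above is easy to miss: `probe_sni` used to decide whether to log a "from host:port" suffix by testing `socket_kwargs`, which is a non-empty dict even for the default `source_address=('', 0)` and therefore always truthy. A small sketch of the difference, assuming that default:

    source_address = ('', 0)  # the default: no explicit source binding
    socket_kwargs = {'source_address': source_address}

    assert bool(socket_kwargs)        # old test: always true
    assert not any(source_address)    # new test: false unless a real address or port is set

    suffix = " from {0}:{1}".format(*source_address) if any(source_address) else ""
    assert suffix == ""
    assert any(('127.0.0.1', 0))      # a real bind address enables the suffix again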
@@ -49,7 +49,7 @@ class MissingNonce(NonceError):
     Replay-Nonce header field in each successful response to a POST it
     provides to a client (...)".
 
-    :ivar requests.Response response: HTTP Response
+    :ivar requests.Response ~.response: HTTP Response
 
     """
     def __init__(self, response, *args, **kwargs):
@@ -1,15 +1,17 @@
-"""Shim class to not have to depend on typing module in prod."""
-import sys
+"""Simple shim around the typing module.
+
+This was useful when this code supported Python 2 and typing wasn't always
+available. This code is being kept for now for backwards compatibility.
+"""
+import warnings
 
+from typing import *  # pylint: disable=wildcard-import, unused-wildcard-import
+from typing import Collection, IO  # type: ignore
 
-class TypingClass(object):
+warnings.warn("acme.magic_typing is deprecated and will be removed in a future release.",
+              DeprecationWarning)
+
+
+class TypingClass:
     """Ignore import errors by getting anything"""
     def __getattr__(self, name):
-        return None
-
-try:
-    # mypy doesn't respect modifying sys.modules
-    from typing import *  # pylint: disable=wildcard-import, unused-wildcard-import
-    from typing import Collection, IO  # type: ignore
-except ImportError:
-    sys.modules[__name__] = TypingClass()
+        return None  # pragma: no cover
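With the rewrite above, `acme.magic_typing` simply re-exports the standard `typing` names and warns on import. A usage sketch, assuming the new acme package is installed and the module has not already been imported in this interpreter:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        from acme.magic_typing import Dict  # noqa: F401  # still works, but deprecated

    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
    print(Dict)  # the same object as typing.Dict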
@@ -1,8 +1,8 @@
 """ACME protocol messages."""
 import json
+from collections.abc import Hashable
 
 import josepy as jose
-import six
 
 from acme import challenges
 from acme import errors
@@ -11,13 +11,6 @@ from acme import jws
 from acme import util
 from acme.mixins import ResourceMixin
 
-try:
-    from collections.abc import Hashable
-except ImportError:  # pragma: no cover
-    from collections import Hashable
-
-
 
 OLD_ERROR_PREFIX = "urn:acme:error:"
 ERROR_PREFIX = "urn:ietf:params:acme:error:"
 
@@ -68,7 +61,6 @@ def is_acme_error(err):
     return False
 
 
-@six.python_2_unicode_compatible
 class Error(jose.JSONObjectWithFields, errors.Error):
     """ACME error.
 
@@ -158,13 +150,10 @@ class _Constant(jose.JSONDeSerializable, Hashable):  # type: ignore
     def __hash__(self):
         return hash((self.__class__, self.name))
 
-    def __ne__(self, other):
-        return not self == other
-
 
 class Status(_Constant):
     """ACME "status" field."""
-    POSSIBLE_NAMES = {}  # type: dict
+    POSSIBLE_NAMES: dict = {}
 STATUS_UNKNOWN = Status('unknown')
 STATUS_PENDING = Status('pending')
 STATUS_PROCESSING = Status('processing')
@@ -177,7 +166,7 @@ STATUS_DEACTIVATED = Status('deactivated')
 
 class IdentifierType(_Constant):
     """ACME identifier type."""
-    POSSIBLE_NAMES = {}  # type: dict
+    POSSIBLE_NAMES: dict = {}
 IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder
 
 
@@ -195,7 +184,7 @@ class Identifier(jose.JSONObjectWithFields):
 class Directory(jose.JSONDeSerializable):
     """Directory."""
 
-    _REGISTERED_TYPES = {}  # type: dict
+    _REGISTERED_TYPES: dict = {}
 
     class Meta(jose.JSONObjectWithFields):
         """Directory Meta."""
@@ -206,7 +195,7 @@ class Directory(jose.JSONDeSerializable):
         external_account_required = jose.Field('externalAccountRequired', omitempty=True)
 
         def __init__(self, **kwargs):
-            kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
+            kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
             super(Directory.Meta, self).__init__(**kwargs)
 
         @property
@@ -275,7 +264,7 @@ class Resource(jose.JSONObjectWithFields):
 class ResourceWithURI(Resource):
     """ACME Resource with URI.
 
-    :ivar unicode uri: Location of the resource.
+    :ivar unicode ~.uri: Location of the resource.
 
     """
     uri = jose.Field('uri')  # no ChallengeResource.uri
@@ -285,7 +274,7 @@ class ResourceBody(jose.JSONObjectWithFields):
     """ACME Resource Body."""
 
 
-class ExternalAccountBinding(object):
+class ExternalAccountBinding:
     """ACME External Account Binding"""
 
     @classmethod
@@ -315,6 +304,9 @@ class Registration(ResourceBody):
     # on new-reg key server ignores 'key' and populates it based on
     # JWS.signature.combined.jwk
     key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
+    # Contact field implements special behavior to allow messages that clear existing
+    # contacts while not expecting the `contact` field when loading from json.
+    # This is implemented in the constructor and *_json methods.
     contact = jose.Field('contact', omitempty=True, default=())
     agreement = jose.Field('agreement', omitempty=True)
     status = jose.Field('status', omitempty=True)
@@ -327,24 +319,73 @@ class Registration(ResourceBody):
 
     @classmethod
     def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
-        """Create registration resource from contact details."""
+        """
+        Create registration resource from contact details.
+
+        The `contact` keyword being passed to a Registration object is meaningful, so
+        this function represents empty iterables in its kwargs by passing on an empty
+        `tuple`.
+        """
+
+        # Note if `contact` was in kwargs.
+        contact_provided = 'contact' in kwargs
+
+        # Pop `contact` from kwargs and add formatted email or phone numbers
         details = list(kwargs.pop('contact', ()))
         if phone is not None:
             details.append(cls.phone_prefix + phone)
         if email is not None:
             details.extend([cls.email_prefix + mail for mail in email.split(',')])
-        kwargs['contact'] = tuple(details)
+
+        # Insert formatted contact information back into kwargs
+        # or insert an empty tuple if `contact` provided.
+        if details or contact_provided:
+            kwargs['contact'] = tuple(details)
 
         if external_account_binding:
             kwargs['external_account_binding'] = external_account_binding
 
         return cls(**kwargs)
 
+    def __init__(self, **kwargs):
+        """Note if the user provides a value for the `contact` member."""
+        if 'contact' in kwargs:
+            # Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
+            object.__setattr__(self, '_add_contact', True)
+        super(Registration, self).__init__(**kwargs)
+
     def _filter_contact(self, prefix):
         return tuple(
             detail[len(prefix):] for detail in self.contact  # pylint: disable=not-an-iterable
             if detail.startswith(prefix))
 
+    def _add_contact_if_appropriate(self, jobj):
+        """
+        The `contact` member of Registration objects should not be required when
+        de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
+        it should be included in serializations if it was provided.
+
+        :param jobj: Dictionary containing this Registrations' data
+        :type jobj: dict
+
+        :returns: Dictionary containing Registrations data to transmit to the server
+        :rtype: dict
+        """
+        if getattr(self, '_add_contact', False):
+            jobj['contact'] = self.encode('contact')
+
+        return jobj
+
+    def to_partial_json(self):
+        """Modify josepy.JSONDeserializable.to_partial_json()"""
+        jobj = super(Registration, self).to_partial_json()
+        return self._add_contact_if_appropriate(jobj)
+
+    def fields_to_partial_json(self):
+        """Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
+        jobj = super(Registration, self).fields_to_partial_json()
+        return self._add_contact_if_appropriate(jobj)
+
     @property
     def phones(self):
         """All phones found in the ``contact`` field."""
@@ -413,7 +454,7 @@ class ChallengeBody(ResourceBody):
                        omitempty=True, default=None)
 
     def __init__(self, **kwargs):
-        kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
+        kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
         super(ChallengeBody, self).__init__(**kwargs)
 
     def encode(self, name):
@@ -575,7 +616,7 @@ class Order(ResourceBody):
     :ivar str finalize: URL to POST to to request issuance once all
         authorizations have "valid" status.
     :ivar datetime.datetime expires: When the order expires.
-    :ivar .Error error: Any error that occurred during finalization, if applicable.
+    :ivar ~.Error error: Any error that occurred during finalization, if applicable.
     """
     identifiers = jose.Field('identifiers', omitempty=True)
     status = jose.Field('status', decoder=Status.from_json,
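The `Registration` changes above are about when `contact` is serialized at all: an omitted contact is no longer sent (so existing server-side contacts are left untouched), while an explicitly supplied one, even an empty tuple, is. A sketch mirroring the new test, assuming the updated acme package:

    from acme.messages import NewRegistration, Registration

    # No contact given: the field defaults to () but is not transmitted.
    empty = NewRegistration()
    assert 'contact' not in empty.to_partial_json()

    # An explicit empty tuple is transmitted, which is how a client clears
    # the contacts registered on the server.
    cleared = NewRegistration(contact=())
    assert 'contact' in cleared.to_partial_json()

    # from_data() only inserts 'contact' when some detail was provided.
    reg = Registration.from_data(email='admin@example.com')
    assert reg.emails == ('admin@example.com',)
    assert 'contact' in reg.to_partial_json()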
@@ -1,7 +1,7 @@
 """Useful mixins for Challenge and Resource objects"""
 
 
-class VersionedLEACMEMixin(object):
+class VersionedLEACMEMixin:
     """This mixin stores the version of Let's Encrypt's endpoint being used."""
     @property
     def le_acme_version(self):
@@ -1,17 +1,16 @@
 """Support for standalone client challenge solvers. """
 import collections
 import functools
+import http.client as http_client
+import http.server as BaseHTTPServer
 import logging
 import socket
+import socketserver
 import threading
+from typing import List
-from six.moves import BaseHTTPServer  # type: ignore
-from six.moves import http_client
-from six.moves import socketserver  # type: ignore
 
 from acme import challenges
 from acme import crypto_util
-from acme.magic_typing import List
 
 logger = logging.getLogger(__name__)
 
@@ -54,7 +53,7 @@ class ACMEServerMixin:
     allow_reuse_address = True
 
 
-class BaseDualNetworkedServers(object):
+class BaseDualNetworkedServers:
     """Base class for a pair of IPv6 and IPv4 servers that tries to do everything
        it's asked for both servers, but where failures in one server don't
       affect the other.
@@ -64,8 +63,8 @@ class BaseDualNetworkedServers(object):
 
     def __init__(self, ServerClass, server_address, *remaining_args, **kwargs):
         port = server_address[1]
-        self.threads = []  # type: List[threading.Thread]
-        self.servers = []  # type: List[ACMEServerMixin]
+        self.threads: List[threading.Thread] = []
+        self.servers: List[ACMEServerMixin] = []
 
         # Must try True first.
         # Ubuntu, for example, will fail to bind to IPv4 if we've already bound
@@ -1,7 +1,6 @@
 """ACME utilities."""
-import six
 
 
 def map_keys(dikt, func):
     """Map dictionary keys."""
-    return dict((func(key), value) for key, value in six.iteritems(dikt))
+    return {func(key): value for key, value in dikt.items()}
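The `map_keys` change is behavior-preserving: a dict comprehension instead of `dict()` over a generator plus `six.iteritems`. For example:

    def map_keys(dikt, func):
        """Map dictionary keys."""
        return {func(key): value for key, value in dikt.items()}

    assert map_keys({'a': 1, 'b': 2}, str.upper) == {'A': 1, 'B': 2}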
@@ -9,7 +9,7 @@ BUILDDIR = _build
 
 # User-friendly check for sphinx-build
 ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/)
 endif
 
 # Internal variables.
@@ -85,7 +85,9 @@ language = 'en'
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['_build']
+exclude_patterns = [
+    '_build',
+]
 
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
@@ -120,7 +122,7 @@ todo_include_todos = False
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 
-# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
+# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
 # on_rtd is whether we are on readthedocs.org
 on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
 if not on_rtd:  # only import and set the theme if we're building docs locally
@@ -65,7 +65,7 @@ if errorlevel 9009 (
 	echo.may add the Sphinx directory to PATH.
 	echo.
 	echo.If you don't have Sphinx installed, grab it from
-	echo.http://sphinx-doc.org/
+	echo.https://www.sphinx-doc.org/
 	exit /b 1
 )
 
@@ -1 +1,3 @@
+:orphan:
+
 .. literalinclude:: ../jws-help.txt
@@ -1,41 +1,25 @@
-from distutils.version import LooseVersion
 import sys
 
-from setuptools import __version__ as setuptools_version
 from setuptools import find_packages
 from setuptools import setup
-from setuptools.command.test import test as TestCommand
 
-version = '1.7.0.dev0'
+version = '1.14.0.dev0'
 
 # Please update tox.ini when modifying dependency version requirements
 install_requires = [
-    # load_pem_private/public_key (>=0.6)
-    # rsa_recover_prime_factors (>=0.8)
-    'cryptography>=1.2.3',
+    'cryptography>=2.1.4',
     # formerly known as acme.jose:
     # 1.1.0+ is required to avoid the warnings described at
     # https://github.com/certbot/josepy/issues/13.
     'josepy>=1.1.0',
-    # Connection.set_tlsext_host_name (>=0.13) + matching Xenial requirements (>=0.15.1)
-    'PyOpenSSL>=0.15.1',
+    'PyOpenSSL>=17.3.0',
     'pyrfc3339',
     'pytz',
-    'requests[security]>=2.6.0',  # security extras added in 2.4.1
+    'requests>=2.6.0',
     'requests-toolbelt>=0.3.0',
-    'setuptools',
-    'six>=1.9.0',  # needed for python_2_unicode_compatible
+    'setuptools>=39.0.1',
 ]
 
-setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
-if setuptools_known_environment_markers:
-    install_requires.append('mock ; python_version < "3.3"')
-elif 'bdist_wheel' in sys.argv[1:]:
-    raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
-                       'of setuptools. Version 36.2+ of setuptools is required.')
-elif sys.version_info < (3,3):
-    install_requires.append('mock')
-
 dev_extras = [
     'pytest',
     'pytest-xdist',
@@ -47,22 +31,6 @@ docs_extras = [
     'sphinx_rtd_theme',
 ]
 
-
-class PyTest(TestCommand):
-    user_options = []
-
-    def initialize_options(self):
-        TestCommand.initialize_options(self)
-        self.pytest_args = ''
-
-    def run_tests(self):
-        import shlex
-        # import here, cause outside the eggs aren't loaded
-        import pytest
-        errno = pytest.main(shlex.split(self.pytest_args))
-        sys.exit(errno)
-
 
 setup(
     name='acme',
     version=version,
@@ -71,19 +39,17 @@ setup(
     author="Certbot Project",
     author_email='client-dev@letsencrypt.org',
     license='Apache License 2.0',
-    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
+    python_requires='>=3.6',
     classifiers=[
         'Development Status :: 5 - Production/Stable',
         'Intended Audience :: Developers',
         'License :: OSI Approved :: Apache Software License',
         'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
         'Topic :: Internet :: WWW/HTTP',
         'Topic :: Security',
     ],
@@ -95,7 +61,4 @@ setup(
         'dev': dev_extras,
         'docs': docs_extras,
     },
-    test_suite='acme',
-    tests_require=["pytest"],
-    cmdclass={"test": PyTest},
 )
@@ -1,14 +1,11 @@
 """Tests for acme.challenges."""
+import urllib.parse as urllib_parse
 import unittest
+from unittest import mock
 
 import josepy as jose
 import OpenSSL
-try:
-    import mock
-except ImportError:  # pragma: no cover
-    from unittest import mock  # type: ignore
 import requests
-from six.moves.urllib import parse as urllib_parse
 
 from acme import errors
 
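The test modules all drop the same guard: `unittest.mock` has been in the standard library since Python 3.3, and setup.py now requires Python 3.6+, so the try/except fallback to the third-party `mock` backport is no longer needed. Roughly:

    # Old pattern, kept while Python 2 was supported: prefer the backport,
    # fall back to the stdlib module.
    try:
        import mock
    except ImportError:  # pragma: no cover
        from unittest import mock  # type: ignore

    # New pattern: the stdlib module is always there on Python 3.6+.
    from unittest import mock

    with mock.patch('socket.gethostname', return_value='example.invalid'):
        import socket
        assert socket.gethostname() == 'example.invalid'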
@@ -2,17 +2,14 @@
 # pylint: disable=too-many-lines
 import copy
 import datetime
+import http.client as http_client
 import json
 import unittest
+from unittest import mock
 
 import josepy as jose
-try:
-    import mock
-except ImportError:  # pragma: no cover
-    from unittest import mock  # type: ignore
 import OpenSSL
 import requests
-from six.moves import http_client  # pylint: disable=import-error
 
 from acme import challenges
 from acme import errors
@@ -64,7 +61,7 @@ class ClientTestBase(unittest.TestCase):
         self.contact = ('mailto:cert-admin@example.com', 'tel:+12025551212')
         reg = messages.Registration(
             contact=self.contact, key=KEY.public_key())
-        the_arg = dict(reg)  # type: Dict
+        the_arg: Dict = dict(reg)
         self.new_reg = messages.NewRegistration(**the_arg)
         self.regr = messages.RegistrationResource(
             body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
@@ -853,7 +850,7 @@ class ClientV2Test(ClientTestBase):
         self.response.json.return_value = updated_order.to_json()
         self.response.text = CERT_SAN_PEM
         self.response.headers['Link'] ='<https://example.com/acme/cert/1>;rel="alternate", ' + \
-            '<https://exaple.com/dir>;rel="index", ' + \
+            '<https://example.com/dir>;rel="index", ' + \
             '<https://example.com/acme/cert/2>;title="foo";rel="alternate"'
 
         deadline = datetime.datetime(9999, 9, 9)
@@ -1342,7 +1339,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
         # test should fail if the default adapter type is changed by requests
         net = ClientNetwork(key=None, alg=None)
         session = requests.Session()
-        for scheme in session.adapters.keys():
+        for scheme in session.adapters:
             client_network_adapter = net.session.adapters.get(scheme)
             default_adapter = session.adapters.get(scheme)
             self.assertEqual(client_network_adapter.__class__, default_adapter.__class__)
@@ -1,14 +1,13 @@
 """Tests for acme.crypto_util."""
 import itertools
 import socket
+import socketserver
 import threading
 import time
 import unittest
 
 import josepy as jose
 import OpenSSL
-import six
-from six.moves import socketserver  # type: ignore # pylint: disable=import-error
 
 from acme import errors
 import test_util
@@ -27,8 +26,6 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
 
         class _TestServer(socketserver.TCPServer):
 
-            # six.moves.* | pylint: disable=attribute-defined-outside-init,no-init
-
             def server_bind(self):  # pylint: disable=missing-docstring
                 self.socket = SSLSocket(socket.socket(),
                                         certs)
@@ -62,7 +59,6 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
         self.assertRaises(errors.Error, self._probe, b'bar')
 
     def test_probe_connection_error(self):
-        # pylint has a hard time with six
         self.server.server_close()
         original_timeout = socket.getdefaulttimeout()
         try:
@@ -121,9 +117,9 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
     @classmethod
     def _get_idn_names(cls):
         """Returns expected names from '{cert,csr}-idnsans.pem'."""
-        chars = [six.unichr(i) for i in itertools.chain(range(0x3c3, 0x400),
+        chars = [chr(i) for i in itertools.chain(range(0x3c3, 0x400),
                                                         range(0x641, 0x6fc),
                                                         range(0x1820, 0x1877))]
         return [''.join(chars[i: i + 45]) + '.invalid'
                 for i in range(0, len(chars), 45)]
 
@@ -184,7 +180,7 @@ class RandomSnTest(unittest.TestCase):
 
     def setUp(self):
         self.cert_count = 5
-        self.serial_num = []  # type: List[int]
+        self.serial_num: List[int] = []
         self.key = OpenSSL.crypto.PKey()
         self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
 
@@ -1,10 +1,6 @@
 """Tests for acme.errors."""
 import unittest
+from unittest import mock
-try:
-    import mock
-except ImportError:  # pragma: no cover
-    from unittest import mock  # type: ignore
 
 
 class BadNonceTest(unittest.TestCase):
@@ -1,11 +1,8 @@
 """Tests for acme.magic_typing."""
 import sys
 import unittest
+import warnings
+from unittest import mock
-try:
-    import mock
-except ImportError:  # pragma: no cover
-    from unittest import mock  # type: ignore
 
 
 class MagicTypingTest(unittest.TestCase):
@@ -13,32 +10,21 @@ class MagicTypingTest(unittest.TestCase):
     def test_import_success(self):
         try:
             import typing as temp_typing
         except ImportError:  # pragma: no cover
             temp_typing = None  # pragma: no cover
         typing_class_mock = mock.MagicMock()
         text_mock = mock.MagicMock()
         typing_class_mock.Text = text_mock
         sys.modules['typing'] = typing_class_mock
         if 'acme.magic_typing' in sys.modules:
             del sys.modules['acme.magic_typing']  # pragma: no cover
-        from acme.magic_typing import Text
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", category=DeprecationWarning)
+            from acme.magic_typing import Text
         self.assertEqual(Text, text_mock)
         del sys.modules['acme.magic_typing']
         sys.modules['typing'] = temp_typing
 
-    def test_import_failure(self):
-        try:
-            import typing as temp_typing
-        except ImportError:  # pragma: no cover
-            temp_typing = None  # pragma: no cover
-        sys.modules['typing'] = None
-        if 'acme.magic_typing' in sys.modules:
-            del sys.modules['acme.magic_typing']  # pragma: no cover
-        from acme.magic_typing import Text
-        self.assertTrue(Text is None)
-        del sys.modules['acme.magic_typing']
-        sys.modules['typing'] = temp_typing
-
 
 if __name__ == '__main__':
     unittest.main()  # pragma: no cover
@@ -1,11 +1,9 @@
 """Tests for acme.messages."""
+from typing import Dict
 import unittest
+from unittest import mock
 
 import josepy as jose
-try:
-    import mock
-except ImportError:  # pragma: no cover
-    from unittest import mock  # type: ignore
 
 from acme import challenges
 import test_util
@@ -84,7 +82,7 @@ class ConstantTest(unittest.TestCase):
         from acme.messages import _Constant
 
         class MockConstant(_Constant):  # pylint: disable=missing-docstring
-            POSSIBLE_NAMES = {}  # type: Dict
+            POSSIBLE_NAMES: Dict = {}
 
         self.MockConstant = MockConstant  # pylint: disable=invalid-name
         self.const_a = MockConstant('a')
@@ -108,11 +106,11 @@ class ConstantTest(unittest.TestCase):
 
     def test_equality(self):
         const_a_prime = self.MockConstant('a')
-        self.assertFalse(self.const_a == self.const_b)
-        self.assertTrue(self.const_a == const_a_prime)
+        self.assertNotEqual(self.const_a, self.const_b)
+        self.assertEqual(self.const_a, const_a_prime)
 
-        self.assertTrue(self.const_a != self.const_b)
-        self.assertFalse(self.const_a != const_a_prime)
+        self.assertNotEqual(self.const_a, self.const_b)
+        self.assertEqual(self.const_a, const_a_prime)
 
 
 class DirectoryTest(unittest.TestCase):
@@ -254,6 +252,19 @@ class RegistrationTest(unittest.TestCase):
         from acme.messages import Registration
         hash(Registration.from_json(self.jobj_from))
 
+    def test_default_not_transmitted(self):
+        from acme.messages import NewRegistration
+        empty_new_reg = NewRegistration()
+        new_reg_with_contact = NewRegistration(contact=())
+
+        self.assertEqual(empty_new_reg.contact, ())
+        self.assertEqual(new_reg_with_contact.contact, ())
+
+        self.assertTrue('contact' not in empty_new_reg.to_partial_json())
+        self.assertTrue('contact' not in empty_new_reg.fields_to_partial_json())
+        self.assertTrue('contact' in new_reg_with_contact.to_partial_json())
+        self.assertTrue('contact' in new_reg_with_contact.fields_to_partial_json())
+
 
 class UpdateRegistrationTest(unittest.TestCase):
     """Tests for acme.messages.UpdateRegistration."""
@@ -1,16 +1,13 @@
 """Tests for acme.standalone."""
+import http.client as http_client
 import socket
+import socketserver
 import threading
 import unittest
+from unittest import mock
 
 import josepy as jose
-try:
-    import mock
-except ImportError:  # pragma: no cover
-    from unittest import mock  # type: ignore
 import requests
-from six.moves import http_client  # pylint: disable=import-error
-from six.moves import socketserver  # type: ignore # pylint: disable=import-error
 
 from acme import challenges
 from acme import crypto_util
@@ -44,7 +41,7 @@ class HTTP01ServerTest(unittest.TestCase):
     def setUp(self):
         self.account_key = jose.JWK.load(
             test_util.load_vector('rsa1024_key.pem'))
-        self.resources = set()  # type: Set
+        self.resources: Set = set()
 
         from acme.standalone import HTTP01Server
         self.server = HTTP01Server(('', 0), resources=self.resources)
@@ -221,7 +218,7 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
     def setUp(self):
         self.account_key = jose.JWK.load(
             test_util.load_vector('rsa1024_key.pem'))
-        self.resources = set()  # type: Set
+        self.resources: Set = set()
 
         from acme.standalone import HTTP01DualNetworkedServers
         self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources)
@@ -9,7 +9,6 @@ import pkg_resources
 
 from certbot import errors
 from certbot import util
-
 from certbot.compat import os
 
 logger = logging.getLogger(__name__)
@@ -3,7 +3,6 @@ import fnmatch
 
 from certbot_apache._internal import interfaces
-
 
 PASS = "CERTBOT_PASS_ASSERT"
 
 
@@ -6,7 +6,7 @@ Authors:
   Raphael Pinson <raphink@gmail.com>
 
 About: Reference
-  Online Apache configuration manual: http://httpd.apache.org/docs/trunk/
+  Online Apache configuration manual: https://httpd.apache.org/docs/trunk/
 
 About: License
   This file is licensed under the LGPL v2+.
@@ -64,10 +64,10 @@ Translates over to:
     "/files/etc/apache2/apache2.conf/bLoCk[1]",
 ]
 """
-from acme.magic_typing import Set
+from typing import Set
 
 from certbot import errors
 from certbot.compat import os
 
 from certbot_apache._internal import apache_util
 from certbot_apache._internal import assertions
 from certbot_apache._internal import interfaces
@@ -355,7 +355,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
         ownpath = self.metadata.get("augeaspath")
 
         directives = self.parser.find_dir(name, start=ownpath, exclude=exclude)
-        already_parsed = set()  # type: Set[str]
+        already_parsed: Set[str] = set()
         for directive in directives:
             # Remove the /arg part from the Augeas path
             directive = directive.partition("/arg")[0]
@@ -1,29 +1,23 @@
|
|||||||
"""Apache Configurator."""
|
"""Apache Configurator."""
|
||||||
# pylint: disable=too-many-lines
|
# pylint: disable=too-many-lines
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
from distutils.version import LooseVersion
|
|
||||||
import copy
|
import copy
|
||||||
|
from distutils.version import LooseVersion
|
||||||
import fnmatch
|
import fnmatch
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
import socket
|
import socket
|
||||||
import time
|
import time
|
||||||
|
from typing import DefaultDict
|
||||||
|
from typing import Dict
|
||||||
|
from typing import List
|
||||||
|
from typing import Set
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
import six
|
|
||||||
import zope.component
|
import zope.component
|
||||||
import zope.interface
|
import zope.interface
|
||||||
try:
|
|
||||||
import apacheconfig
|
|
||||||
HAS_APACHECONFIG = True
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
HAS_APACHECONFIG = False
|
|
||||||
|
|
||||||
from acme import challenges
|
from acme import challenges
|
||||||
from acme.magic_typing import DefaultDict
|
|
||||||
from acme.magic_typing import Dict
|
|
||||||
from acme.magic_typing import List
|
|
||||||
from acme.magic_typing import Set
|
|
||||||
from acme.magic_typing import Union
|
|
||||||
from certbot import errors
|
from certbot import errors
|
||||||
from certbot import interfaces
|
from certbot import interfaces
|
||||||
from certbot import util
|
from certbot import util
|
||||||
@@ -42,6 +36,13 @@ from certbot_apache._internal import http_01
|
|||||||
from certbot_apache._internal import obj
|
from certbot_apache._internal import obj
|
||||||
from certbot_apache._internal import parser
|
from certbot_apache._internal import parser
|
||||||
|
|
||||||
|
try:
|
||||||
|
import apacheconfig
|
||||||
|
HAS_APACHECONFIG = True
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
HAS_APACHECONFIG = False
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -211,23 +212,23 @@ class ApacheConfigurator(common.Installer):
 super(ApacheConfigurator, self).__init__(*args, **kwargs)

 # Add name_server association dict
-self.assoc = {}  # type: Dict[str, obj.VirtualHost]
+self.assoc: Dict[str, obj.VirtualHost] = {}
 # Outstanding challenges
-self._chall_out = set()  # type: Set[KeyAuthorizationAnnotatedChallenge]
+self._chall_out: Set[KeyAuthorizationAnnotatedChallenge] = set()
 # List of vhosts configured per wildcard domain on this run.
 # used by deploy_cert() and enhance()
-self._wildcard_vhosts = {}  # type: Dict[str, List[obj.VirtualHost]]
+self._wildcard_vhosts: Dict[str, List[obj.VirtualHost]] = {}
 # Maps enhancements to vhosts we've enabled the enhancement for
-self._enhanced_vhosts = defaultdict(set)  # type: DefaultDict[str, Set[obj.VirtualHost]]
+self._enhanced_vhosts: DefaultDict[str, Set[obj.VirtualHost]] = defaultdict(set)
 # Temporary state for AutoHSTS enhancement
-self._autohsts = {}  # type: Dict[str, Dict[str, Union[int, float]]]
+self._autohsts: Dict[str, Dict[str, Union[int, float]]] = {}
 # Reverter save notes
 self.save_notes = ""
 # Should we use ParserNode implementation instead of the old behavior
 self.USE_PARSERNODE = use_parsernode
 # Saves the list of file paths that were parsed initially, and
 # not added to parser tree by self.conf("vhost-root") for example.
-self.parsed_paths = []  # type: List[str]
+self.parsed_paths: List[str] = []
 # These will be set in the prepare function
 self._prepared = False
 self.parser = None
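The hunk above converts `# type:` comments into PEP 526 variable annotations, which becomes possible once Python 2 support is dropped. A standalone sketch of the two spellings (plain illustration, not certbot code):

    from collections import defaultdict
    from typing import DefaultDict, Dict, List, Set

    # Python 2 compatible spelling: the type lives in a comment.
    assoc = {}  # type: Dict[str, str]

    # Python 3 only spelling: the annotation is part of the assignment and is
    # visible at runtime through the module's __annotations__ mapping.
    assoc_py3: Dict[str, str] = {}
    enhanced: DefaultDict[str, Set[str]] = defaultdict(set)
    parsed_paths: List[str] = []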
@@ -328,6 +329,9 @@ class ApacheConfigurator(common.Installer):
 if self.version < (2, 2):
     raise errors.NotSupportedError(
         "Apache Version {0} not supported.".format(str(self.version)))
+elif self.version < (2, 4):
+    logger.warning('Support for Apache 2.2 is deprecated and will be removed in a '
+                   'future release.')

 # Recover from previous crash before Augeas initialization to have the
 # correct parse tree from the get go.
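The added `elif` keeps Apache 2.2 working but warns about the coming removal, while anything older than 2.2 is still rejected. A minimal sketch of that gating logic, with a hypothetical stand-alone function and a plain RuntimeError in place of certbot's errors.NotSupportedError:

    import logging

    logger = logging.getLogger(__name__)

    def check_apache_version(version: tuple) -> None:
        # Hypothetical stand-in for the configurator's self.version check.
        if version < (2, 2):
            raise RuntimeError("Apache Version {0} not supported.".format(version))
        elif version < (2, 4):
            logger.warning("Support for Apache 2.2 is deprecated and will be "
                           "removed in a future release.")

    check_apache_version((2, 2))   # logs a deprecation warning
    check_apache_version((2, 4))   # accepted silently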
@@ -464,21 +468,6 @@ class ApacheConfigurator(common.Installer):
     metadata=metadata
 )

-def _wildcard_domain(self, domain):
-    """
-    Checks if domain is a wildcard domain
-
-    :param str domain: Domain to check
-
-    :returns: If the domain is wildcard domain
-    :rtype: bool
-    """
-    if isinstance(domain, six.text_type):
-        wildcard_marker = u"*."
-    else:
-        wildcard_marker = b"*."
-    return domain.startswith(wildcard_marker)
-
 def deploy_cert(self, domain, cert_path, key_path,
                 chain_path=None, fullchain_path=None):
     """Deploys certificate to specified virtual host.
@@ -513,7 +502,7 @@ class ApacheConfigurator(common.Installer):
 :rtype: `list` of :class:`~certbot_apache._internal.obj.VirtualHost`
 """

-if self._wildcard_domain(domain):
+if util.is_wildcard_domain(domain):
     if domain in self._wildcard_vhosts:
         # Vhosts for a wildcard domain were already selected
         return self._wildcard_vhosts[domain]
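With the bytes/text distinction gone, the removed `_wildcard_domain` helper collapses to a plain `str.startswith` check, and the diff switches to the shared helper in certbot's util module. A rough, text-only equivalent for illustration (the canonical implementation is `certbot.util.is_wildcard_domain`):

    def is_wildcard_domain(domain: str) -> bool:
        # Simplified sketch assuming str-only domains on Python 3.
        return domain.startswith("*.")

    assert is_wildcard_domain("*.example.org")
    assert not is_wildcard_domain("a.example.org")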
@@ -845,7 +834,7 @@ class ApacheConfigurator(common.Installer):
 :rtype: set

 """
-all_names = set()  # type: Set[str]
+all_names: Set[str] = set()

 vhost_macro = []

@@ -1009,8 +998,8 @@ class ApacheConfigurator(common.Installer):

 """
 # Search base config, and all included paths for VirtualHosts
-file_paths = {}  # type: Dict[str, str]
-internal_paths = defaultdict(set)  # type: DefaultDict[str, Set[str]]
+file_paths: Dict[str, str] = {}
+internal_paths: DefaultDict[str, Set[str]] = defaultdict(set)
 vhs = []
 # Make a list of parser paths because the parser_paths
 # dictionary may be modified during the loop.
@@ -1462,7 +1451,7 @@ class ApacheConfigurator(common.Installer):
 if not line.lower().lstrip().startswith("rewriterule"):
     return False

-# According to: http://httpd.apache.org/docs/2.4/rewrite/flags.html
+# According to: https://httpd.apache.org/docs/2.4/rewrite/flags.html
 # The syntax of a RewriteRule is:
 # RewriteRule pattern target [Flag1,Flag2,Flag3]
 # i.e. target is required, so it must exist.
@@ -2169,7 +2158,7 @@ class ApacheConfigurator(common.Installer):
 # There can be other RewriteRule directive lines in vhost config.
 # rewrite_args_dict keys are directive ids and the corresponding value
 # for each is a list of arguments to that directive.
-rewrite_args_dict = defaultdict(list)  # type: DefaultDict[str, List[str]]
+rewrite_args_dict: DefaultDict[str, List[str]] = defaultdict(list)
 pat = r'(.*directive\[\d+\]).*'
 for match in rewrite_path:
     m = re.match(pat, match)
@@ -2263,7 +2252,7 @@ class ApacheConfigurator(common.Installer):
 if ssl_vhost.aliases:
     serveralias = "ServerAlias " + " ".join(ssl_vhost.aliases)

-rewrite_rule_args = []  # type: List[str]
+rewrite_rule_args: List[str] = []
 if self.get_version() >= (2, 3, 9):
     rewrite_rule_args = constants.REWRITE_HTTPS_ARGS_WITH_END
 else:
@@ -1,10 +1,10 @@
 """ Dual ParserNode implementation """
+from certbot_apache._internal import apacheparser
 from certbot_apache._internal import assertions
 from certbot_apache._internal import augeasparser
-from certbot_apache._internal import apacheparser


-class DualNodeBase(object):
+class DualNodeBase:
     """ Dual parser interface for in development testing. This is used as the
     base class for dual parser interface classes. This class handles runtime
     attribute value assertions."""
@@ -1,9 +1,9 @@
 """A class that performs HTTP-01 challenges for Apache"""
-import logging
 import errno
+import logging
+from typing import List
+from typing import Set

-from acme.magic_typing import List
-from acme.magic_typing import Set
 from certbot import errors
 from certbot.compat import filesystem
 from certbot.compat import os
@@ -57,7 +57,7 @@ class ApacheHttp01(common.ChallengePerformer):
 self.challenge_dir = os.path.join(
     self.configurator.config.work_dir,
     "http_challenges")
-self.moded_vhosts = set()  # type: Set[VirtualHost]
+self.moded_vhosts: Set[VirtualHost] = set()

 def perform(self):
     """Perform all HTTP-01 challenges."""
@@ -93,7 +93,7 @@ class ApacheHttp01(common.ChallengePerformer):
 self.configurator.enable_mod(mod, temp=True)

 def _mod_config(self):
-    selected_vhosts = []  # type: List[VirtualHost]
+    selected_vhosts: List[VirtualHost] = []
     http_port = str(self.configurator.config.http01_port)
     for chall in self.achalls:
         # Search for matching VirtualHosts
@@ -100,12 +100,9 @@ For this reason the internal representation of data should not ignore the case.
 """

 import abc
-import six


-@six.add_metaclass(abc.ABCMeta)
-class ParserNode(object):
+class ParserNode(object, metaclass=abc.ABCMeta):
     """
     ParserNode is the basic building block of the tree of such nodes,
     representing the structure of the configuration. It is largely meant to keep
@@ -204,9 +201,7 @@ class ParserNode(object):
 """


-# Linter rule exclusion done because of https://github.com/PyCQA/pylint/issues/179
-@six.add_metaclass(abc.ABCMeta)  # pylint: disable=abstract-method
-class CommentNode(ParserNode):
+class CommentNode(ParserNode, metaclass=abc.ABCMeta):
     """
     CommentNode class is used for representation of comments within the parsed
     configuration structure. Because of the nature of comments, it is not able
@@ -249,8 +244,7 @@ class CommentNode(ParserNode):
 metadata=kwargs.get('metadata', {}))  # pragma: no cover


-@six.add_metaclass(abc.ABCMeta)
-class DirectiveNode(ParserNode):
+class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
     """
     DirectiveNode class represents a configuration directive within the configuration.
     It can have zero or more parameters attached to it. Because of the nature of
@@ -325,8 +319,7 @@ class DirectiveNode(ParserNode):
 """


-@six.add_metaclass(abc.ABCMeta)
-class BlockNode(DirectiveNode):
+class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
     """
     BlockNode class represents a block of nested configuration directives, comments
     and other blocks as its children. A BlockNode can have zero or more parameters
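On Python 3 the `metaclass=` keyword in the class header replaces the `@six.add_metaclass` decorator that was only needed for 2/3 compatibility. A small runnable sketch of the new spelling (the class here is a toy, not the real certbot_apache interface):

    import abc

    # Legacy, Python 2 compatible spelling (requires the six package):
    #
    #     @six.add_metaclass(abc.ABCMeta)
    #     class ParserNode(object):
    #         ...
    #
    # Python 3 only spelling, as used in the hunk above:
    class ParserNode(object, metaclass=abc.ABCMeta):
        """Toy abstract node used only to demonstrate the metaclass keyword."""

        @abc.abstractmethod
        def save(self, msg):
            """An abstract method, so the ABC actually blocks instantiation."""

    try:
        ParserNode()
    except TypeError as err:
        print(err)  # cannot instantiate abstract class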
@@ -1,7 +1,7 @@
 """Module contains classes used by the Apache Configurator."""
 import re
+from typing import Set

-from acme.magic_typing import Set
 from certbot.plugins import common


@@ -20,9 +20,6 @@ class Addr(common.Addr):
 self.is_wildcard() and other.is_wildcard()))
 return False

-def __ne__(self, other):
-    return not self.__eq__(other)
-
 def __repr__(self):
     return "certbot_apache._internal.obj.Addr(" + repr(self.tup) + ")"

@@ -98,7 +95,7 @@ class Addr(common.Addr):
 return self.get_addr_obj(port)


-class VirtualHost(object):
+class VirtualHost:
     """Represents an Apache Virtualhost.

     :ivar str filep: file path of VH
@@ -140,7 +137,7 @@ class VirtualHost(object):

 def get_names(self):
     """Return a set of all names."""
-    all_names = set()  # type: Set[str]
+    all_names: Set[str] = set()
     all_names.update(self.aliases)
     # Strip out any scheme:// and <port> field from servername
     if self.name is not None:
@@ -191,9 +188,6 @@ class VirtualHost(object):

 return False

-def __ne__(self, other):
-    return not self.__eq__(other)
-
 def __hash__(self):
     return hash((self.filep, self.path,
                  tuple(self.addrs), tuple(self.get_names()),
@@ -251,7 +245,7 @@ class VirtualHost(object):

 # already_found acts to keep everything very conservative.
 # Don't allow multiple ip:ports in same set.
-already_found = set()  # type: Set[str]
+already_found: Set[str] = set()

 for addr in vhost.addrs:
     for local_addr in self.addrs:
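The removed `__ne__` methods are redundant on Python 3: when a class defines `__eq__`, the default `__ne__` already returns the inverted result (unless `__eq__` returns NotImplemented). A tiny demonstration with a simplified stand-in for the Addr class:

    class Addr:
        def __init__(self, tup):
            self.tup = tup

        def __eq__(self, other):
            if isinstance(other, Addr):
                return self.tup == other.tup
            return NotImplemented

        def __hash__(self):  # keep the object hashable after defining __eq__
            return hash(self.tup)

    # No __ne__ needed: Python 3 derives it from __eq__.
    assert Addr(("127.0.0.1", "443")) != Addr(("127.0.0.1", "80"))
    assert not (Addr(("1.2.3.4", "80")) != Addr(("1.2.3.4", "80")))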
@@ -1,9 +1,9 @@
 """ Distribution specific override class for CentOS family (RHEL, Fedora) """
 import logging
+from typing import List

 import zope.interface

-from acme.magic_typing import List
 from certbot import errors
 from certbot import interfaces
 from certbot import util
@@ -102,9 +102,9 @@ class CentOSConfigurator(configurator.ApacheConfigurator):

 loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)

-correct_ifmods = []  # type: List[str]
-loadmod_args = []  # type: List[str]
-loadmod_paths = []  # type: List[str]
+correct_ifmods: List[str] = []
+loadmod_args: List[str] = []
+loadmod_paths: List[str] = []
 for m in loadmods:
     noarg_path = m.rpartition("/")[0]
     path_args = self.parser.get_all_args(noarg_path)
@@ -14,10 +14,10 @@ class OpenSUSEConfigurator(configurator.ApacheConfigurator):
 vhost_root="/etc/apache2/vhosts.d",
 vhost_files="*.conf",
 logs_root="/var/log/apache2",
-ctl="apache2ctl",
-version_cmd=['apache2ctl', '-v'],
-restart_cmd=['apache2ctl', 'graceful'],
-conftest_cmd=['apache2ctl', 'configtest'],
+ctl="apachectl",
+version_cmd=['apachectl', '-v'],
+restart_cmd=['apachectl', 'graceful'],
+conftest_cmd=['apachectl', 'configtest'],
 enmod="a2enmod",
 dismod="a2dismod",
 le_vhost_ext="-le-ssl.conf",
@@ -3,12 +3,9 @@ import copy
 import fnmatch
 import logging
 import re
-import sys
+from typing import Dict
+from typing import List

-import six
-
-from acme.magic_typing import Dict
-from acme.magic_typing import List
 from certbot import errors
 from certbot.compat import os
 from certbot_apache._internal import apache_util
@@ -17,7 +14,7 @@ from certbot_apache._internal import constants
 logger = logging.getLogger(__name__)


-class ApacheParser(object):
+class ApacheParser:
     """Class handles the fine details of parsing the Apache Configuration.

     .. todo:: Make parsing general... remove sites-available etc...
@@ -51,9 +48,9 @@ class ApacheParser(object):
 "version 1.2.0 or higher, please make sure you have you have "
 "those installed.")

-self.modules = {}  # type: Dict[str, str]
-self.parser_paths = {}  # type: Dict[str, List[str]]
-self.variables = {}  # type: Dict[str, str]
+self.modules: Dict[str, str] = {}
+self.parser_paths: Dict[str, List[str]] = {}
+self.variables: Dict[str, str] = {}

 # Find configuration root and make sure augeas can parse it.
 self.root = os.path.abspath(root)
@@ -266,7 +263,7 @@ class ApacheParser(object):
 the iteration issue. Else... parse and enable mods at same time.

 """
-mods = {}  # type: Dict[str, str]
+mods: Dict[str, str] = {}
 matches = self.find_dir("LoadModule")
 iterator = iter(matches)
 # Make sure prev_size != cur_size for do: while: iteration
@@ -275,7 +272,7 @@ class ApacheParser(object):
 while len(mods) != prev_size:
     prev_size = len(mods)

-    for match_name, match_filename in six.moves.zip(
+    for match_name, match_filename in zip(
             iterator, iterator):
         mod_name = self.get_arg(match_name)
         mod_filename = self.get_arg(match_filename)
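`six.moves.zip` becomes the builtin `zip` once Python 2 is out of the picture, and passing the same iterator twice still yields consecutive pairs, which is how the LoadModule name/filename arguments are consumed above. A small sketch of the idiom with made-up data:

    matches = ["mod_ssl.c", "modules/mod_ssl.so",
               "mod_rewrite.c", "modules/mod_rewrite.so"]

    iterator = iter(matches)
    mods = {}
    # zip(iterator, iterator) pulls two items per step from the same iterator,
    # so the flat list is consumed as (name, filename) pairs.
    for mod_name, mod_filename in zip(iterator, iterator):
        mods[mod_name] = mod_filename

    assert mods == {"mod_ssl.c": "modules/mod_ssl.so",
                    "mod_rewrite.c": "modules/mod_rewrite.so"}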
@@ -553,7 +550,7 @@ class ApacheParser(object):
 else:
     arg_suffix = "/*[self::arg=~regexp('%s')]" % case_i(arg)

-ordered_matches = []  # type: List[str]
+ordered_matches: List[str] = []

 # TODO: Wildcards should be included in alphabetical order
 # https://httpd.apache.org/docs/2.4/mod/core.html#include
@@ -731,7 +728,6 @@ class ApacheParser(object):
 privileged users.

 https://apr.apache.org/docs/apr/2.0/apr__fnmatch_8h_source.html
-http://apache2.sourcearchive.com/documentation/2.2.16-6/apr__fnmatch_8h_source.html

 :param str clean_fn_match: Apache style filename match, like globs

@@ -739,9 +735,6 @@ class ApacheParser(object):
 :rtype: str

 """
-if sys.version_info < (3, 6):
-    # This strips off final /Z(?ms)
-    return fnmatch.translate(clean_fn_match)[:-7]  # pragma: no cover
 # Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
 return fnmatch.translate(clean_fn_match)[4:-3]  # pragma: no cover

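With the pre-3.6 branch gone, only the modern `fnmatch.translate` output format has to be handled: on the CPython versions this code targeted (3.6 and later) it wraps the pattern as `(?s:...)\Z`, so the `[4:-3]` slice strips that wrapper and keeps the translated glob body. A quick check of that assumption (the exact wrapper text could differ on newer interpreters, so treat the printed values as illustrative):

    import fnmatch
    import re

    pattern = fnmatch.translate("*.load")
    print(pattern)        # e.g. "(?s:.*\\.load)\\Z" on CPython 3.6+
    body = pattern[4:-3]  # strip the "(?s:" prefix and ")\\Z" suffix
    print(body)           # ".*\\.load", usable as a fragment of a larger regex

    assert re.match(pattern, "ssl.load")
    assert not re.match(pattern, "ssl.conf")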
@@ -799,7 +792,7 @@ class ApacheParser(object):
 def _parsed_by_parser_paths(self, filep, paths):
     """Helper function that searches through provided paths and returns
     True if file path is found in the set"""
-    for directory in paths.keys():
+    for directory in paths:
         for filename in paths[directory]:
             if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
                 return True
@@ -1,12 +1,7 @@
-from distutils.version import LooseVersion
-import sys
-
-from setuptools import __version__ as setuptools_version
 from setuptools import find_packages
 from setuptools import setup
-from setuptools.command.test import test as TestCommand

-version = '1.7.0.dev0'
+version = '1.14.0.dev0'

 # Remember to update local-oldest-requirements.txt when changing the minimum
 # acme/certbot version.
@@ -14,39 +9,15 @@ install_requires = [
     'acme>=0.29.0',
     'certbot>=1.6.0',
     'python-augeas',
-    'setuptools',
+    'setuptools>=39.0.1',
     'zope.component',
     'zope.interface',
 ]

-setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
-if setuptools_known_environment_markers:
-    install_requires.append('mock ; python_version < "3.3"')
-elif 'bdist_wheel' in sys.argv[1:]:
-    raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
-                       'of setuptools. Version 36.2+ of setuptools is required.')
-elif sys.version_info < (3,3):
-    install_requires.append('mock')
-
 dev_extras = [
     'apacheconfig>=0.3.2',
 ]

-class PyTest(TestCommand):
-    user_options = []
-
-    def initialize_options(self):
-        TestCommand.initialize_options(self)
-        self.pytest_args = ''
-
-    def run_tests(self):
-        import shlex
-        # import here, cause outside the eggs aren't loaded
-        import pytest
-        errno = pytest.main(shlex.split(self.pytest_args))
-        sys.exit(errno)
-
-
 setup(
     name='certbot-apache',
     version=version,
@@ -55,7 +26,7 @@ setup(
     author="Certbot Project",
     author_email='client-dev@letsencrypt.org',
     license='Apache License 2.0',
-    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
+    python_requires='>=3.6',
     classifiers=[
         'Development Status :: 5 - Production/Stable',
         'Environment :: Plugins',
@@ -63,13 +34,11 @@ setup(
         'License :: OSI Approved :: Apache Software License',
         'Operating System :: POSIX :: Linux',
         'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
         'Topic :: Internet :: WWW/HTTP',
         'Topic :: Security',
         'Topic :: System :: Installation/Setup',
@@ -89,7 +58,4 @@ setup(
         'apache = certbot_apache._internal.entrypoint:ENTRYPOINT',
     ],
     },
-    test_suite='certbot_apache',
-    tests_require=["pytest"],
-    cmdclass={"test": PyTest},
 )
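After this cleanup, supported Python versions are declared through `python_requires` and classifiers rather than runtime setuptools checks, and tests are run with pytest directly instead of through a `setup.py test` command. A pared-down sketch of the resulting shape, using only values visible in the diff (illustrative, not the complete certbot-apache setup.py):

    from setuptools import find_packages
    from setuptools import setup

    install_requires = [
        'acme>=0.29.0',
        'certbot>=1.6.0',
        'python-augeas',
        'setuptools>=39.0.1',
        'zope.component',
        'zope.interface',
    ]

    setup(
        name='certbot-apache',
        version='1.14.0.dev0',
        python_requires='>=3.6',
        packages=find_packages(),
        install_requires=install_requires,
        extras_require={'dev': ['apacheconfig>=0.3.2']},
    )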
@@ -52,7 +52,7 @@ function Cleanup() {
 # if our environment asks us to enable modules, do our best!
 if [ "$1" = --debian-modules ] ; then
   sudo apt-get install -y apache2
-  sudo apt-get install -y libapache2-mod-wsgi
+  sudo apt-get install -y libapache2-mod-wsgi-py3
   sudo apt-get install -y libapache2-mod-macro

   for mod in ssl rewrite macro wsgi deflate userdir version mime setenvif ; do
@@ -107,7 +107,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-

 def test_set_parameters(self):
     servernames = self.config.parser_root.find_directives("servername")
-    names = []  # type: List[str]
+    names: List[str] = []
     for servername in servernames:
         names += servername.parameters
     self.assertFalse("going_to_set_this" in names)
@@ -7,7 +7,6 @@ try:
     import mock
 except ImportError:  # pragma: no cover
     from unittest import mock  # type: ignore
-import six  # pylint: disable=unused-import  # six is used in mock.patch()

 from certbot import errors
 from certbot_apache._internal import constants
@@ -140,7 +140,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
 self.assertEqual(mock_get.call_count, 3)
 self.assertEqual(len(self.config.parser.modules), 4)
 self.assertEqual(len(self.config.parser.variables), 2)
-self.assertTrue("TEST2" in self.config.parser.variables.keys())
+self.assertTrue("TEST2" in self.config.parser.variables)
 self.assertTrue("mod_another.c" in self.config.parser.modules)

 def test_get_virtual_hosts(self):
@@ -172,11 +172,11 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
 mock_osi.return_value = ("centos", "7")
 self.config.parser.update_runtime_variables()

-self.assertTrue("mock_define" in self.config.parser.variables.keys())
-self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
-self.assertTrue("mock_value" in self.config.parser.variables.keys())
+self.assertTrue("mock_define" in self.config.parser.variables)
+self.assertTrue("mock_define_too" in self.config.parser.variables)
+self.assertTrue("mock_value" in self.config.parser.variables)
 self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
-self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
+self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
 self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])

 @mock.patch("certbot_apache._internal.configurator.util.run_script")
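Several assertions here and in the following test files drop the redundant `.keys()` call: the `in` operator on a dict already tests keys, without building a separate view object. For example:

    variables = {"TEST2": "1", "mock_define": ""}

    # Equivalent membership tests; the first is the idiomatic spelling.
    assert "TEST2" in variables
    assert "TEST2" in variables.keys()
    assert "missing" not in variables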
@@ -10,7 +10,6 @@ try:
     import mock
 except ImportError:  # pragma: no cover
     from unittest import mock  # type: ignore
-import six  # pylint: disable=unused-import  # six is used in mock.patch()

 from acme import challenges
 from certbot import achallenges
@@ -726,7 +725,7 @@ class MultipleVhostsTest(util.ApacheTest):
 # This calls open
 self.config.reverter.register_file_creation = mock.Mock()
 mock_open.side_effect = IOError
-with mock.patch("six.moves.builtins.open", mock_open):
+with mock.patch("builtins.open", mock_open):
     self.assertRaises(
         errors.PluginError,
         self.config.make_vhost_ssl, self.vh_truth[0])
@@ -1337,13 +1336,6 @@ class MultipleVhostsTest(util.ApacheTest):
 self.config.enable_mod,
 "whatever")

-def test_wildcard_domain(self):
-    # pylint: disable=protected-access
-    cases = {u"*.example.org": True, b"*.x.example.org": True,
-             u"a.example.org": False, b"a.x.example.org": False}
-    for key in cases:
-        self.assertEqual(self.config._wildcard_domain(key), cases[key])
-
 def test_choose_vhosts_wildcard(self):
     # pylint: disable=protected-access
     mock_path = "certbot_apache._internal.display_ops.select_vhost_multiple"
@@ -1357,10 +1349,10 @@ class MultipleVhostsTest(util.ApacheTest):

 # And the actual returned values
 self.assertEqual(len(vhs), 1)
-self.assertTrue(vhs[0].name == "certbot.demo")
+self.assertEqual(vhs[0].name, "certbot.demo")
 self.assertTrue(vhs[0].ssl)

-self.assertFalse(vhs[0] == self.vh_truth[3])
+self.assertNotEqual(vhs[0], self.vh_truth[3])

 @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.make_vhost_ssl")
 def test_choose_vhosts_wildcard_no_ssl(self, mock_makessl):
@@ -1471,10 +1463,10 @@ class MultipleVhostsTest(util.ApacheTest):
 self.config.parser.aug.match = mock_match
 vhs = self.config.get_virtual_hosts()
 self.assertEqual(len(vhs), 2)
-self.assertTrue(vhs[0] == self.vh_truth[1])
+self.assertEqual(vhs[0], self.vh_truth[1])
 # mock_vhost should have replaced the vh_truth[0], because its filepath
 # isn't a symlink
-self.assertTrue(vhs[1] == mock_vhost)
+self.assertEqual(vhs[1], mock_vhost)


 class AugeasVhostsTest(util.ApacheTest):
@@ -1841,7 +1833,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):

 def test_open_module_file(self):
     mock_open = mock.mock_open(read_data="testing 12 3")
-    with mock.patch("six.moves.builtins.open", mock_open):
+    with mock.patch("builtins.open", mock_open):
         self.assertEqual(self.config._open_module_file("/nonsense/"), "testing 12 3")

 if __name__ == "__main__":
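With six gone, `mock.patch("six.moves.builtins.open")` becomes `mock.patch("builtins.open")`. A self-contained sketch of the pattern used in the tests above, with a hypothetical helper standing in for the configurator method under test:

    from unittest import mock

    def read_module_file(path):
        # Hypothetical stand-in for the method being tested.
        with open(path) as handle:
            return handle.read()

    mock_open = mock.mock_open(read_data="testing 12 3")
    with mock.patch("builtins.open", mock_open):
        assert read_module_file("/nonsense/") == "testing 12 3"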
@@ -412,9 +412,9 @@ class DualParserNodeTest(unittest.TestCase):  # pylint: disable=too-many-public-
 ancestor=self.block,
 filepath="/path/to/whatever",
 metadata=self.metadata)
-self.assertFalse(self.block == ne_block)
-self.assertFalse(self.directive == ne_directive)
-self.assertFalse(self.comment == ne_comment)
+self.assertNotEqual(self.block, ne_block)
+self.assertNotEqual(self.directive, ne_directive)
+self.assertNotEqual(self.comment, ne_comment)

 def test_parsed_paths(self):
     mock_p = mock.MagicMock(return_value=['/path/file.conf',
@@ -134,7 +134,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
 self.assertEqual(mock_get.call_count, 3)
 self.assertEqual(len(self.config.parser.modules), 4)
 self.assertEqual(len(self.config.parser.variables), 2)
-self.assertTrue("TEST2" in self.config.parser.variables.keys())
+self.assertTrue("TEST2" in self.config.parser.variables)
 self.assertTrue("mod_another.c" in self.config.parser.modules)

 @mock.patch("certbot_apache._internal.configurator.util.run_script")
@@ -172,11 +172,11 @@ class MultipleVhostsTestFedora(util.ApacheTest):
 mock_osi.return_value = ("fedora", "29")
 self.config.parser.update_runtime_variables()

-self.assertTrue("mock_define" in self.config.parser.variables.keys())
-self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
-self.assertTrue("mock_value" in self.config.parser.variables.keys())
+self.assertTrue("mock_define" in self.config.parser.variables)
+self.assertTrue("mock_define_too" in self.config.parser.variables)
+self.assertTrue("mock_value" in self.config.parser.variables)
 self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
-self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
+self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
 self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])

 @mock.patch("certbot_apache._internal.configurator.util.run_script")
@@ -91,7 +91,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
 with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_modules"):
     self.config.parser.update_runtime_variables()
 for define in defines:
-    self.assertTrue(define in self.config.parser.variables.keys())
+    self.assertTrue(define in self.config.parser.variables)

 @mock.patch("certbot_apache._internal.apache_util.parse_from_subprocess")
 def test_no_binary_configdump(self, mock_subprocess):
@@ -26,7 +26,7 @@ class ApacheHttp01Test(util.ApacheTest):
 super(ApacheHttp01Test, self).setUp(*args, **kwargs)

 self.account_key = self.rsa512jwk
-self.achalls = []  # type: List[achallenges.KeyAuthorizationAnnotatedChallenge]
+self.achalls: List[achallenges.KeyAuthorizationAnnotatedChallenge] = []
 vh_truth = util.get_vh_truth(
     self.temp_dir, "debian_apache_2_4/multiple_vhosts")
 # Takes the vhosts for encryption-example.demo, certbot.demo
@@ -27,14 +27,14 @@ class VirtualHostTest(unittest.TestCase):
 "certbot_apache._internal.obj.Addr(('127.0.0.1', '443'))")

 def test_eq(self):
-    self.assertTrue(self.vhost1b == self.vhost1)
-    self.assertFalse(self.vhost1 == self.vhost2)
+    self.assertEqual(self.vhost1b, self.vhost1)
+    self.assertNotEqual(self.vhost1, self.vhost2)
     self.assertEqual(str(self.vhost1b), str(self.vhost1))
-    self.assertFalse(self.vhost1b == 1234)
+    self.assertNotEqual(self.vhost1b, 1234)

 def test_ne(self):
-    self.assertTrue(self.vhost1 != self.vhost2)
-    self.assertFalse(self.vhost1 != self.vhost1b)
+    self.assertNotEqual(self.vhost1, self.vhost2)
+    self.assertEqual(self.vhost1, self.vhost1b)

 def test_conflicts(self):
     from certbot_apache._internal.obj import Addr
@@ -128,13 +128,13 @@ class AddrTest(unittest.TestCase):
 self.assertTrue(self.addr1.conflicts(self.addr2))

 def test_equal(self):
-    self.assertTrue(self.addr1 == self.addr2)
-    self.assertFalse(self.addr == self.addr1)
-    self.assertFalse(self.addr == 123)
+    self.assertEqual(self.addr1, self.addr2)
+    self.assertNotEqual(self.addr, self.addr1)
+    self.assertNotEqual(self.addr, 123)

 def test_not_equal(self):
-    self.assertFalse(self.addr1 != self.addr2)
-    self.assertTrue(self.addr != self.addr1)
+    self.assertEqual(self.addr1, self.addr2)
+    self.assertNotEqual(self.addr, self.addr1)


 if __name__ == "__main__":
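These test updates replace `assertTrue(a == b)` and `assertFalse(a == b)` with `assertEqual` and `assertNotEqual`, which report both operands when they fail instead of a bare "False is not true". A minimal example:

    import unittest

    class ComparisonStyleTest(unittest.TestCase):
        def test_preferred_assertions(self):
            vhost_a = {"name": "certbot.demo", "ssl": True}
            vhost_b = {"name": "certbot.demo", "ssl": True}
            other = {"name": "encryption-example.demo", "ssl": False}

            # On failure these show a diff of the two values.
            self.assertEqual(vhost_a, vhost_b)
            self.assertNotEqual(vhost_a, other)

    if __name__ == "__main__":
        unittest.main()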
@@ -26,8 +26,6 @@ class ApacheTest(unittest.TestCase):
 config_root="debian_apache_2_4/multiple_vhosts/apache2",
 vhost_root="debian_apache_2_4/multiple_vhosts/apache2/sites-available"):
 # pylint: disable=arguments-differ
-super(ApacheTest, self).setUp()
-
 self.temp_dir, self.config_dir, self.work_dir = common.dir_setup(
     test_dir=test_dir,
     pkg=__name__)
certbot-auto
@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
 fi
 VENV_BIN="$VENV_PATH/bin"
 BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
-LE_AUTO_VERSION="1.6.0"
+LE_AUTO_VERSION="1.13.0"
 BASENAME=$(basename $0)
 USAGE="Usage: $BASENAME [OPTIONS]
 A self-updating wrapper script for the Certbot ACME client. When run, updates
@@ -258,7 +258,7 @@ DeprecationBootstrap() {

 MIN_PYTHON_2_VERSION="2.7"
 MIN_PYVER2=$(echo "$MIN_PYTHON_2_VERSION" | sed 's/\.//')
-MIN_PYTHON_3_VERSION="3.5"
+MIN_PYTHON_3_VERSION="3.6"
 MIN_PYVER3=$(echo "$MIN_PYTHON_3_VERSION" | sed 's/\.//')
 # Sets LE_PYTHON to Python version string and PYVER to the first two
 # digits of the python version.
@@ -799,18 +799,14 @@ BootstrapMageiaCommon() {
 # that function. If Bootstrap is set to a function that doesn't install any
 # packages BOOTSTRAP_VERSION is not set.
 if [ -f /etc/debian_version ]; then
-  Bootstrap() {
-    BootstrapMessage "Debian-based OSes"
-    BootstrapDebCommon
-  }
-  BOOTSTRAP_VERSION="BootstrapDebCommon $BOOTSTRAP_DEB_COMMON_VERSION"
+  DEPRECATED_OS=1
 elif [ -f /etc/mageia-release ]; then
   # Mageia has both /etc/mageia-release and /etc/redhat-release
-  Bootstrap() {
-    ExperimentalBootstrap "Mageia" BootstrapMageiaCommon
-  }
-  BOOTSTRAP_VERSION="BootstrapMageiaCommon $BOOTSTRAP_MAGEIA_COMMON_VERSION"
+  DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 elif [ -f /etc/redhat-release ]; then
+  DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
   # Run DeterminePythonVersion to decide on the basis of available Python versions
   # whether to use 2.x or 3.x on RedHat-like systems.
   # Then, revert LE_PYTHON to its previous state.
@@ -843,12 +839,7 @@ elif [ -f /etc/redhat-release ]; then
     INTERACTIVE_BOOTSTRAP=1
   fi

-  Bootstrap() {
-    BootstrapMessage "Legacy RedHat-based OSes that will use Python3"
-    BootstrapRpmPython3Legacy
-  }
   USE_PYTHON_3=1
-  BOOTSTRAP_VERSION="BootstrapRpmPython3Legacy $BOOTSTRAP_RPM_PYTHON3_LEGACY_VERSION"

   # Try now to enable SCL rh-python36 for systems already bootstrapped
   # NB: EnablePython36SCL has been defined along with BootstrapRpmPython3Legacy in certbot-auto
@@ -867,73 +858,38 @@ elif [ -f /etc/redhat-release ]; then
   fi

   if [ "$RPM_USE_PYTHON_3" = 1 ]; then
-    Bootstrap() {
-      BootstrapMessage "RedHat-based OSes that will use Python3"
-      BootstrapRpmPython3
-    }
     USE_PYTHON_3=1
-    BOOTSTRAP_VERSION="BootstrapRpmPython3 $BOOTSTRAP_RPM_PYTHON3_VERSION"
-  else
-    Bootstrap() {
-      BootstrapMessage "RedHat-based OSes"
-      BootstrapRpmCommon
-    }
-    BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
   fi
 fi

 LE_PYTHON="$prev_le_python"
 elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
-  Bootstrap() {
-    BootstrapMessage "openSUSE-based OSes"
-    BootstrapSuseCommon
-  }
-  BOOTSTRAP_VERSION="BootstrapSuseCommon $BOOTSTRAP_SUSE_COMMON_VERSION"
+  DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 elif [ -f /etc/arch-release ]; then
-  Bootstrap() {
-    if [ "$DEBUG" = 1 ]; then
-      BootstrapMessage "Archlinux"
-      BootstrapArchCommon
-    else
-      error "Please use pacman to install letsencrypt packages:"
-      error "# pacman -S certbot certbot-apache"
-      error
-      error "If you would like to use the virtualenv way, please run the script again with the"
-      error "--debug flag."
-      exit 1
-    fi
-  }
-  BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
+  DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 elif [ -f /etc/manjaro-release ]; then
-  Bootstrap() {
-    ExperimentalBootstrap "Manjaro Linux" BootstrapArchCommon
-  }
-  BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
+  DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 elif [ -f /etc/gentoo-release ]; then
   DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 elif uname | grep -iq FreeBSD ; then
   DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 elif uname | grep -iq Darwin ; then
   DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 elif [ -f /etc/issue ] && grep -iq "Amazon Linux" /etc/issue ; then
-  Bootstrap() {
-    ExperimentalBootstrap "Amazon Linux" BootstrapRpmCommon
-  }
-  BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
+  DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 elif [ -f /etc/product ] && grep -q "Joyent Instance" /etc/product ; then
-  Bootstrap() {
-    ExperimentalBootstrap "Joyent SmartOS Zone" BootstrapSmartOS
-  }
-  BOOTSTRAP_VERSION="BootstrapSmartOS $BOOTSTRAP_SMARTOS_VERSION"
+  DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 else
-  Bootstrap() {
-    error "Sorry, I don't know how to bootstrap Certbot on your operating system!"
-    error
-    error "You will need to install OS dependencies, configure virtualenv, and run pip install manually."
-    error "Please see https://letsencrypt.readthedocs.org/en/latest/contributing.html#prerequisites"
-    error "for more info."
-    exit 1
-  }
+  DEPRECATED_OS=1
+  NO_SELF_UPGRADE=1
 fi

 # We handle this case after determining the normal bootstrap version to allow
@@ -1162,7 +1118,9 @@ if [ "$1" = "--le-auto-phase2" ]; then
 fi

 if [ -f "$VENV_BIN/letsencrypt" -a "$INSTALL_ONLY" != 1 ]; then
-  error "Certbot will no longer receive updates."
+  error "certbot-auto and its Certbot installation will no longer receive updates."
+  error "You will not receive any bug fixes including those fixing server compatibility"
+  error "or security problems."
   error "Please visit https://certbot.eff.org/ to check for other alternatives."
   "$VENV_BIN/letsencrypt" "$@"
   exit 0
@@ -1530,18 +1488,18 @@ letsencrypt==0.7.0 \
     --hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
     --hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9

-certbot==1.6.0 \
-    --hash=sha256:7237ac851ef7f3ff2d5ddb49e692e4bd5346273734cbc531531e4ad56d14d460 \
-    --hash=sha256:d373ee0f24ab06f561efa2b00f68cff43521b003d87fbf4d9e869e7cc7395481
-acme==1.6.0 \
-    --hash=sha256:dc532fee475dde07a843232f69f54b185ba23af6cce9d2e1a1dc132ce2e34f64 \
-    --hash=sha256:fe76e06ae1e9b12304f9e9691ff901da6d2fd588fea2765f891b8cd15d6b3f2b
-certbot-apache==1.6.0 \
-    --hash=sha256:d6080664fe24fc5dc1e519382ebe5a5215f3b886ceaa335336a1db2c1b1ed95e \
-    --hash=sha256:e0232a1f1c5513701de06bccb88b57b7d76d9db28c6559fba8539f88293c85ea
-certbot-nginx==1.6.0 \
-    --hash=sha256:6ef97185d9c07ea97656e7b439e7ccfa8e5090f6802e9162e8f5a79080bc5a76 \
-    --hash=sha256:facc59e066d7e5623fbc068fe2fcc5e1f802c2441d148e37ff96ad90b893600a
+certbot==1.13.0 \
+    --hash=sha256:082eb732e1318bb9605afa7aea8db2c2f4c5029d523c73f24c6aa98f03caff76 \
+    --hash=sha256:64cf41b57df7667d9d849fcaa9031a4f151788246733d1f4c3f37a5aa5e2f458
+acme==1.13.0 \
+    --hash=sha256:93b6365c9425de03497a6b8aee1107814501d2974499b42e9bcc9a7378771143 \
+    --hash=sha256:6b4257dfd6a6d5f01e8cd4f0b10422c17836bed7c67e9c5b0a0ad6c7d651c088
+certbot-apache==1.13.0 \
+    --hash=sha256:36ed02ac7d2d91febee8dd3181ae9095b3f06434c9ed8959fbc6db24ab4da2e8 \
+    --hash=sha256:4b5a16e80c1418e2edc05fc2578f522fb24974b2c13eb747cdfeef69e5bd5ae1
+certbot-nginx==1.13.0 \
+    --hash=sha256:3ff271f65321b25c77a868af21f76f58754a7d61529ad565a1d66e29c711120f \
+    --hash=sha256:9e972cc19c0fa9e5b7863da0423b156fbfb5623fd30b558fd2fd6d21c24c0b08

 UNLIKELY_EOF
 # -------------------------------------------------------------------------
@@ -1615,6 +1573,11 @@ maybe_argparse = (
     if sys.version_info < (2, 7, 0) else [])


+# Be careful when updating the pinned versions here, in particular for pip.
+# Indeed starting from 10.0, pip will build dependencies in isolation if the
+# related projects are compliant with PEP 517. This is not something we want
+# as of now, so the isolation build will need to be disabled wherever
+# pipstrap is used (see https://github.com/certbot/certbot/issues/8256).
 PACKAGES = maybe_argparse + [
     # Pip has no dependencies, as it vendors everything:
     ('11/b6/abcb525026a4be042b486df43905d6893fb04f05aac21c32c638e939e447/'
@@ -0,0 +1,60 @@
+options {
+    directory "/var/cache/bind";
+
+    // Running inside Docker. Bind address on Docker host is 127.0.0.1.
+    listen-on { any; };
+    listen-on-v6 { any; };
+
+    // We are allowing BIND to service recursive queries, but only in an extremely limited sense
+    // where it is entirely disconnected from public DNS:
+    // - Iterative queries are disabled. Only forwarding to a non-existent forwarder.
+    // - The only recursive answers we can get (that will not be a SERVFAIL) will come from the
+    //   RPZ "mock-recursion" zone. Effectively this means we are mocking out the entirety of
+    //   public DNS.
+    allow-recursion { any; };    // BIND will only answer using RPZ if recursion is enabled
+    forwarders { 192.0.2.254; }; // Nobody is listening, this is TEST-NET-1
+    forward only;                // Do NOT perform iterative queries from the root zone
+    dnssec-validation no;        // Do not bother fetching the root DNSKEY set (performance)
+    response-policy {            // All recursive queries will be served from here.
+        zone "mock-recursion"
+        log yes;
+    } recursive-only no          // Allow RPZs to affect authoritative zones too.
+    qname-wait-recurse no        // No real recursion.
+    nsip-wait-recurse no;        // No real recursion.
+
+    allow-transfer { none; };
+    allow-update { none; };
+};
+
+key "default-key." {
+    algorithm hmac-sha512;
+    secret "91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==";
+};
+
+zone "mock-recursion" {
+    type primary;
+    file "/var/lib/bind/rpz.mock-recursion";
+    allow-query {
+        none;
+    };
+};
+
+zone "example.com." {
+    type primary;
+    file "/var/lib/bind/db.example.com";
+    journal "/var/cache/bind/db.example.com.jnl";
+
+    update-policy {
+        grant default-key zonesub TXT;
+    };
+};
+
+zone "sub.example.com." {
+    type primary;
+    file "/var/lib/bind/db.sub.example.com";
+    journal "/var/cache/bind/db.sub.example.com.jnl";
+
+    update-policy {
+        grant default-key zonesub TXT;
+    };
+};
@@ -0,0 +1,10 @@
|
|||||||
|
# Target DNS server
|
||||||
|
dns_rfc2136_server = {server_address}
|
||||||
|
# Target DNS port
|
||||||
|
dns_rfc2136_port = {server_port}
|
||||||
|
# TSIG key name
|
||||||
|
dns_rfc2136_name = default-key.
|
||||||
|
# TSIG key secret
|
||||||
|
dns_rfc2136_secret = 91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==
|
||||||
|
# TSIG key algorithm
|
||||||
|
dns_rfc2136_algorithm = HMAC-SHA512
|
||||||
@@ -0,0 +1,11 @@
+$ORIGIN example.com.
+$TTL 3600
+example.com.      IN  SOA  ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )
+
+example.com.      IN  NS   ns1
+example.com.      IN  NS   ns2
+
+ns1               IN  A    192.0.2.2
+ns2               IN  A    192.0.2.3
+
+@                 IN  A    192.0.2.1
@@ -0,0 +1,9 @@
+$ORIGIN sub.example.com.
+$TTL 3600
+sub.example.com.  IN  SOA  ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )
+
+sub.example.com.  IN  NS   ns1
+sub.example.com.  IN  NS   ns2
+
+ns1               IN  A    192.0.2.2
+ns2               IN  A    192.0.2.3
@@ -0,0 +1,6 @@
+$TTL 3600
+
+@       SOA  ns1.example.test. dummy.example.test. 1 12h 15m 3w 2h
+        NS   ns1.example.test.
+
+_acme-challenge.aliased.example  IN  CNAME  _acme-challenge.example.com.
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-from __future__ import print_function
 import os
 import sys

@@ -0,0 +1,14 @@
+This directory contains your keys and certificates.
+
+`privkey.pem`  : the private key for your certificate.
+`fullchain.pem`: the certificate file used in most server software.
+`chain.pem`    : used for OCSP stapling in Nginx >=1.3.7.
+`cert.pem`     : will break many server configurations, and should not be used
+                 without reading further documentation (see link below).
+
+WARNING: DO NOT MOVE OR RENAME THESE FILES!
+         Certbot expects these files to remain in this location in order
+         to function properly!
+
+We recommend not moving these files. For more information, see the Certbot
+User Guide at https://certbot.eff.org/docs/using.html#where-are-my-certificates.
@@ -0,0 +1,18 @@
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAcOgAwIBAgIIBvrEnbPRYu8wDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
+AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxMjZjNGIwHhcNMjAxMDEyMjEwNzQw
+WhcNMjUxMDEyMjEwNzQwWjAjMSEwHwYDVQQDExhjLmVuY3J5cHRpb24tZXhhbXBs
+ZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAARjMhuW0ENPPC33PjB5XsYU
+CRw640kPQENIDatcTJaENZIZdqKd6rI6jc+lpbmXot7Zi52clJlSJS+V6oDAt2Lh
+o4HYMIHVMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB
+BQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUj7Kd3ENqxlPf8B2bIGhsjydX
+mPswHwYDVR0jBBgwFoAUEiGxlkRsi+VvcogH5dVD3h1laAcwMQYIKwYBBQUHAQEE
+JTAjMCEGCCsGAQUFBzABhhVodHRwOi8vMTI3LjAuMC4xOjQwMDIwIwYDVR0RBBww
+GoIYYy5lbmNyeXB0aW9uLWV4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQCl
+k0JXsa8y7fg41WWMDhw60bPW77O0FtOmTcnhdI5daYNemQVk+Q5EMaBLQ/oGjgXd
+9QXFzXH1PL904YEnSLt+iTpXn++7rQSNzQsdYqw0neWk4f5pEBiN+WORpb6mwobV
+ifMtBOkNEHvrJ2Pkci9U1lLwtKD/DSew6QtJU5DSkmH1XdGuMJiubygEIvELtvgq
+cP9S368ZvPmPGmKaJQXBiuaR8MTjY/Bkr79aXQMjKbf+mpn7h0POCcePk1DY/rm6
+Da+X16lf0hHyQhSUa7Vgyim6rK1/hlw+Z00i+sQCKD9Ih7kXuuGqfSDC33cfO8Tj
+o/MXO8lcxkrem5zU5QWP
+-----END CERTIFICATE-----
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDUDCCAjigAwIBAgIIbi787yVrcMAwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE
+AxMVUGViYmxlIFJvb3QgQ0EgMGM1MjI1MCAXDTIwMTAxMjIwMjI0NloYDzIwNTAx
+MDEyMjEyMjQ2WjAoMSYwJAYDVQQDEx1QZWJibGUgSW50ZXJtZWRpYXRlIENBIDEy
+NmM0YjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALGeVk1BMJraeqRq
+mJ2+hgso8VOAv2s2CVxUJjIVcn7f2adE8NyTsSQ1brlsnKCUYUw7yLTQH0izLQRB
+qKVIDFkUqo5/FuTJ2QlfA2EwBL8J7s/7L7vj3L0DiVpwgxPSyFEwdl/Y5y7ofsX5
+CIhCFcaMAmTIuKLiSfCJjGwkbEMuolm+lO8Mikxxc/JtDVUC479ugU7PU9O09bMH
+nm+sD6Bgd+KMoPkCCCoeShJS9X3Ziq9HGc7Z6nhM/zirFARt2XkonEdAZ8br01zY
+MRiY9txhlWQ7mUkOtzOSoEuYJNoUbvMUf0+tNzto26WRyF7dJmh7lTBsYrvAwUTx
+PzNyst0CAwEAAaOBgzCBgDAOBgNVHQ8BAf8EBAMCAoQwHQYDVR0lBBYwFAYIKwYB
+BQUHAwEGCCsGAQUFBwMCMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFBIhsZZE
+bIvlb3KIB+XVQ94dZWgHMB8GA1UdIwQYMBaAFOaKTaXg37vKgRt7d79YOjAoAtJT
+MA0GCSqGSIb3DQEBCwUAA4IBAQAU2mZii7PH2pkw2lNM0QqPbcW/UYyvFoUeM8Aq
+uCtsI2s+oxCJTqzfLsA0N8NY4nHLQ5wAlNJfJekngni8hbmJTKU4JFTMe7kLQO8P
+fJbk0pTzhhHVQw7CVwB6Pwq3u2m/JV+d6xDIDc+AVkuEl19ZJU0rTWyooClfFLZV
+EdZmEiUtA3PGlxoYwYhoGHYlhFxsoFONhCsBEdN7k7FKtFGVxN7oc5SKmKp0YZTW
+fcrEtrdNThATO4ymhCC2zh33NI/MT1O74fpaAc2k6LcTl57MKiLfTYX4LTL6v9JG
+9tlNqjFVRRmzEbtXTPcCb+w9g1VqoOGok7mGXYLTYtShCuvE
+-----END CERTIFICATE-----
@@ -0,0 +1,38 @@
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAcOgAwIBAgIILlmGtZhUFEwwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
+AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxMjZjNGIwHhcNMjAxMDEyMjA1MDM0
+WhcNMjUxMDEyMjA1MDM0WjAjMSEwHwYDVQQDExhjLmVuY3J5cHRpb24tZXhhbXBs
+ZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAARHEzR8JPWrEmpmgM+F2bk5
+9mT0u6CjzmJG0QpbaqprLiG5NGpW84VQ5TFCrmC4KxYfigCfMhfHRNfFYvNUK3V/
+o4HYMIHVMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB
+BQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1CsVL+bPnzaxxQ5jUENmQJIO
+lKwwHwYDVR0jBBgwFoAUEiGxlkRsi+VvcogH5dVD3h1laAcwMQYIKwYBBQUHAQEE
+JTAjMCEGCCsGAQUFBzABhhVodHRwOi8vMTI3LjAuMC4xOjQwMDIwIwYDVR0RBBww
+GoIYYy5lbmNyeXB0aW9uLWV4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQBn
+2D8loC7pfk28JYpFLr5lmFKJWWmtLGlpsWDj61fVjtTfGKLziJz+MM6il4Y3hIz5
+58qiFK0ue0M63dIBJ33N+XxSEXon4Q0gy/zRWfH9jtPJ3FwfjkU/RT9PAUClYi0G
+ptNWnTmgQkNzousbcAtRNXuuShH3856vhUnwkX+xM+cbIDi1JVmFjcGrEEQJ0rUF
+mv2ZTyfbWbUs3v4rReETi2NVzr1Ql6J+ByNcMvHODzFy3t0L6yelAw2ca1I+c9HU
++Z0tnp/ykR7eXNuVLivok8UBf5OC413lh8ZO5g+Bgzh/LdtkUuavg1MYtEX0H6mX
+9U7y3nVI8WEbPGf+HDeu
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIDUDCCAjigAwIBAgIIbi787yVrcMAwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE
+AxMVUGViYmxlIFJvb3QgQ0EgMGM1MjI1MCAXDTIwMTAxMjIwMjI0NloYDzIwNTAx
+MDEyMjEyMjQ2WjAoMSYwJAYDVQQDEx1QZWJibGUgSW50ZXJtZWRpYXRlIENBIDEy
+NmM0YjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALGeVk1BMJraeqRq
+mJ2+hgso8VOAv2s2CVxUJjIVcn7f2adE8NyTsSQ1brlsnKCUYUw7yLTQH0izLQRB
+qKVIDFkUqo5/FuTJ2QlfA2EwBL8J7s/7L7vj3L0DiVpwgxPSyFEwdl/Y5y7ofsX5
+CIhCFcaMAmTIuKLiSfCJjGwkbEMuolm+lO8Mikxxc/JtDVUC479ugU7PU9O09bMH
+nm+sD6Bgd+KMoPkCCCoeShJS9X3Ziq9HGc7Z6nhM/zirFARt2XkonEdAZ8br01zY
+MRiY9txhlWQ7mUkOtzOSoEuYJNoUbvMUf0+tNzto26WRyF7dJmh7lTBsYrvAwUTx
+PzNyst0CAwEAAaOBgzCBgDAOBgNVHQ8BAf8EBAMCAoQwHQYDVR0lBBYwFAYIKwYB
+BQUHAwEGCCsGAQUFBwMCMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFBIhsZZE
+bIvlb3KIB+XVQ94dZWgHMB8GA1UdIwQYMBaAFOaKTaXg37vKgRt7d79YOjAoAtJT
+MA0GCSqGSIb3DQEBCwUAA4IBAQAU2mZii7PH2pkw2lNM0QqPbcW/UYyvFoUeM8Aq
+uCtsI2s+oxCJTqzfLsA0N8NY4nHLQ5wAlNJfJekngni8hbmJTKU4JFTMe7kLQO8P
+fJbk0pTzhhHVQw7CVwB6Pwq3u2m/JV+d6xDIDc+AVkuEl19ZJU0rTWyooClfFLZV
+EdZmEiUtA3PGlxoYwYhoGHYlhFxsoFONhCsBEdN7k7FKtFGVxN7oc5SKmKp0YZTW
+fcrEtrdNThATO4ymhCC2zh33NI/MT1O74fpaAc2k6LcTl57MKiLfTYX4LTL6v9JG
+9tlNqjFVRRmzEbtXTPcCb+w9g1VqoOGok7mGXYLTYtShCuvE
+-----END CERTIFICATE-----
@@ -0,0 +1,5 @@
+-----BEGIN PRIVATE KEY-----
+MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgNgefv2dad4U1VYEi
+0WkdHuqywi5QXAe30OwNTTGjhbihRANCAARHEzR8JPWrEmpmgM+F2bk59mT0u6Cj
+zmJG0QpbaqprLiG5NGpW84VQ5TFCrmC4KxYfigCfMhfHRNfFYvNUK3V/
+-----END PRIVATE KEY-----
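These PEM blobs are static ECDSA fixtures for the `c.encryption-example.com` sample lineage. As a side note (not part of the changeset), they can be inspected with the `cryptography` package in the same spirit as the key assertions introduced further down; the file paths here are placeholders for wherever the fixtures are checked out.

```python
# Illustrative sketch only: load the fixture certificate and key with `cryptography`.
# 'cert.pem' and 'privkey.pem' are placeholder paths for the files above.
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.serialization import load_pem_private_key

with open('cert.pem', 'rb') as f:
    cert = x509.load_pem_x509_certificate(f.read(), default_backend())
print(cert.subject.rfc4514_string())   # expected: CN=c.encryption-example.com
print(cert.public_key().curve.name)    # expected: secp256r1

with open('privkey.pem', 'rb') as f:
    key = load_pem_private_key(f.read(), password=None, backend=default_backend())
assert isinstance(key, EllipticCurvePrivateKey)
```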
@@ -0,0 +1,14 @@
+This directory contains your keys and certificates.
+
+`privkey.pem`  : the private key for your certificate.
+`fullchain.pem`: the certificate file used in most server software.
+`chain.pem`    : used for OCSP stapling in Nginx >=1.3.7.
+`cert.pem`     : will break many server configurations, and should not be used
+                 without reading further documentation (see link below).
+
+WARNING: DO NOT MOVE OR RENAME THESE FILES!
+         Certbot expects these files to remain in this location in order
+         to function properly!
+
+We recommend not moving these files. For more information, see the Certbot
+User Guide at https://certbot.eff.org/docs/using.html#where-are-my-certificates.
@@ -0,0 +1 @@
+../../archive/c.encryption-example.com/cert.pem
@@ -0,0 +1 @@
+../../archive/c.encryption-example.com/chain.pem
@@ -0,0 +1 @@
+../../archive/c.encryption-example.com/fullchain.pem
@@ -0,0 +1 @@
+../../archive/c.encryption-example.com/privkey.pem
@@ -0,0 +1,17 @@
+# renew_before_expiry = 30 days
+version = 1.10.0.dev0
+archive_dir = sample-config/archive/c.encryption-example.com
+cert = sample-config/live/c.encryption-example.com/cert.pem
+privkey = sample-config/live/c.encryption-example.com/privkey.pem
+chain = sample-config/live/c.encryption-example.com/chain.pem
+fullchain = sample-config/live/c.encryption-example.com/fullchain.pem
+
+# Options used in the renewal process
+[renewalparams]
+authenticator = apache
+installer = apache
+account = 48d6b9e8d767eccf7e4d877d6ffa81e3
+key_type = ecdsa
+config_dir = sample-config-ec
+elliptic_curve = secp256r1
+manual_public_ip_logging_ok = True
@@ -1,3 +1,4 @@
+# pylint: disable=missing-module-docstring
 import pytest

 # Custom assertions defined in the following package need to be registered to be properly
@@ -2,6 +2,11 @@
 import io
 import os

+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+from cryptography.hazmat.primitives.serialization import load_pem_private_key
+
 try:
     import grp
     POSIX_MODE = True
@@ -16,6 +21,33 @@ SYSTEM_SID = 'S-1-5-18'
 ADMINS_SID = 'S-1-5-32-544'


+def assert_elliptic_key(key, curve):
+    """
+    Asserts that the key at the given path is an EC key using the given curve.
+    :param key: path to key
+    :param curve: name of the expected elliptic curve
+    """
+    with open(key, 'rb') as file:
+        privkey1 = file.read()
+
+    key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())
+
+    assert isinstance(key, EllipticCurvePrivateKey)
+    assert isinstance(key.curve, curve)
+
+
+def assert_rsa_key(key):
+    """
+    Asserts that the key at the given path is an RSA key.
+    :param key: path to key
+    """
+    with open(key, 'rb') as file:
+        privkey1 = file.read()
+
+    key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())
+    assert isinstance(key, RSAPrivateKey)
+
+
 def assert_hook_execution(probe_path, probe_content):
     """
     Assert that a certbot hook has been executed
@@ -7,7 +7,7 @@ import tempfile
 from certbot_integration_tests.utils import certbot_call


-class IntegrationTestsContext(object):
+class IntegrationTestsContext:
     """General fixture describing a certbot integration tests context"""
     def __init__(self, request):
         self.request = request
@@ -77,6 +77,6 @@ class IntegrationTestsContext(object):
         appending the pytest worker id to the subdomain, using this pattern:
         {subdomain}.{worker_id}.wtf
         :param subdomain: the subdomain to use in the generated domain (default 'le')
         :return: the well-formed domain suitable for redirection on
         """
         return '{0}.{1}.wtf'.format(subdomain, self.worker_id)
@@ -1,5 +1,4 @@
 """Module executing integration tests against certbot core."""
-from __future__ import print_function

 import os
 from os.path import exists
@@ -9,12 +8,15 @@ import shutil
 import subprocess
 import time

+from cryptography.hazmat.primitives.asymmetric.ec import SECP256R1, SECP384R1, SECP521R1
 from cryptography.x509 import NameOID

 import pytest

 from certbot_integration_tests.certbot_tests import context as certbot_context
 from certbot_integration_tests.certbot_tests.assertions import assert_cert_count_for_lineage
+from certbot_integration_tests.certbot_tests.assertions import assert_elliptic_key
+from certbot_integration_tests.certbot_tests.assertions import assert_rsa_key
 from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_owner
 from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_permissions
 from certbot_integration_tests.certbot_tests.assertions import assert_equals_world_read_permissions
@@ -26,8 +28,9 @@ from certbot_integration_tests.certbot_tests.assertions import EVERYBODY_SID
 from certbot_integration_tests.utils import misc


-@pytest.fixture()
-def context(request):
+@pytest.fixture(name='context')
+def test_context(request):
+    # pylint: disable=missing-function-docstring
     # Fixture request is a built-in pytest fixture describing current test request.
     integration_test_context = certbot_context.IntegrationTestsContext(request)
     try:
@@ -144,6 +147,17 @@ def test_certonly(context):
     """Test the certonly verb on certbot."""
     context.certbot(['certonly', '--cert-name', 'newname', '-d', context.get_domain('newname')])

+    assert_cert_count_for_lineage(context.config_dir, 'newname', 1)
+
+
+def test_certonly_webroot(context):
+    """Test the certonly verb with webroot plugin"""
+    with misc.create_http_server(context.http_01_port) as webroot:
+        certname = context.get_domain('webroot')
+        context.certbot(['certonly', '-a', 'webroot', '--webroot-path', webroot, '-d', certname])
+
+    assert_cert_count_for_lineage(context.config_dir, certname, 1)
+

 def test_auth_and_install_with_csr(context):
     """Test certificate issuance and install using an existing CSR."""
@@ -219,14 +233,16 @@ def test_renew_files_propagate_permissions(context):
     if os.name != 'nt':
         os.chmod(privkey1, 0o444)
     else:
-        import win32security
-        import ntsecuritycon
+        import win32security  # pylint: disable=import-error
+        import ntsecuritycon  # pylint: disable=import-error
         # Get the current DACL of the private key
         security = win32security.GetFileSecurity(privkey1, win32security.DACL_SECURITY_INFORMATION)
         dacl = security.GetSecurityDescriptorDacl()
         # Create a read permission for Everybody group
         everybody = win32security.ConvertStringSidToSid(EVERYBODY_SID)
-        dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ, everybody)
+        dacl.AddAccessAllowedAce(
+            win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ, everybody
+        )
         # Apply the updated DACL to the private key
         security.SetSecurityDescriptorDacl(1, dacl, 0)
         win32security.SetFileSecurity(privkey1, win32security.DACL_SECURITY_INFORMATION, security)
@@ -235,12 +251,14 @@ def test_renew_files_propagate_permissions(context):

     assert_cert_count_for_lineage(context.config_dir, certname, 2)
     if os.name != 'nt':
-        # On Linux, read world permissions + all group permissions will be copied from the previous private key
+        # On Linux, read world permissions + all group permissions
+        # will be copied from the previous private key
         assert_world_read_permissions(privkey2)
         assert_equals_world_read_permissions(privkey1, privkey2)
         assert_equals_group_permissions(privkey1, privkey2)
     else:
-        # On Windows, world will never have any permissions, and group permission is irrelevant for this platform
+        # On Windows, world will never have any permissions, and
+        # group permission is irrelevant for this platform
         assert_world_no_permissions(privkey2)

@@ -289,7 +307,7 @@ def test_renew_with_changed_private_key_complexity(context):
     assert_cert_count_for_lineage(context.config_dir, certname, 1)

     context.certbot(['renew'])

     assert_cert_count_for_lineage(context.config_dir, certname, 2)
     key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
     assert os.stat(key2).st_size > 3000
@@ -421,20 +439,115 @@ def test_reuse_key(context):
     assert len({cert1, cert2, cert3}) == 3


+def test_incorrect_key_type(context):
+    with pytest.raises(subprocess.CalledProcessError):
+        context.certbot(['--key-type="failwhale"'])
+
+
 def test_ecdsa(context):
-    """Test certificate issuance with ECDSA key."""
+    """Test issuance for ECDSA CSR based request (legacy supported mode)."""
     key_path = join(context.workspace, 'privkey-p384.pem')
     csr_path = join(context.workspace, 'csr-p384.der')
     cert_path = join(context.workspace, 'cert-p384.pem')
     chain_path = join(context.workspace, 'chain-p384.pem')

-    misc.generate_csr([context.get_domain('ecdsa')], key_path, csr_path, key_type=misc.ECDSA_KEY_TYPE)
-    context.certbot(['auth', '--csr', csr_path, '--cert-path', cert_path, '--chain-path', chain_path])
+    misc.generate_csr(
+        [context.get_domain('ecdsa')],
+        key_path, csr_path,
+        key_type=misc.ECDSA_KEY_TYPE
+    )
+    context.certbot([
+        'auth', '--csr', csr_path, '--cert-path', cert_path,
+        '--chain-path', chain_path,
+    ])

     certificate = misc.read_certificate(cert_path)
     assert 'ASN1 OID: secp384r1' in certificate
+
+
+def test_default_key_type(context):
+    """Test default key type is RSA"""
+    certname = context.get_domain('renew')
+    context.certbot([
+        'certonly',
+        '--cert-name', certname, '-d', certname
+    ])
+    filename = join(context.config_dir, 'archive/{0}/privkey1.pem').format(certname)
+    assert_rsa_key(filename)
+
+
+def test_default_curve_type(context):
+    """Test that the curve used when not specifying any is secp256r1"""
+    certname = context.get_domain('renew')
+    context.certbot([
+        '--key-type', 'ecdsa', '--cert-name', certname, '-d', certname
+    ])
+    key1 = join(context.config_dir, 'archive/{0}/privkey1.pem'.format(certname))
+    assert_elliptic_key(key1, SECP256R1)
+
+
+@pytest.mark.parametrize('curve,curve_cls,skip_servers', [
+    # Curve name, Curve class, ACME servers to skip
+    ('secp256r1', SECP256R1, []),
+    ('secp384r1', SECP384R1, []),
+    ('secp521r1', SECP521R1, ['boulder-v1', 'boulder-v2'])]
+)
+def test_ecdsa_curves(context, curve, curve_cls, skip_servers):
+    """Test issuance for each supported ECDSA curve"""
+    if context.acme_server in skip_servers:
+        pytest.skip('ACME server {} does not support ECDSA curve {}'
+                    .format(context.acme_server, curve))
+
+    domain = context.get_domain('curve')
+    context.certbot([
+        'certonly',
+        '--key-type', 'ecdsa', '--elliptic-curve', curve,
+        '--force-renewal', '-d', domain,
+    ])
+    key = join(context.config_dir, "live", domain, 'privkey.pem')
+    assert_elliptic_key(key, curve_cls)
+
+
+def test_renew_with_ec_keys(context):
+    """Test proper renew with updated private key complexity."""
+    certname = context.get_domain('renew')
+    context.certbot([
+        'certonly',
+        '--cert-name', certname,
+        '--key-type', 'ecdsa', '--elliptic-curve', 'secp256r1',
+        '--force-renewal', '-d', certname,
+    ])
+
+    key1 = join(context.config_dir, "archive", certname, 'privkey1.pem')
+    assert 200 < os.stat(key1).st_size < 250  # ec keys of 256 bits are ~225 bytes
+    assert_elliptic_key(key1, SECP256R1)
+    assert_cert_count_for_lineage(context.config_dir, certname, 1)
+
+    context.certbot(['renew', '--elliptic-curve', 'secp384r1'])
+
+    assert_cert_count_for_lineage(context.config_dir, certname, 2)
+    key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
+    assert_elliptic_key(key2, SECP384R1)
+    assert 280 < os.stat(key2).st_size < 320  # ec keys of 384 bits are ~310 bytes
+
+    # We expect here that the command will fail because, without --key-type specified,
+    # Certbot must error out to prevent changing an existing certificate key type
+    # without explicit user consent (by specifying both --cert-name and --key-type).
+    with pytest.raises(subprocess.CalledProcessError):
+        context.certbot([
+            'certonly',
+            '--force-renewal',
+            '-d', certname
+        ])
+
+    # We expect the previous requirement of specifying both --cert-name and
+    # --key-type not to apply to the renew subcommand.
+    context.certbot(['renew', '--force-renewal', '--key-type', 'rsa'])
+    assert_cert_count_for_lineage(context.config_dir, certname, 3)
+    key3 = join(context.config_dir, 'archive', certname, 'privkey3.pem')
+    assert_rsa_key(key3)
+

 def test_ocsp_must_staple(context):
     """Test that OCSP Must-Staple is correctly set in the generated certificate."""
     if context.acme_server == 'pebble':
@@ -533,18 +646,22 @@ def test_revoke_multiple_lineages(context):
     with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'r') as file:
         data = file.read()

-    data = re.sub('archive_dir = .*\n',
-                  'archive_dir = {0}\n'.format(join(context.config_dir, 'archive', cert1).replace('\\', '\\\\')),
-                  data)
+    data = re.sub(
+        'archive_dir = .*\n',
+        'archive_dir = {0}\n'.format(
+            join(context.config_dir, 'archive', cert1).replace('\\', '\\\\')
+        ), data
+    )

     with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'w') as file:
         file.write(data)

-    output = context.certbot([
+    context.certbot([
         'revoke', '--cert-path', join(context.config_dir, 'live', cert1, 'cert.pem')
     ])

-    assert 'Not deleting revoked certs due to overlapping archive dirs' in output
+    with open(join(context.workspace, 'logs', 'letsencrypt.log'), 'r') as f:
+        assert 'Not deleting revoked certificates due to overlapping archive dirs' in f.read()


 def test_wildcard_certificates(context):
@@ -657,4 +774,4 @@ def test_preferred_chain(context):

     with open(conf_path, 'r') as f:
         assert 'preferred_chain = {}'.format(requested) in f.read(), \
             'Expected preferred_chain to be set in renewal config'
@@ -6,12 +6,12 @@ for a directory a specific configuration using built-in pytest hooks.

 See https://docs.pytest.org/en/latest/reference.html#hook-reference
 """
-from __future__ import print_function
 import contextlib
 import subprocess
 import sys

 from certbot_integration_tests.utils import acme_server as acme_lib
+from certbot_integration_tests.utils import dns_server as dns_lib


 def pytest_addoption(parser):
@@ -23,6 +23,10 @@ def pytest_addoption(parser):
                      choices=['boulder-v1', 'boulder-v2', 'pebble'],
                      help='select the ACME server to use (boulder-v1, boulder-v2, '
                           'pebble), defaulting to pebble')
+    parser.addoption('--dns-server', default='challtestsrv',
+                     choices=['bind', 'challtestsrv'],
+                     help='select the DNS server to use (bind, challtestsrv), '
+                          'defaulting to challtestsrv')


 def pytest_configure(config):
@@ -32,7 +36,7 @@ def pytest_configure(config):
     """
     if not hasattr(config, 'slaveinput'):  # If true, this is the primary node
         with _print_on_err():
-            config.acme_xdist = _setup_primary_node(config)
+            _setup_primary_node(config)


 def pytest_configure_node(node):
@@ -41,6 +45,7 @@ def pytest_configure_node(node):
     :param node: current worker node
     """
     node.slaveinput['acme_xdist'] = node.config.acme_xdist
+    node.slaveinput['dns_xdist'] = node.config.dns_xdist


 @contextlib.contextmanager
@@ -61,12 +66,18 @@ def _print_on_err():
 def _setup_primary_node(config):
     """
     Setup the environment for integration tests.
-    Will:
+
+    This function will:
     - check runtime compatibility (Docker, docker-compose, Nginx)
     - create a temporary workspace and the persistent GIT repositories space
+    - configure and start a DNS server using Docker, if configured
     - configure and start paralleled ACME CA servers using Docker
-    - transfer ACME CA servers configurations to pytest nodes using env variables
-    :param config: Configuration of the pytest primary node
+    - transfer ACME CA and DNS servers configurations to pytest nodes using env variables
+
+    This function modifies `config` by injecting the ACME CA and DNS server configurations,
+    in addition to cleanup functions for those servers.
+
+    :param config: Configuration of the pytest primary node. Is modified by this function.
     """
     # Check for runtime compatibility: some tools are required to be available in PATH
     if 'boulder' in config.option.acme_server:
@@ -79,18 +90,35 @@ def _setup_primary_node(config):
     try:
         subprocess.check_output(['docker-compose', '-v'], stderr=subprocess.STDOUT)
     except (subprocess.CalledProcessError, OSError):
-        raise ValueError('Error: docker-compose is required in PATH to launch the integration tests, '
-                         'but is not installed or not available for current user.')
+        raise ValueError(
+            'Error: docker-compose is required in PATH to launch the integration tests, '
+            'but is not installed or not available for current user.'
+        )

     # Parameter numprocesses is added to option by pytest-xdist
     workers = ['primary'] if not config.option.numprocesses\
         else ['gw{0}'.format(i) for i in range(config.option.numprocesses)]

+    # If a non-default DNS server is configured, start it and feed it to the ACME server
+    dns_server = None
+    acme_dns_server = None
+    if config.option.dns_server == 'bind':
+        dns_server = dns_lib.DNSServer(workers)
+        config.add_cleanup(dns_server.stop)
+        print('DNS xdist config:\n{0}'.format(dns_server.dns_xdist))
+        dns_server.start()
+        acme_dns_server = '{}:{}'.format(
+            dns_server.dns_xdist['address'],
+            dns_server.dns_xdist['port']
+        )
+
     # By calling setup_acme_server we ensure that all necessary acme server instances will be
     # fully started. This runtime is reflected by the acme_xdist returned.
-    acme_server = acme_lib.ACMEServer(config.option.acme_server, workers)
+    acme_server = acme_lib.ACMEServer(config.option.acme_server, workers,
+                                      dns_server=acme_dns_server)
     config.add_cleanup(acme_server.stop)
     print('ACME xdist config:\n{0}'.format(acme_server.acme_xdist))
     acme_server.start()

-    return acme_server.acme_xdist
+    config.acme_xdist = acme_server.acme_xdist
+    config.dns_xdist = dns_server.dns_xdist if dns_server else None
@@ -1,3 +1,4 @@
+"""Module to handle the context of nginx integration tests."""
 import os
 import subprocess

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """General purpose nginx test configuration generator."""
 import getpass

@@ -42,6 +43,8 @@ events {{
   worker_connections 1024;
 }}

+# “This comment contains valid Unicode”.
+
 http {{
   # Set an array of temp, cache and log file options that will otherwise default to
   # restricted locations accessible only to root.
@@ -51,61 +54,61 @@ http {{
   #scgi_temp_path {nginx_root}/scgi_temp;
   #uwsgi_temp_path {nginx_root}/uwsgi_temp;
   access_log {nginx_root}/error.log;

   # This should be turned off in a Virtualbox VM, as it can cause some
   # interesting issues with data corruption in delivered files.
   sendfile off;

   tcp_nopush on;
   tcp_nodelay on;
   keepalive_timeout 65;
   types_hash_max_size 2048;

   #include /etc/nginx/mime.types;
   index index.html index.htm index.php;

   log_format main '$remote_addr - $remote_user [$time_local] $status '
                   '"$request" $body_bytes_sent "$http_referer" '
                   '"$http_user_agent" "$http_x_forwarded_for"';

   default_type application/octet-stream;

   server {{
     # IPv4.
     listen {http_port} {default_server};
     # IPv6.
     listen [::]:{http_port} {default_server};
     server_name nginx.{wtf_prefix}.wtf nginx2.{wtf_prefix}.wtf;

     root {nginx_webroot};

     location / {{
       # First attempt to serve request as file, then as directory, then fall
       # back to index.html.
       try_files $uri $uri/ /index.html;
     }}
   }}

   server {{
     listen {http_port};
     listen [::]:{http_port};
     server_name nginx3.{wtf_prefix}.wtf;

     root {nginx_webroot};

     location /.well-known/ {{
       return 404;
     }}

     return 301 https://$host$request_uri;
   }}

   server {{
     listen {other_port};
     listen [::]:{other_port};
     server_name nginx4.{wtf_prefix}.wtf nginx5.{wtf_prefix}.wtf;
   }}

   server {{
     listen {http_port};
     listen [::]:{http_port};
@@ -1,14 +1,15 @@
 """Module executing integration tests against certbot with nginx plugin."""
 import os
 import ssl
+from typing import List

 import pytest

 from certbot_integration_tests.nginx_tests import context as nginx_context


-@pytest.fixture()
-def context(request):
+@pytest.fixture(name='context')
+def test_context(request):
     # Fixture request is a built-in pytest fixture describing current test request.
     integration_test_context = nginx_context.IntegrationTestsContext(request)
     try:
@@ -27,10 +28,12 @@ def context(request):
     # No matching server block; default_server does not exist
     ('nginx5.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': False}),
     # Multiple domains, mix of matching and not
-    ('nginx6.{0}.wtf,nginx7.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': False}),
+    ('nginx6.{0}.wtf,nginx7.{0}.wtf', [
+        '--preferred-challenges', 'http'
+    ], {'default_server': False}),
 ], indirect=['context'])
-def test_certificate_deployment(certname_pattern, params, context):
-    # type: (str, list, nginx_context.IntegrationTestsContext) -> None
+def test_certificate_deployment(certname_pattern: str, params: List[str],
+                                context: nginx_context.IntegrationTestsContext) -> None:
     """
     Test various scenarios to deploy a certificate to nginx using certbot.
     """
@@ -41,7 +44,9 @@ def test_certificate_deployment(certname_pattern, params, context):

     lineage = domains.split(',')[0]
     server_cert = ssl.get_server_certificate(('localhost', context.tls_alpn_01_port))
-    with open(os.path.join(context.workspace, 'conf/live/{0}/cert.pem'.format(lineage)), 'r') as file:
+    with open(os.path.join(
+        context.workspace, 'conf/live/{0}/cert.pem'.format(lineage)), 'r'
+    ) as file:
         certbot_cert = file.read()

     assert server_cert == certbot_cert
@@ -0,0 +1,66 @@
+"""Module to handle the context of RFC2136 integration tests."""
+
+from contextlib import contextmanager
+import tempfile
+
+from pkg_resources import resource_filename
+from pytest import skip
+
+from certbot_integration_tests.certbot_tests import context as certbot_context
+from certbot_integration_tests.utils import certbot_call
+
+
+class IntegrationTestsContext(certbot_context.IntegrationTestsContext):
+    """Integration test context for certbot-dns-rfc2136"""
+    def __init__(self, request):
+        super(IntegrationTestsContext, self).__init__(request)
+
+        self.request = request
+
+        self._dns_xdist = None
+        if hasattr(request.config, 'slaveinput'):  # Worker node
+            self._dns_xdist = request.config.slaveinput['dns_xdist']
+        else:  # Primary node
+            self._dns_xdist = request.config.dns_xdist
+
+    def certbot_test_rfc2136(self, args):
+        """
+        Main command to execute certbot using the RFC2136 DNS authenticator.
+        :param list args: list of arguments to pass to Certbot
+        """
+        command = ['--authenticator', 'dns-rfc2136', '--dns-rfc2136-propagation-seconds', '2']
+        command.extend(args)
+        return certbot_call.certbot_test(
+            command, self.directory_url, self.http_01_port, self.tls_alpn_01_port,
+            self.config_dir, self.workspace, force_renew=True)
+
+    @contextmanager
+    def rfc2136_credentials(self, label='default'):
+        """
+        Produces the contents of a certbot-dns-rfc2136 credentials file.
+        :param str label: which RFC2136 credential to use
+        :yields: Path to credentials file
+        :rtype: str
+        """
+        src_file = resource_filename('certbot_integration_tests',
+                                     'assets/bind-config/rfc2136-credentials-{}.ini.tpl'
+                                     .format(label))
+        contents = None
+
+        with open(src_file, 'r') as f:
+            contents = f.read().format(
+                server_address=self._dns_xdist['address'],
+                server_port=self._dns_xdist['port']
+            )
+
+        with tempfile.NamedTemporaryFile('w+', prefix='rfc2136-creds-{}'.format(label),
+                                         suffix='.ini', dir=self.workspace) as fp:
+            fp.write(contents)
+            fp.flush()
+            yield fp.name
+
+    def skip_if_no_bind9_server(self):
+        """Skips the test if there was no RFC2136-capable DNS server configured
+        in the test environment"""
+        if not self._dns_xdist:
+            skip('No RFC2136-capable DNS server is configured')
@@ -0,0 +1,26 @@
+"""Module executing integration tests against Certbot with the RFC2136 DNS authenticator."""
+import pytest
+
+from certbot_integration_tests.rfc2136_tests import context as rfc2136_context
+
+
+@pytest.fixture(name="context")
+def pytest_context(request):
+    # pylint: disable=missing-function-docstring
+    # Fixture request is a built-in pytest fixture describing current test request.
+    integration_test_context = rfc2136_context.IntegrationTestsContext(request)
+    try:
+        yield integration_test_context
+    finally:
+        integration_test_context.cleanup()
+
+
+@pytest.mark.parametrize('domain', [('example.com'), ('sub.example.com')])
+def test_get_certificate(domain, context):
+    context.skip_if_no_bind9_server()
+
+    with context.rfc2136_credentials() as creds:
+        context.certbot_test_rfc2136([
+            'certonly', '--dns-rfc2136-credentials', creds,
+            '-d', domain, '-d', '*.{}'.format(domain)
+        ])
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 """Module to setup an ACME CA server environment able to run multiple tests in parallel"""
-from __future__ import print_function

+import argparse
 import errno
 import json
 import os
@@ -11,16 +11,18 @@ import subprocess
 import sys
 import tempfile
 import time
+from typing import List

 import requests

+# pylint: disable=wildcard-import,unused-wildcard-import
 from certbot_integration_tests.utils import misc
 from certbot_integration_tests.utils import pebble_artifacts
 from certbot_integration_tests.utils import proxy
 from certbot_integration_tests.utils.constants import *


-class ACMEServer(object):
+class ACMEServer:
     """
     ACMEServer configures and handles the lifecycle of an ACME CA server and an HTTP reverse proxy
     instance, to allow parallel execution of integration tests against the unique http-01 port
@@ -29,24 +31,34 @@ class ACMEServer(object):
     ACMEServer gives access the acme_xdist parameter, listing the ports and directory url to use
     for each pytest node. It exposes also start and stop methods in order to start the stack, and
     stop it with proper resources cleanup.
-    ACMEServer is also a context manager, and so can be used to ensure ACME server is started/stopped
-    upon context enter/exit.
+    ACMEServer is also a context manager, and so can be used to ensure ACME server is
+    started/stopped upon context enter/exit.
     """
-    def __init__(self, acme_server, nodes, http_proxy=True, stdout=False):
+    def __init__(self, acme_server, nodes, http_proxy=True, stdout=False,
+                 dns_server=None, http_01_port=DEFAULT_HTTP_01_PORT):
         """
         Create an ACMEServer instance.
         :param str acme_server: the type of acme server used (boulder-v1, boulder-v2 or pebble)
         :param list nodes: list of node names that will be setup by pytest xdist
         :param bool http_proxy: if False do not start the HTTP proxy
         :param bool stdout: if True stream all subprocesses stdout to standard stdout
+        :param str dns_server: if set, Pebble/Boulder will use it to resolve domains
+        :param int http_01_port: port to use for http-01 validation; currently
+            only supported for pebble without an HTTP proxy
         """
         self._construct_acme_xdist(acme_server, nodes)

         self._acme_type = 'pebble' if acme_server == 'pebble' else 'boulder'
         self._proxy = http_proxy
         self._workspace = tempfile.mkdtemp()
-        self._processes = []
+        self._processes: List[subprocess.Popen] = []
         self._stdout = sys.stdout if stdout else open(os.devnull, 'w')
+        self._dns_server = dns_server
+        self._http_01_port = http_01_port
+        if http_01_port != DEFAULT_HTTP_01_PORT:
+            if self._acme_type != 'pebble' or self._proxy:
+                raise ValueError('setting http_01_port is not currently supported '
+                                 'with boulder or the HTTP proxy')

     def start(self):
         """Start the test stack"""
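The constructor now accepts `dns_server` and `http_01_port`; the later hunks wire them into Pebble and Boulder. As a rough usage sketch (illustrative only, relying solely on the documented context-manager behaviour and the `acme_xdist` attribute; the DNS address below is a placeholder):

```python
# Illustrative sketch, not part of the changeset: run a Pebble stack that resolves
# domains through an external DNS server (address/port below are placeholders).
from certbot_integration_tests.utils.acme_server import ACMEServer

acme_server = ACMEServer('pebble', ['primary'], dns_server='127.0.0.1:45953')
with acme_server:  # started on enter, stopped with cleanup on exit (per the docstring)
    print(acme_server.acme_xdist['directory_url'])
```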
@@ -103,26 +115,34 @@ class ACMEServer(object):
         """Generate and return the acme_xdist dict"""
         acme_xdist = {'acme_server': acme_server, 'challtestsrv_port': CHALLTESTSRV_PORT}

-        # Directory and ACME port are set implicitly in the docker-compose.yml files of Boulder/Pebble.
+        # Directory and ACME port are set implicitly in the docker-compose.yml
+        # files of Boulder/Pebble.
         if acme_server == 'pebble':
             acme_xdist['directory_url'] = PEBBLE_DIRECTORY_URL
         else:  # boulder
             acme_xdist['directory_url'] = BOULDER_V2_DIRECTORY_URL \
                 if acme_server == 'boulder-v2' else BOULDER_V1_DIRECTORY_URL

-        acme_xdist['http_port'] = {node: port for (node, port)
-                                   in zip(nodes, range(5200, 5200 + len(nodes)))}
-        acme_xdist['https_port'] = {node: port for (node, port)
-                                    in zip(nodes, range(5100, 5100 + len(nodes)))}
-        acme_xdist['other_port'] = {node: port for (node, port)
-                                    in zip(nodes, range(5300, 5300 + len(nodes)))}
+        acme_xdist['http_port'] = {
+            node: port for (node, port) in  # pylint: disable=unnecessary-comprehension
+            zip(nodes, range(5200, 5200 + len(nodes)))
+        }
+        acme_xdist['https_port'] = {
+            node: port for (node, port) in  # pylint: disable=unnecessary-comprehension
+            zip(nodes, range(5100, 5100 + len(nodes)))
+        }
+        acme_xdist['other_port'] = {
+            node: port for (node, port) in  # pylint: disable=unnecessary-comprehension
+            zip(nodes, range(5300, 5300 + len(nodes)))
+        }

         self.acme_xdist = acme_xdist

     def _prepare_pebble_server(self):
         """Configure and launch the Pebble server"""
         print('=> Starting pebble instance deployment...')
-        pebble_path, challtestsrv_path, pebble_config_path = pebble_artifacts.fetch(self._workspace)
+        pebble_artifacts_rv = pebble_artifacts.fetch(self._workspace, self._http_01_port)
+        pebble_path, challtestsrv_path, pebble_config_path = pebble_artifacts_rv

         # Configure Pebble at full speed (PEBBLE_VA_NOSLEEP=1) and not randomly refusing valid
         # nonce (PEBBLE_WFE_NONCEREJECT=0) to have a stable test environment.
@@ -132,18 +152,23 @@ class ACMEServer(object):
         environ['PEBBLE_AUTHZREUSE'] = '100'
         environ['PEBBLE_ALTERNATE_ROOTS'] = str(PEBBLE_ALTERNATE_ROOTS)

+        if self._dns_server:
+            dns_server = self._dns_server
+        else:
+            dns_server = '127.0.0.1:8053'
+        self._launch_process(
+            [challtestsrv_path, '-management', ':{0}'.format(CHALLTESTSRV_PORT),
+             '-defaultIPv6', '""', '-defaultIPv4', '127.0.0.1', '-http01', '""',
+             '-tlsalpn01', '""', '-https01', '""'])
+
         self._launch_process(
-            [pebble_path, '-config', pebble_config_path, '-dnsserver', '127.0.0.1:8053', '-strict'],
+            [pebble_path, '-config', pebble_config_path, '-dnsserver', dns_server, '-strict'],
             env=environ)

-        self._launch_process(
-            [challtestsrv_path, '-management', ':{0}'.format(CHALLTESTSRV_PORT), '-defaultIPv6', '""',
-             '-defaultIPv4', '127.0.0.1', '-http01', '""', '-tlsalpn01', '""', '-https01', '""'])
-
-        # pebble_ocsp_server is imported here and not at the top of module in order to avoid a useless
-        # ImportError, in the case where cryptography dependency is too old to support ocsp, but
-        # Boulder is used instead of Pebble, so pebble_ocsp_server is not used. This is the typical
-        # situation of integration-certbot-oldest tox testenv.
+        # pebble_ocsp_server is imported here and not at the top of module in order to avoid a
+        # useless ImportError, in the case where cryptography dependency is too old to support
+        # ocsp, but Boulder is used instead of Pebble, so pebble_ocsp_server is not used. This is
+        # the typical situation of integration-certbot-oldest tox testenv.
         from certbot_integration_tests.utils import pebble_ocsp_server
         self._launch_process([sys.executable, pebble_ocsp_server.__file__])

@@ -167,6 +192,15 @@ class ACMEServer(object):
         os.rename(join(instance_path, 'test/rate-limit-policies-b.yml'),
                   join(instance_path, 'test/rate-limit-policies.yml'))

+        if self._dns_server:
+            # Change Boulder config to use the provided DNS server
+            for suffix in ["", "-remote-a", "-remote-b"]:
+                with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'r') as f:
+                    config = json.loads(f.read())
+                config['va']['dnsResolvers'] = [self._dns_server]
+                with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'w') as f:
+                    f.write(json.dumps(config, indent=2, separators=(',', ': ')))
+
         try:
             # Launch the Boulder server
             self._launch_process(['docker-compose', 'up', '--force-recreate'], cwd=instance_path)
@@ -175,14 +209,18 @@ class ACMEServer(object):
             print('=> Waiting for boulder instance to respond...')
             misc.check_until_timeout(self.acme_xdist['directory_url'], attempts=300)
 
-            # Configure challtestsrv to answer any A record request with ip of the docker host.
-            response = requests.post('http://localhost:{0}/set-default-ipv4'.format(CHALLTESTSRV_PORT),
-                                     json={'ip': '10.77.77.1'})
-            response.raise_for_status()
+            if not self._dns_server:
+                # Configure challtestsrv to answer any A record request with ip of the docker host.
+                response = requests.post('http://localhost:{0}/set-default-ipv4'.format(
+                    CHALLTESTSRV_PORT), json={'ip': '10.77.77.1'}
+                )
+                response.raise_for_status()
         except BaseException:
             # If we failed to set up boulder, print its logs.
             print('=> Boulder setup failed. Boulder logs are:')
-            process = self._launch_process(['docker-compose', 'logs'], cwd=instance_path, force_stderr=True)
+            process = self._launch_process([
+                'docker-compose', 'logs'], cwd=instance_path, force_stderr=True
+            )
             process.wait()
             raise
 
@@ -193,7 +231,7 @@ class ACMEServer(object):
         print('=> Configuring the HTTP proxy...')
         mapping = {r'.+\.{0}\.wtf'.format(node): 'http://127.0.0.1:{0}'.format(port)
                    for node, port in self.acme_xdist['http_port'].items()}
-        command = [sys.executable, proxy.__file__, str(HTTP_01_PORT), json.dumps(mapping)]
+        command = [sys.executable, proxy.__file__, str(DEFAULT_HTTP_01_PORT), json.dumps(mapping)]
         self._launch_process(command)
         print('=> Finished configuring the HTTP proxy.')
 
@@ -202,20 +240,34 @@ class ACMEServer(object):
         if not env:
             env = os.environ
         stdout = sys.stderr if force_stderr else self._stdout
-        process = subprocess.Popen(command, stdout=stdout, stderr=subprocess.STDOUT, cwd=cwd, env=env)
+        process = subprocess.Popen(
+            command, stdout=stdout, stderr=subprocess.STDOUT, cwd=cwd, env=env
+        )
         self._processes.append(process)
         return process
 
 
 def main():
-    args = sys.argv[1:]
-    server_type = args[0] if args else 'pebble'
-    possible_values = ('pebble', 'boulder-v1', 'boulder-v2')
-    if server_type not in possible_values:
-        raise ValueError('Invalid server value {0}, should be one of {1}'
-                         .format(server_type, possible_values))
+    # pylint: disable=missing-function-docstring
+    parser = argparse.ArgumentParser(
+        description='CLI tool to start a local instance of Pebble or Boulder CA server.')
+    parser.add_argument('--server-type', '-s',
+                        choices=['pebble', 'boulder-v1', 'boulder-v2'], default='pebble',
+                        help='type of CA server to start: can be Pebble or Boulder '
+                             '(in ACMEv1 or ACMEv2 mode), Pebble is used if not set.')
+    parser.add_argument('--dns-server', '-d',
+                        help='specify the DNS server as `IP:PORT` to use by '
+                             'Pebble; if not specified, a local mock DNS server will be used to '
+                             'resolve domains to localhost.')
+    parser.add_argument('--http-01-port', type=int, default=DEFAULT_HTTP_01_PORT,
+                        help='specify the port to use for http-01 validation; '
+                             'this is currently only supported for Pebble.')
+    args = parser.parse_args()
 
-    acme_server = ACMEServer(server_type, [], http_proxy=False, stdout=True)
+    acme_server = ACMEServer(
+        args.server_type, [], http_proxy=False, stdout=True,
+        dns_server=args.dns_server, http_01_port=args.http_01_port,
+    )
 
     try:
         with acme_server as acme_xdist:
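
Aside (not part of the diff): with this hunk, main() simply forwards the parsed flags to ACMEServer, so the same server can also be driven programmatically. A minimal sketch, assuming the constructor keywords shown above; the acme_server module path and the DNS address are illustrative assumptions, and the '--dns-server' value follows the `IP:PORT` format described in the help text.

    # Sketch only: roughly equivalent to
    #   --server-type pebble --dns-server 127.0.0.1:45953 --http-01-port 5002
    from certbot_integration_tests.utils.acme_server import ACMEServer  # module path assumed

    acme_server = ACMEServer(
        'pebble', [], http_proxy=False, stdout=True,
        dns_server='127.0.0.1:45953',  # hypothetical RFC2136-capable resolver, as 'IP:PORT'
        http_01_port=5002,             # same value as DEFAULT_HTTP_01_PORT
    )
    with acme_server as acme_xdist:
        # the context manager starts the processes and yields the xdist parameters
        print(acme_xdist['directory_url'])
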

@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 """Module to call certbot in test mode"""
-from __future__ import absolute_import
 
-from distutils.version import LooseVersion
 import os
 import subprocess
 import sys
+from distutils.version import LooseVersion
 
 import certbot_integration_tests
+# pylint: disable=wildcard-import,unused-wildcard-import
 from certbot_integration_tests.utils.constants import *
 
 
@@ -35,6 +35,8 @@ def certbot_test(certbot_args, directory_url, http_01_port, tls_alpn_01_port,
 
 
 def _prepare_environ(workspace):
+    # pylint: disable=missing-function-docstring
+
     new_environ = os.environ.copy()
     new_environ['TMPDIR'] = workspace
 
@@ -58,8 +60,13 @@ def _prepare_environ(workspace):
     # certbot_integration_tests.__file__ is:
     # '/path/to/certbot/certbot-ci/certbot_integration_tests/__init__.pyc'
     # ... and we want '/path/to/certbot'
-    certbot_root = os.path.dirname(os.path.dirname(os.path.dirname(certbot_integration_tests.__file__)))
-    python_paths = [path for path in new_environ['PYTHONPATH'].split(':') if path != certbot_root]
+    certbot_root = os.path.dirname(os.path.dirname(
+        os.path.dirname(certbot_integration_tests.__file__))
+    )
+    python_paths = [
+        path for path in new_environ['PYTHONPATH'].split(':')
+        if path != certbot_root
+    ]
     new_environ['PYTHONPATH'] = ':'.join(python_paths)
 
     return new_environ
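
Aside (not part of the diff): the comments in the hunk above spell out the derivation; here is a tiny worked example using the sample path from those comments (the PYTHONPATH value below is made up for illustration).

    import os.path

    # Three dirname() calls climb from the package __init__ file to the repository root:
    module_file = '/path/to/certbot/certbot-ci/certbot_integration_tests/__init__.pyc'
    certbot_root = os.path.dirname(os.path.dirname(os.path.dirname(module_file)))
    assert certbot_root == '/path/to/certbot'

    # That root is then dropped from PYTHONPATH, as in the list comprehension above:
    python_paths = [p for p in '/path/to/certbot:/some/other/path'.split(':') if p != certbot_root]
    assert python_paths == ['/some/other/path']
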
@@ -70,7 +77,8 @@ def _compute_additional_args(workspace, environ, force_renew):
     output = subprocess.check_output(['certbot', '--version'],
                                      universal_newlines=True, stderr=subprocess.STDOUT,
                                      cwd=workspace, env=environ)
-    version_str = output.split(' ')[1].strip()  # Typical response is: output = 'certbot 0.31.0.dev0'
+    # Typical response is: output = 'certbot 0.31.0.dev0'
+    version_str = output.split(' ')[1].strip()
     if LooseVersion(version_str) >= LooseVersion('0.30.0'):
         additional_args.append('--no-random-sleep-on-renew')
 
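
Aside (not part of the diff): a quick check of the parsing above, using the sample output quoted in the comment.

    from distutils.version import LooseVersion

    output = 'certbot 0.31.0.dev0'              # sample from the comment above
    version_str = output.split(' ')[1].strip()  # -> '0.31.0.dev0'
    assert LooseVersion(version_str) >= LooseVersion('0.30.0')  # so --no-random-sleep-on-renew gets appended
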
@@ -113,11 +121,12 @@ def _prepare_args_env(certbot_args, directory_url, http_01_port, tls_alpn_01_por
 
 
 def main():
+    # pylint: disable=missing-function-docstring
     args = sys.argv[1:]
 
     # Default config is pebble
     directory_url = os.environ.get('SERVER', PEBBLE_DIRECTORY_URL)
-    http_01_port = int(os.environ.get('HTTP_01_PORT', HTTP_01_PORT))
+    http_01_port = int(os.environ.get('HTTP_01_PORT', DEFAULT_HTTP_01_PORT))
     tls_alpn_01_port = int(os.environ.get('TLS_ALPN_01_PORT', TLS_ALPN_01_PORT))
 
     # Execution of certbot in a self-contained workspace
@@ -1,5 +1,5 @@
 """Some useful constants to use throughout certbot-ci integration tests"""
-HTTP_01_PORT = 5002
+DEFAULT_HTTP_01_PORT = 5002
 TLS_ALPN_01_PORT = 5001
 CHALLTESTSRV_PORT = 8055
 BOULDER_V1_DIRECTORY_URL = 'http://localhost:4000/directory'
@@ -7,4 +7,4 @@ BOULDER_V2_DIRECTORY_URL = 'http://localhost:4001/directory'
 PEBBLE_DIRECTORY_URL = 'https://localhost:14000/dir'
 PEBBLE_MANAGEMENT_URL = 'https://localhost:15000'
 MOCK_OCSP_SERVER_PORT = 4002
 PEBBLE_ALTERNATE_ROOTS = 2
certbot-ci/certbot_integration_tests/utils/dns_server.py (new file, 152 lines)
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+"""Module to set up an RFC2136-capable DNS server"""
+import os
+import os.path
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pkg_resources import resource_filename
+
+BIND_DOCKER_IMAGE = "internetsystemsconsortium/bind9:9.16"
+BIND_BIND_ADDRESS = ("127.0.0.1", 45953)
+
+# A TCP DNS message which is a query for '. CH A' transaction ID 0xcb37. This is used
+# by _wait_until_ready to check that BIND is responding without depending on dnspython.
+BIND_TEST_QUERY = bytearray.fromhex("0011cb37000000010000000000000000010003")
+
+
+class DNSServer:
+    """
+    DNSServer configures and handles the lifetime of an RFC2136-capable server.
+    DNSServer provides access to the dns_xdist parameter, listing the address and port
+    to use for each pytest node.
+
+    At this time, DNSServer should only be used with a single node, but may be expanded in
+    future to support parallelization (https://github.com/certbot/certbot/issues/8455).
+    """
+
+    def __init__(self, unused_nodes, show_output=False):
+        """
+        Create a DNSServer instance.
+        :param list nodes: list of node names that will be set up by pytest xdist
+        :param bool show_output: if True, print the output of the DNS server
+        """
+
+        self.bind_root = tempfile.mkdtemp()
+
+        self.process: subprocess.Popen = None
+
+        self.dns_xdist = {"address": BIND_BIND_ADDRESS[0], "port": BIND_BIND_ADDRESS[1]}
+
+        # Unfortunately the BIND9 image forces everything to stderr with -g and we can't
+        # modify the verbosity.
+        self._output = sys.stderr if show_output else open(os.devnull, "w")
+
+    def start(self):
+        """Start the DNS server"""
+        try:
+            self._configure_bind()
+            self._start_bind()
+        except:
+            self.stop()
+            raise
+
+    def stop(self):
+        """Stop the DNS server, and clean its resources"""
+        if self.process:
+            try:
+                self.process.terminate()
+                self.process.wait()
+            except BaseException as e:
+                print("BIND9 did not stop cleanly: {}".format(e), file=sys.stderr)
+
+        shutil.rmtree(self.bind_root, ignore_errors=True)
+
+        if self._output != sys.stderr:
+            self._output.close()
+
+    def _configure_bind(self):
+        """Configure the BIND9 server based on the prebaked configuration"""
+        bind_conf_src = resource_filename(
+            "certbot_integration_tests", "assets/bind-config"
+        )
+        for directory in ("conf", "zones"):
+            shutil.copytree(
+                os.path.join(bind_conf_src, directory), os.path.join(self.bind_root, directory)
+            )
+
+    def _start_bind(self):
+        """Launch the BIND9 server as a Docker container"""
+        addr_str = "{}:{}".format(BIND_BIND_ADDRESS[0], BIND_BIND_ADDRESS[1])
+        self.process = subprocess.Popen(
+            [
+                "docker",
+                "run",
+                "--rm",
+                "-p",
+                "{}:53/udp".format(addr_str),
+                "-p",
+                "{}:53/tcp".format(addr_str),
+                "-v",
+                "{}/conf:/etc/bind".format(self.bind_root),
+                "-v",
+                "{}/zones:/var/lib/bind".format(self.bind_root),
+                BIND_DOCKER_IMAGE,
+            ],
+            stdout=self._output,
+            stderr=self._output,
+        )
+
+        if self.process.poll():
+            raise ValueError("BIND9 server stopped unexpectedly")
+
+        try:
+            self._wait_until_ready()
+        except:
+            # The container might be running even if we think it isn't
+            self.stop()
+            raise
+
+    def _wait_until_ready(self, attempts: int = 30) -> None:
+        """
+        Polls the DNS server over TCP until it gets a response, or until
+        it runs out of attempts and raises a ValueError.
+        The DNS response message must match the txn_id of the DNS query message,
+        but otherwise the contents are ignored.
+        :param int attempts: The number of attempts to make.
+        """
+        for _ in range(attempts):
+            if self.process.poll():
+                raise ValueError("BIND9 server stopped unexpectedly")
+
+            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            sock.settimeout(5.0)
+            try:
+                sock.connect(BIND_BIND_ADDRESS)
+                sock.sendall(BIND_TEST_QUERY)
+                buf = sock.recv(1024)
+                # We should receive a DNS message with the same tx_id
+                if buf and len(buf) > 4 and buf[2:4] == BIND_TEST_QUERY[2:4]:
+                    return
+                # If we got a response but it wasn't the one we wanted, wait a little
+                time.sleep(1)
+            except:  # pylint: disable=bare-except
+                # If there was a network error, wait a little
+                time.sleep(1)
+            finally:
+                sock.close()
+
+        raise ValueError(
+            "Gave up waiting for DNS server {} to respond".format(BIND_BIND_ADDRESS)
+        )
+
+    def __enter__(self):
+        self.start()
+        return self.dns_xdist
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.stop()
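
Aside (not part of the diff): the readiness probe above works because BIND_TEST_QUERY is a complete DNS query in TCP framing. Decoded for reference, with a hypothetical helper that mirrors the check in _wait_until_ready:

    query = bytearray.fromhex("0011cb37000000010000000000000000010003")

    assert query[0:2] == b"\x00\x11"  # TCP length prefix: 17 bytes of DNS message follow
    assert query[2:4] == b"\xcb\x37"  # transaction ID 0xcb37, which the reply must echo
    # query[4:6]   flags 0x0000 (a standard query)
    # query[6:14]  QDCOUNT = 1, ANCOUNT/NSCOUNT/ARCOUNT = 0
    # query[14:]   question section: root name (0x00), QTYPE 0x0001 (A), QCLASS 0x0003 (CH)

    def looks_like_reply(buf):
        # Hypothetical helper mirroring the check in _wait_until_ready: only the
        # transaction ID of the response is compared.
        return bool(buf) and len(buf) > 4 and buf[2:4] == query[2:4]

    assert looks_like_reply(b"\x00\x0c\xcb\x37\x80\x00" + b"\x00" * 8)
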
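Aside (not part of the diff): how the new class is meant to be consumed, per its dns_xdist attribute and the context-manager protocol above. A sketch only; the import path follows the new file's location, and Docker must be available since _start_bind runs the BIND9 container.

    from certbot_integration_tests.utils.dns_server import DNSServer

    with DNSServer([], show_output=True) as dns_xdist:
        # dns_xdist mirrors BIND_BIND_ADDRESS: {'address': '127.0.0.1', 'port': 45953}
        bind_address = '{0}:{1}'.format(dns_xdist['address'], dns_xdist['port'])
        print('RFC2136-capable DNS server listening on ' + bind_address)
        # That 'IP:PORT' string is what the new --dns-server flag / dns_server parameter
        # of ACMEServer expects.
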
Some files were not shown because too many files have changed in this diff.