Compare commits
130 Commits
mutable-va...test-pytho
| Author | SHA1 | Date |
|---|---|---|
| | b973de36d5 | |
| | 8143f80478 | |
| | 38fc7fcc48 | |
| | 0e225dcba2 | |
| | 4ff5719a65 | |
| | 798a61622c | |
| | b20d01e032 | |
| | 990352e371 | |
| | c5a5d6f9a1 | |
| | d4850399c5 | |
| | c4be440853 | |
| | 165c3e32b0 | |
| | 2660a2017b | |
| | 6a6544fd90 | |
| | 320cf92944 | |
| | 3078c2f3db | |
| | c54f99e35b | |
| | c81dbb2582 | |
| | 742f97e11a | |
| | 84c8dbc52a | |
| | 4b51e3004c | |
| | 018800c5cc | |
| | 2eb4154169 | |
| | becc2c3fee | |
| | cb5382d4d5 | |
| | 6975e32998 | |
| | 62962357c5 | |
| | 343b540970 | |
| | 089b7efacd | |
| | 1584b0b58c | |
| | 141b15077c | |
| | ee2c4844b9 | |
| | 181813b9b2 | |
| | 43d0652b0d | |
| | 80e68bec26 | |
| | 7b2b2b1685 | |
| | c3c587001f | |
| | 281b724996 | |
| | 3d5714f499 | |
| | ba9f1939ab | |
| | 481c8c0600 | |
| | 35b177a1a0 | |
| | 95976762ac | |
| | bf64e7f4e4 | |
| | 9213154e44 | |
| | 810d50eb3d | |
| | 99a4129cd4 | |
| | 8db8fcf26c | |
| | 6d8fec7760 | |
| | 4f3af45f5c | |
| | 8ebd8ea9fb | |
| | 83d8fbbd75 | |
| | 0c49ab462f | |
| | 35091d878f | |
| | c31f53a225 | |
| | d2a13c55f2 | |
| | de1ce7340f | |
| | 929f9e944f | |
| | 6c422774d5 | |
| | 443ec2200f | |
| | 38cbeb560c | |
| | 873f979a25 | |
| | 2a41402f2a | |
| | 6ecf3782ac | |
| | d1347fce9a | |
| | 9412ce9f05 | |
| | fabe7bbc78 | |
| | 1e34fb8b51 | |
| | 4d7d0d6d04 | |
| | cf77b3c3fa | |
| | a7674bd45a | |
| | cdeac7a745 | |
| | 50b2097d38 | |
| | 30e7f23360 | |
| | 248455a92b | |
| | cca30ace31 | |
| | 90348bde4e | |
| | 54dd12cd57 | |
| | 4e6934a4b6 | |
| | 57bb4e40b7 | |
| | 7f885292f9 | |
| | 8978e4dbff | |
| | 920b717c45 | |
| | 54b7b1883e | |
| | 87ab76fc7d | |
| | 4925f71933 | |
| | 39fda1d44d | |
| | c8a1e30981 | |
| | 7abf143394 | |
| | f4e031f505 | |
| | 2844fdd74a | |
| | 3b183961a9 | |
| | 76411ecca7 | |
| | 725c64d581 | |
| | 99ae4ac5ef | |
| | b8b759f1d2 | |
| | 8b5a017b05 | |
| | b7ef536ec3 | |
| | 282df74ee9 | |
| | 0a565815f9 | |
| | d33bbf35c2 | |
| | 714a0b348d | |
| | 7ca1b8f286 | |
| | be40e377d9 | |
| | 01cf4bae75 | |
| | ef949f9149 | |
| | 926d0c7e0f | |
| | 9d8eb6ccfd | |
| | 585f70e700 | |
| | 21e24264f4 | |
| | cf78ad3a3d | |
| | dccb92d57f | |
| | f9d31faadc | |
| | e9225d1cc2 | |
| | 3dd1f0eea9 | |
| | 917e3aba6b | |
| | 3833255980 | |
| | 619654f317 | |
| | 76f9a33e45 | |
| | 5f67bb99a8 | |
| | d8392bf394 | |
| | 6a89fcbc56 | |
| | 2adaacab82 | |
| | 2ae810c45a | |
| | b62133e3e1 | |
| | a92bb44ff9 | |
| | 9650c25968 | |
| | c3c29afdca | |
| | dca4ddd3d8 | |
| | bf5475fa74 | |
@@ -1,8 +1,8 @@
# Configuring Azure Pipelines with Certbot

Let's begin. All pipelines are defined in `.azure-pipelines`. Currently there are two:
* `.azure-pipelines/main.yml` is the main one, executed on PRs for master, and pushes to master,
* `.azure-pipelines/advanced.yml` add installer testing on top of the main pipeline, and is executed for `test-*` branches, release branches, and nightly run for master.
* `.azure-pipelines/main.yml` is the main one, executed on PRs for main, and pushes to main,
* `.azure-pipelines/advanced.yml` add installer testing on top of the main pipeline, and is executed for `test-*` branches, release branches, and nightly run for main.

Several templates are defined in `.azure-pipelines/templates`. These YAML files aggregate common jobs configuration that can be reused in several pipelines.

@@ -64,7 +64,7 @@ Azure Pipeline needs RW on code, RO on metadata, RW on checks, commit statuses,
RW access here is required to allow update of the pipelines YAML files from Azure DevOps interface, and to
update the status of builds and PRs on GitHub side when Azure Pipelines are triggered.
Note however that no admin access is defined here: this means that Azure Pipelines cannot do anything with
protected branches, like master, and cannot modify the security context around this on GitHub.
protected branches, like main, and cannot modify the security context around this on GitHub.
Access can be defined for all or only selected repositories, which is nice.
```

@@ -91,11 +91,11 @@ grant permissions from Azure Pipelines to GitHub in order to setup a GitHub OAut
then are way too large (admin level on almost everything), while the classic approach does not add any more
permissions, and works perfectly well.__

- Select GitHub in "Select your repository section", choose certbot/certbot in Repository, master in default branch.
- Select GitHub in "Select your repository section", choose certbot/certbot in Repository, main in default branch.
- Click on YAML option for "Select a template"
- Choose a name for the pipeline (eg. test-pipeline), and browse to the actual pipeline YAML definition in the
"YAML file path" input (eg. `.azure-pipelines/test-pipeline.yml`)
- Click "Save & queue", choose the master branch to build the first pipeline, and click "Save and run" button.
- Click "Save & queue", choose the main branch to build the first pipeline, and click "Save and run" button.

_Done. Pipeline is operational. Repeat to add more pipelines from existing YAML files in `.azure-pipelines`._
@@ -1,9 +1,9 @@
# We run the test suite on commits to master so codecov gets coverage data
# about the master branch and can use it to track coverage changes.
# We run the test suite on commits to main so codecov gets coverage data
# about the main branch and can use it to track coverage changes.
trigger:
- master
- main
pr:
- master
- main
- '*.x'

variables:
@@ -1,4 +1,4 @@
# Nightly pipeline running each day for master.
# Nightly pipeline running each day for main.
trigger: none
pr: none
schedules:
@@ -6,7 +6,7 @@ schedules:
displayName: Nightly build
branches:
include:
- master
- main
always: true

variables:
@@ -15,5 +15,5 @@ variables:

stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/deploy-stage.yml
- template: templates/stages/nightly-deploy-stage.yml
- template: templates/stages/notify-failure-stage.yml
@@ -13,7 +13,5 @@ variables:
stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/changelog-stage.yml
- template: templates/stages/deploy-stage.yml
parameters:
snapReleaseChannel: beta
- template: templates/stages/release-deploy-stage.yml
- template: templates/stages/notify-failure-stage.yml
@@ -72,3 +72,57 @@ jobs:
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
done
displayName: Publish to Snap store
# The credentials used in the following jobs are for the shared
# certbotbot account on Docker Hub. The credentials are stored
# in a service account which was created by following the
# instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
# The name given to this service account must match the value
# given to containerRegistry below. The authentication used when
# creating this service account was a personal access token
# rather than a password to bypass 2FA. When Brad set this up,
# Azure Pipelines failed to verify the credentials with an error
# like "access is forbidden with a JWT issued from a personal
# access token", but after saving them without verification, the
# access token worked when the pipeline actually ran. "Grant
# access to all pipelines" should also be checked on the service
# account. The access token can be deleted on Docker Hub if
# these credentials need to be revoked.
- job: publish_docker_by_arch
pool:
vmImage: ubuntu-22.04
strategy:
matrix:
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
amd64:
DOCKER_ARCH: amd64
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker_$(DOCKER_ARCH)
path: $(Build.SourcesDirectory)
displayName: Retrieve Docker images
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
displayName: Load Docker images
- task: Docker@2
inputs:
command: login
containerRegistry: docker-hub
displayName: Login to Docker Hub
- bash: set -e && tools/docker/deploy_images.sh $(dockerTag) $DOCKER_ARCH
displayName: Deploy the Docker images by architecture
- job: publish_docker_multiarch
dependsOn: publish_docker_by_arch
pool:
vmImage: ubuntu-22.04
steps:
- task: Docker@2
inputs:
command: login
containerRegistry: docker-hub
displayName: Login to Docker Hub
- bash: set -e && tools/docker/deploy_manifests.sh $(dockerTag) all
displayName: Deploy the Docker multiarch manifests
@@ -4,7 +4,7 @@ jobs:
- name: IMAGE_NAME
value: ubuntu-22.04
- name: PYTHON_VERSION
value: 3.11
value: 3.12
- group: certbot-common
strategy:
matrix:
@@ -14,43 +14,42 @@ jobs:
linux-py310:
PYTHON_VERSION: 3.10
TOXENV: py310
linux-py311:
PYTHON_VERSION: 3.11
TOXENV: py311
linux-isolated:
TOXENV: 'isolated-{acme,certbot,apache,cloudflare,digitalocean,dnsimple,dnsmadeeasy,gehirn,google,linode,luadns,nsone,ovh,rfc2136,route53,sakuracloud,nginx}'
linux-boulder-v2-integration-certbot-oldest:
TOXENV: 'isolated-acme,isolated-certbot,isolated-apache,isolated-cloudflare,isolated-digitalocean,isolated-dnsimple,isolated-dnsmadeeasy,isolated-gehirn,isolated-google,isolated-linode,isolated-luadns,isolated-nsone,isolated-ovh,isolated-rfc2136,isolated-route53,isolated-sakuracloud,isolated-nginx'
linux-integration-certbot-oldest:
PYTHON_VERSION: 3.8
TOXENV: integration-certbot-oldest
ACME_SERVER: boulder-v2
linux-boulder-v2-integration-nginx-oldest:
linux-integration-nginx-oldest:
PYTHON_VERSION: 3.8
TOXENV: integration-nginx-oldest
ACME_SERVER: boulder-v2
linux-boulder-v2-py38-integration:
linux-py38-integration:
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v2-py39-integration:
linux-py39-integration:
PYTHON_VERSION: 3.9
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v2-py310-integration:
linux-py310-integration:
PYTHON_VERSION: 3.10
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v2-py311-integration:
linux-py311-integration:
PYTHON_VERSION: 3.11
TOXENV: integration
ACME_SERVER: boulder-v2
# python 3.12 integration tests are not run here because they're run as
# part of the standard test suite
nginx-compat:
TOXENV: nginx_compat
linux-integration-rfc2136:
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.8
PYTHON_VERSION: 3.12
TOXENV: integration-dns-rfc2136
le-modification:
IMAGE_NAME: ubuntu-22.04
TOXENV: modification
farmtest-apache2:
PYTHON_VERSION: 3.8
PYTHON_VERSION: 3.12
TOXENV: test-farm-apache2
pool:
vmImage: $(IMAGE_NAME)
@@ -1,119 +1,4 @@
jobs:
- job: docker_build
pool:
vmImage: ubuntu-22.04
strategy:
matrix:
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
amd64:
DOCKER_ARCH: amd64
# The default timeout of 60 minutes is a little low for compiling
# cryptography on ARM architectures.
timeoutInMinutes: 180
steps:
- bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
displayName: Build the Docker images
# We don't filter for the Docker Hub organization to continue to allow
# easy testing of these scripts on forks.
- bash: |
set -e
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
docker save --output images.tar $DOCKER_IMAGES
displayName: Save the Docker images
# If the name of the tar file or artifact changes, the deploy stage will
# also need to be updated.
- bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
displayName: Prepare Docker artifact
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: docker_$(DOCKER_ARCH)
displayName: Store Docker artifact
- job: docker_test
dependsOn: docker_build
pool:
vmImage: ubuntu-22.04
strategy:
matrix:
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
amd64:
DOCKER_ARCH: amd64
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker_$(DOCKER_ARCH)
path: $(Build.SourcesDirectory)
displayName: Retrieve Docker images
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
displayName: Load Docker images
- bash: |
set -e && tools/docker/test.sh $(dockerTag) $DOCKER_ARCH
displayName: Run integration tests for Docker images
- job: installer_build
pool:
vmImage: windows-2019
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.9
architecture: x64
addToPath: true
- script: |
python -m venv venv
venv\Scripts\python tools\pip_install.py -e windows-installer
displayName: Prepare Windows installer build environment
- script: |
venv\Scripts\construct-windows-installer
displayName: Build Certbot installer
- task: CopyFiles@2
inputs:
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
contents: '*.exe'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
artifact: windows-installer
displayName: Publish Windows installer
- job: installer_run
dependsOn: installer_build
strategy:
matrix:
win2019:
imageName: windows-2019
pool:
vmImage: $(imageName)
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.9
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: windows-installer
path: $(Build.SourcesDirectory)/bin
displayName: Retrieve Windows installer
- script: |
python -m venv venv
venv\Scripts\python tools\pip_install.py -e certbot-ci
env:
PIP_NO_BUILD_ISOLATION: no
displayName: Prepare Certbot-CI
- script: |
set PATH=%ProgramFiles%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win_amd64.exe
displayName: Run windows installer integration tests
- script: |
set PATH=%ProgramFiles%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
displayName: Run certbot integration tests
- job: snaps_build
pool:
vmImage: ubuntu-22.04
@@ -121,10 +6,6 @@ jobs:
matrix:
amd64:
SNAP_ARCH: amd64
armhf:
SNAP_ARCH: armhf
arm64:
SNAP_ARCH: arm64
timeoutInMinutes: 0
steps:
- script: |
@@ -135,7 +16,7 @@ jobs:
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
versionSpec: 3.12
addToPath: true
- task: DownloadSecureFile@1
name: credentials
@@ -147,12 +28,11 @@ jobs:
git config --global user.name "$(Build.RequestedFor)"
mkdir -p ~/.local/share/snapcraft/provider/launchpad
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
python3 tools/snap/build_remote.py ALL --archs ${SNAP_ARCH} --timeout $(snapBuildTimeout)
python3 tools/snap/build_remote.py certbot --archs ${SNAP_ARCH} --timeout $(snapBuildTimeout)
displayName: Build snaps
- script: |
set -e
mv *.snap $(Build.ArtifactStagingDirectory)
mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
displayName: Prepare artifacts
- task: PublishPipelineArtifact@1
inputs:
@@ -166,7 +46,7 @@ jobs:
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
versionSpec: 3.12
addToPath: true
- script: |
set -e
@@ -186,33 +66,5 @@ jobs:
displayName: Install Certbot snap
- script: |
set -e
venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
venv/bin/python -m tox run -e integration-external,apacheconftest-external-with-pebble
displayName: Run tox
- job: snap_dns_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-22.04
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_amd64
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- script: |
set -e
python3 -m venv venv
venv/bin/python tools/pip_install.py -e certbot-ci
displayName: Prepare Certbot-CI
- script: |
set -e
sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
displayName: Test DNS plugins snaps
@@ -1,42 +1,34 @@
jobs:
- job: test
variables:
PYTHON_VERSION: 3.11
PYTHON_VERSION: 3.12
strategy:
matrix:
macos-py38-cover:
IMAGE_NAME: macOS-12
# mac unit+cover tests with the oldest python we support
IMAGE_NAME: macOS-15
PYTHON_VERSION: 3.8
TOXENV: cover
# As of pip 23.1.0, builds started failing on macOS unless this flag was set.
# See https://github.com/certbot/certbot/pull/9717#issuecomment-1610861794.
PIP_USE_PEP517: "true"
macos-cover:
IMAGE_NAME: macOS-12
# mac unit+cover tests with the newest python we support
IMAGE_NAME: macOS-15
TOXENV: cover
# See explanation under macos-py38-cover.
PIP_USE_PEP517: "true"
windows-py38:
IMAGE_NAME: windows-2019
PYTHON_VERSION: 3.8
TOXENV: py-win
windows-py39-cover:
IMAGE_NAME: windows-2019
PYTHON_VERSION: 3.9
TOXENV: cover-win
windows-integration-certbot:
IMAGE_NAME: windows-2019
PYTHON_VERSION: 3.9
TOXENV: integration-certbot
linux-oldest:
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.8
TOXENV: oldest
linux-py38:
# linux unit tests with the oldest python we support
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.8
TOXENV: py38
linux-cover:
# linux unit+cover tests with the newest python we support
IMAGE_NAME: ubuntu-22.04
TOXENV: cover
linux-lint:
@@ -47,9 +39,7 @@ jobs:
TOXENV: mypy
linux-integration:
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: pebble
apache-compat:
IMAGE_NAME: ubuntu-22.04
TOXENV: apache_compat
@@ -1,67 +0,0 @@
parameters:
# We do not define acceptable values for this parameter here as it is passed
# through to ../jobs/snap-deploy-job.yml which does its own sanity checking.
- name: snapReleaseChannel
type: string
default: edge

stages:
- stage: Deploy
jobs:
- template: ../jobs/snap-deploy-job.yml
parameters:
snapReleaseChannel: ${{ parameters.snapReleaseChannel }}
# The credentials used in the following jobs are for the shared
# certbotbot account on Docker Hub. The credentials are stored
# in a service account which was created by following the
# instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
# The name given to this service account must match the value
# given to containerRegistry below. The authentication used when
# creating this service account was a personal access token
# rather than a password to bypass 2FA. When Brad set this up,
# Azure Pipelines failed to verify the credentials with an error
# like "access is forbidden with a JWT issued from a personal
# access token", but after saving them without verification, the
# access token worked when the pipeline actually ran. "Grant
# access to all pipelines" should also be checked on the service
# account. The access token can be deleted on Docker Hub if
# these credentials need to be revoked.
- job: publish_docker_by_arch
pool:
vmImage: ubuntu-22.04
strategy:
matrix:
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
amd64:
DOCKER_ARCH: amd64
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker_$(DOCKER_ARCH)
path: $(Build.SourcesDirectory)
displayName: Retrieve Docker images
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
displayName: Load Docker images
- task: Docker@2
inputs:
command: login
containerRegistry: docker-hub
displayName: Login to Docker Hub
- bash: set -e && tools/docker/deploy_images.sh $(dockerTag) $DOCKER_ARCH
displayName: Deploy the Docker images by architecture
- job: publish_docker_multiarch
dependsOn: publish_docker_by_arch
pool:
vmImage: ubuntu-22.04
steps:
- task: Docker@2
inputs:
command: login
containerRegistry: docker-hub
displayName: Login to Docker Hub
- bash: set -e && tools/docker/deploy_manifests.sh $(dockerTag) all
displayName: Deploy the Docker multiarch manifests
@@ -0,0 +1,6 @@
stages:
- stage: Deploy
jobs:
- template: ../jobs/common-deploy-jobs.yml
parameters:
snapReleaseChannel: edge
38 .azure-pipelines/templates/stages/release-deploy-stage.yml Normal file
@@ -0,0 +1,38 @@
stages:
- stage: Deploy
jobs:
- template: ../jobs/common-deploy-jobs.yml
parameters:
snapReleaseChannel: beta
- job: create_github_release
pool:
vmImage: ubuntu-22.04
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: changelog
path: '$(Pipeline.Workspace)'
- task: GitHubRelease@1
inputs:
# this "github-releases" credential is what azure pipelines calls a
# "service connection". it was created using the instructions at
# https://learn.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#github-service-connection
# with a fine-grained personal access token from github to limit
# the permissions given to azure pipelines. the connection on azure
# needs permissions for the "release" pipeline (and maybe the
# "full-test-suite" pipeline to simplify testing it). information
# on how to set up these permissions can be found at
# https://learn.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#secure-a-service-connection.
# the github token that is used needs "contents:write" and
# "workflows:write" permissions for the certbot repo
#
# as of writing this, the current token will expire on 3/15/2025.
# when recreating it, you may also want to create it using the
# shared "certbotbot" github account so the credentials aren't tied
# to any one dev's github account and their access to the certbot
# repo
gitHubConnection: github-releases
title: ${{ format('Certbot {0}', replace(variables['Build.SourceBranchName'], 'v', '')) }}
releaseNotesFilePath: '$(Pipeline.Workspace)/release_notes.md'
assets: '$(Build.SourcesDirectory)/packages/{*.tar.gz,SHA256SUMS*}'
addChangeLog: false
@@ -1,6 +1,4 @@
stages:
- stage: TestAndPackage
jobs:
- template: ../jobs/standard-tests-jobs.yml
- template: ../jobs/extended-tests-jobs.yml
- template: ../jobs/packaging-jobs.yml
@@ -44,7 +44,7 @@ steps:
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
env
python3 -m tox
python3 -m tox run
env:
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
2 .github/pull_request_template.md vendored
@@ -1,6 +1,6 @@
## Pull Request Checklist

- [ ] The Certbot team has recently expressed interest in reviewing a PR for this. If not, this PR may be closed due our limited resources and need to prioritize how we spend them.
- [ ] If the change being made is to a [distributed component](https://certbot.eff.org/docs/contributing.html#code-components-and-layout), edit the `master` section of `certbot/CHANGELOG.md` to include a description of the change being made.
- [ ] If the change being made is to a [distributed component](https://certbot.eff.org/docs/contributing.html#code-components-and-layout), edit the `main` section of `certbot/CHANGELOG.md` to include a description of the change being made.
- [ ] Add or update any documentation as needed to support the changes in this PR.
- [ ] Include your name in `AUTHORS.md` if you like.
25 .github/workflows/merged.yaml vendored
@@ -8,25 +8,14 @@ on:
jobs:
if_merged:
# Forked repos can not access Mattermost secret.
if: github.event.pull_request.merged == true && !github.event.pull_request.head.repo.fork
if: github.event.pull_request.merged == true && !github.event.pull_request.head.repo.fork
runs-on: ubuntu-latest
steps:
- name: Create Mattermost Message
# https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#example-of-a-script-injection-attack
env:
NUMBER: ${{ github.event.number }}
PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.number }}
REPO: ${{ github.repository }}
USER: ${{ github.actor }}
TITLE: ${{ github.event.pull_request.title }}
run: |
jq --null-input \
--arg number "$NUMBER" \
--arg pr_url "$PR_URL" \
--arg repo "$REPO" \
--arg user "$USER" \
--arg title "$TITLE" \
'{ "text": "[\($repo)] | [\($title) #\($number)](\($pr_url)) was merged into master by \($user)" }' > mattermost.json
- uses: mattermost/action-mattermost-notify@master
env:
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_MERGE_WEBHOOK }}
TEXT: >
[${{ github.repository }}] |
[${{ github.event.pull_request.title }}
#${{ github.event.number }}](https://github.com/${{ github.repository }}/pull/${{ github.event.number }})
was merged into main by ${{ github.actor }}
22 .github/workflows/notify_weekly.yaml vendored
@@ -4,6 +4,7 @@ on:
schedule:
# Every week on Thursday @ 13:00
- cron: "0 13 * * 4"
workflow_dispatch:
jobs:
send-mattermost-message:
runs-on: ubuntu-latest
@@ -11,15 +12,16 @@ jobs:
steps:
- name: Create Mattermost Message
run: |
DATE=$(date --date="7 days ago" +"%Y-%m-%d")
MERGED_URL="https://github.com/pulls?q=merged%3A%3E${DATE}+org%3Acertbot"
UPDATED_URL="https://github.com/pulls?q=updated%3A%3E${DATE}+org%3Acertbot"
echo "{\"text\":\"## Updates Across Certbot Repos\n\n
- Certbot team members SHOULD look at: [link]($MERGED_URL)\n\n
- Certbot team members MAY also want to look at: [link]($UPDATED_URL)\n\n
- Want to Discuss something today? Place it [here](https://docs.google.com/document/d/17YMUbtC1yg6MfiTMwT8zVm9LmO-cuGVBom0qFn8XJBM/edit?usp=sharing) and we can meet today on Zoom.\n\n
- The key words SHOULD and MAY in this message are to be interpreted as described in [RFC 8147](https://www.rfc-editor.org/rfc/rfc8174). \"
}" > mattermost.json
DATE=$(date --date="7 days ago" +"%Y-%m-%d")
echo "MERGED_URL=https://github.com/pulls?q=merged%3A%3E${DATE}+org%3Acertbot" >> $GITHUB_ENV
echo "UPDATED_URL=https://github.com/pulls?q=updated%3A%3E${DATE}+org%3Acertbot" >> $GITHUB_ENV
- uses: mattermost/action-mattermost-notify@master
env:
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK_URL }}
MATTERMOST_CHANNEL: private-certbot
TEXT: |
## Updates Across Certbot Repos
- Certbot team members SHOULD look at: [link](${{ env.MERGED_URL }})
- Certbot team members MAY also want to look at: [link](${{ env.UPDATED_URL }})
- Want to Discuss something today? Place it [here](https://docs.google.com/document/d/17YMUbtC1yg6MfiTMwT8zVm9LmO-cuGVBom0qFn8XJBM/edit?usp=sharing) and we can meet today on Zoom.
- The key words SHOULD and MAY in this message are to be interpreted as described in [RFC 8147](https://www.rfc-editor.org/rfc/rfc8174).
1 .github/workflows/stale.yml vendored
@@ -3,6 +3,7 @@ on:
schedule:
# Run 1:24AM every night
- cron: '24 1 * * *'
workflow_dispatch:
permissions:
issues: write
jobs:
@@ -69,7 +69,7 @@ ignored-modules=
# CERTBOT COMMENT
# This is needed for pylint to import linter_plugin.py since
# https://github.com/PyCQA/pylint/pull/3396.
init-hook="import pylint.config, os, sys; sys.path.append(os.path.dirname(pylint.config.PYLINTRC))"
init-hook="import pylint.config, os, sys; sys.path.append(os.path.dirname(next(pylint.config.find_default_config_files())))"

# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use, and will cap the count on Windows to
@@ -266,8 +266,8 @@ valid-metaclass-classmethod-first-arg=cls
[EXCEPTIONS]

# Exceptions that will emit a warning when caught.
overgeneral-exceptions=BaseException,
Exception
overgeneral-exceptions=builtins.BaseException,
builtins.Exception


[FORMAT]
@@ -524,7 +524,7 @@ ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace,
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis
ignored-modules=pkg_resources,confargparse,argparse
ignored-modules=confargparse,argparse

# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
@@ -94,6 +94,7 @@ Authors
* [Felix Yan](https://github.com/felixonmars)
* [Filip Ochnik](https://github.com/filipochnik)
* [Florian Klink](https://github.com/flokli)
* [Francesco Colista](https://github.com/fcolista)
* [Francois Marier](https://github.com/fmarier)
* [Frank](https://github.com/Frankkkkk)
* [Frederic BLANC](https://github.com/fblanc)
@@ -165,6 +166,7 @@ Authors
* [Luca Ebach](https://github.com/lucebac)
* [Luca Olivetti](https://github.com/olivluca)
* [Luke Rogers](https://github.com/lukeroge)
* [Lukhnos Liu](https://github.com/lukhnos)
* [Maarten](https://github.com/mrtndwrd)
* [Mads Jensen](https://github.com/atombrella)
* [Maikel Martens](https://github.com/krukas)
@@ -14,14 +14,14 @@ build:

# Build documentation in the "docs/" directory with Sphinx
sphinx:
configuration: docs/conf.py
configuration: acme/docs/conf.py
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
# builder: "dirhtml"
# Fail on all warnings to avoid broken references
fail_on_warning: true

# Optionally build your docs in additional formats such as PDF and ePub
# formats:
formats:
- pdf
- epub

@@ -30,4 +30,4 @@ sphinx:
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
install:
- requirements: ../tools/requirements.txt
- requirements: acme/readthedocs.org.requirements.txt
@@ -6,6 +6,7 @@ This module is an implementation of the `ACME protocol`_.

"""
import sys
import warnings

# This code exists to keep backwards compatibility with people using acme.jose
# before it became the standalone josepy package.
@@ -19,3 +20,10 @@ for mod in list(sys.modules):
# preserved (acme.jose.* is josepy.*)
if mod == 'josepy' or mod.startswith('josepy.'):
sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]

if sys.version_info[:2] == (3, 8):
warnings.warn(
"Python 3.8 support will be dropped in the next planned release of "
"acme. Please upgrade your Python version.",
PendingDeprecationWarning,
) # pragma: no cover
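The added block above makes `import acme` emit a `PendingDeprecationWarning` on Python 3.8. As a usage note (not part of this diff, and assuming the `acme` package is installed and has not already been imported), a downstream test suite could turn that warning into a hard failure:

```python
import warnings

# Only meaningful on Python 3.8; on newer interpreters the import stays silent.
# If acme was imported earlier in the process, the module body will not re-run
# and no warning is raised.
with warnings.catch_warnings():
    warnings.simplefilter("error", PendingDeprecationWarning)
    import acme  # noqa: F401  # raises on Python 3.8 if the warning fires
```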
@@ -12,7 +12,6 @@ from typing import List
from typing import Mapping
from typing import Optional
from typing import Set
from typing import Text
from typing import Tuple
from typing import Union

@@ -517,7 +516,7 @@ class ClientNetwork:
self.account = account
self.alg = alg
self.verify_ssl = verify_ssl
self._nonces: Set[Text] = set()
self._nonces: Set[str] = set()
self.user_agent = user_agent
self.session = requests.Session()
self._default_timeout = timeout
@@ -29,7 +29,7 @@ class Header(jose.Header):

class Signature(jose.Signature):
"""ACME-specific Signature. Uses ACME-specific Header for customer fields."""
__slots__ = jose.Signature._orig_slots # type: ignore[attr-defined] # pylint: disable=protected-access,no-member
__slots__ = jose.Signature._orig_slots # pylint: disable=protected-access,no-member

# TODO: decoder/encoder should accept cls? Otherwise, subclassing
# JSONObjectWithFields is tricky...
@@ -44,7 +44,7 @@ class Signature(jose.Signature):
class JWS(jose.JWS):
"""ACME-specific JWS. Includes none, url, and kid in protected header."""
signature_cls = Signature
__slots__ = jose.JWS._orig_slots # type: ignore[attr-defined] # pylint: disable=protected-access
__slots__ = jose.JWS._orig_slots # pylint: disable=protected-access

@classmethod
# type: ignore[override] # pylint: disable=arguments-differ
@@ -3,6 +3,6 @@ usage: jws [-h] [--compact] {sign,verify} ...
positional arguments:
{sign,verify}

optional arguments:
options:
-h, --help show this help message and exit
--compact
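The expected CLI help changes because argparse renamed this section heading from "optional arguments:" to "options:" starting with Python 3.10. A small standalone reproduction (plain Python, not taken from the repository):

```python
import argparse

# On Python >= 3.10 the generated help titles the section "options:";
# on 3.9 and earlier it prints "optional arguments:", matching the old
# expected output shown above.
parser = argparse.ArgumentParser(prog='jws')
parser.add_argument('--compact', action='store_true')
subparsers = parser.add_subparsers()
subparsers.add_parser('sign')
subparsers.add_parser('verify')
print(parser.format_help())
```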
@@ -3,11 +3,13 @@ import sys
from setuptools import find_packages
from setuptools import setup

version = '2.8.0.dev0'
version = '3.1.0.dev0'

install_requires = [
'cryptography>=3.2.1',
'josepy>=1.13.0',
# Josepy 2+ may introduce backward incompatible changes by droping usage of
# deprecated PyOpenSSL APIs.
'josepy>=1.13.0, <2',
# pyOpenSSL 23.1.0 is a bad release: https://github.com/pyca/pyopenssl/issues/1199
'PyOpenSSL>=17.5.0,!=23.1.0',
'pyrfc3339',
@@ -55,6 +57,7 @@ setup(
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: 3.12',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],
@@ -257,6 +257,6 @@ def find_ssl_apache_conf(prefix: str) -> str:
"""
file_manager = ExitStack()
atexit.register(file_manager.close)
ref = importlib_resources.files("certbot_apache").joinpath(
"_internal", "tls_configs", "{0}-options-ssl-apache.conf".format(prefix))
ref = (importlib_resources.files("certbot_apache").joinpath("_internal")
.joinpath("tls_configs").joinpath("{0}-options-ssl-apache.conf".format(prefix)))
return str(file_manager.enter_context(importlib_resources.as_file(ref)))
@@ -4,6 +4,7 @@ from typing import Type

from certbot import util
from certbot_apache._internal import configurator
from certbot_apache._internal import override_alpine
from certbot_apache._internal import override_arch
from certbot_apache._internal import override_centos
from certbot_apache._internal import override_darwin
@@ -14,6 +15,7 @@ from certbot_apache._internal import override_suse
from certbot_apache._internal import override_void

OVERRIDE_CLASSES: Dict[str, Type[configurator.ApacheConfigurator]] = {
"alpine": override_alpine.AlpineConfigurator,
"arch": override_arch.ArchConfigurator,
"cloudlinux": override_centos.CentOSConfigurator,
"darwin": override_darwin.DarwinConfigurator,
19 certbot-apache/certbot_apache/_internal/override_alpine.py Normal file
@@ -0,0 +1,19 @@
""" Distribution specific override class for Alpine Linux """
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions


class AlpineConfigurator(configurator.ApacheConfigurator):
"""Alpine Linux specific ApacheConfigurator override class"""

OS_DEFAULTS = OsOptions(
server_root="/etc/apache2",
vhost_root="/etc/apache2/conf.d",
vhost_files="*.conf",
logs_root="/var/log/apache2",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
challenge_location="/etc/apache2/conf.d",
)
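Together, the registry change and the new module register Alpine-specific Apache defaults. A hypothetical sanity check, assuming the `OVERRIDE_CLASSES` dict shown above lives in `certbot_apache._internal.entrypoint` (the file name is not visible in this compare view), that certbot-apache is installed, and that `OsOptions` exposes its fields as attributes:

```python
# Hypothetical check of the new "alpine" registration; the module path and the
# OsOptions attribute access are assumptions, not confirmed by this diff.
from certbot_apache._internal.entrypoint import OVERRIDE_CLASSES
from certbot_apache._internal.override_alpine import AlpineConfigurator

assert OVERRIDE_CLASSES["alpine"] is AlpineConfigurator
assert AlpineConfigurator.OS_DEFAULTS.server_root == "/etc/apache2"
```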
@@ -14,7 +14,7 @@ SCRIPT_DIRNAME = os.path.dirname(__file__)

def main() -> int:
args = sys.argv[1:]
with acme_server.ACMEServer('pebble', [], False) as acme_xdist:
with acme_server.ACMEServer([], False) as acme_xdist:
environ = os.environ.copy()
environ['SERVER'] = acme_xdist['directory_url']
command = [os.path.join(SCRIPT_DIRNAME, 'apache-conf-test')]
@@ -1,7 +1,7 @@
from setuptools import find_packages
from setuptools import setup

version = '2.8.0.dev0'
version = '3.1.0.dev0'

install_requires = [
# We specify the minimum acme and certbot version as the current plugin
@@ -43,6 +43,7 @@ setup(
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: 3.12',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
@@ -23,7 +23,6 @@ class IntegrationTestsContext:
self.worker_id = 'primary'
acme_xdist = request.config.acme_xdist # type: ignore[attr-defined]

self.acme_server = acme_xdist['acme_server']
self.directory_url = acme_xdist['directory_url']
self.tls_alpn_01_port = acme_xdist['https_port'][self.worker_id]
self.http_01_port = acme_xdist['http_port'][self.worker_id]
@@ -7,7 +7,6 @@ import shutil
import subprocess
import time
from typing import Generator
from typing import Iterable
from typing import Tuple
from typing import Type

@@ -82,11 +81,9 @@ def test_registration_override(context: IntegrationTestsContext) -> None:
context.certbot(['update_account', '--email', 'ex1@domain.org,ex2@domain.org'])
stdout2, _ = context.certbot(['show_account'])

# https://github.com/letsencrypt/boulder/issues/6144
if context.acme_server != 'boulder-v2':
assert 'example@domain.org' in stdout1, "New email should be present"
assert 'example@domain.org' not in stdout2, "Old email should not be present"
assert 'ex1@domain.org, ex2@domain.org' in stdout2, "New emails should be present"
assert 'example@domain.org' in stdout1, "New email should be present"
assert 'example@domain.org' not in stdout2, "Old email should not be present"
assert 'ex1@domain.org, ex2@domain.org' in stdout2, "New emails should be present"


def test_prepare_plugins(context: IntegrationTestsContext) -> None:
@@ -566,19 +563,15 @@ def test_default_rsa_size(context: IntegrationTestsContext) -> None:
assert_rsa_key(key1, 2048)


@pytest.mark.parametrize('curve,curve_cls,skip_servers', [
@pytest.mark.parametrize('curve,curve_cls', [
# Curve name, Curve class, ACME servers to skip
('secp256r1', SECP256R1, []),
('secp384r1', SECP384R1, []),
('secp521r1', SECP521R1, ['boulder-v2'])]
('secp256r1', SECP256R1),
('secp384r1', SECP384R1),
('secp521r1', SECP521R1)]
)
def test_ecdsa_curves(context: IntegrationTestsContext, curve: str, curve_cls: Type[EllipticCurve],
skip_servers: Iterable[str]) -> None:
def test_ecdsa_curves(context: IntegrationTestsContext, curve: str,
curve_cls: Type[EllipticCurve]) -> None:
"""Test issuance for each supported ECDSA curve"""
if context.acme_server in skip_servers:
pytest.skip('ACME server {} does not support ECDSA curve {}'
.format(context.acme_server, curve))

domain = context.get_domain('curve')
context.certbot([
'certonly',
@@ -640,9 +633,6 @@ def test_renew_with_ec_keys(context: IntegrationTestsContext) -> None:

def test_ocsp_must_staple(context: IntegrationTestsContext) -> None:
"""Test that OCSP Must-Staple is correctly set in the generated certificate."""
if context.acme_server == 'pebble':
pytest.skip('Pebble does not support OCSP Must-Staple.')

certname = context.get_domain('must-staple')
context.certbot(['auth', '--must-staple', '--domains', certname])

@@ -710,17 +700,14 @@ def test_revoke_and_unregister(context: IntegrationTestsContext) -> None:
assert cert3 in stdout


@pytest.mark.parametrize('curve,curve_cls,skip_servers', [
('secp256r1', SECP256R1, []),
('secp384r1', SECP384R1, []),
('secp521r1', SECP521R1, ['boulder-v2'])]
@pytest.mark.parametrize('curve,curve_cls', [
('secp256r1', SECP256R1),
('secp384r1', SECP384R1),
('secp521r1', SECP521R1)]
)
def test_revoke_ecdsa_cert_key(
context: IntegrationTestsContext, curve: str, curve_cls: Type[EllipticCurve],
skip_servers: Iterable[str]) -> None:
context: IntegrationTestsContext, curve: str, curve_cls: Type[EllipticCurve]) -> None:
"""Test revoking a certificate """
if context.acme_server in skip_servers:
pytest.skip(f'ACME server {context.acme_server} does not support ECDSA curve {curve}')
cert: str = context.get_domain('curve')
context.certbot([
'certonly',
@@ -738,17 +725,14 @@ def test_revoke_ecdsa_cert_key(
assert stdout.count('INVALID: REVOKED') == 1, 'Expected {0} to be REVOKED'.format(cert)


@pytest.mark.parametrize('curve,curve_cls,skip_servers', [
('secp256r1', SECP256R1, []),
('secp384r1', SECP384R1, []),
('secp521r1', SECP521R1, ['boulder-v2'])]
@pytest.mark.parametrize('curve,curve_cls', [
('secp256r1', SECP256R1),
('secp384r1', SECP384R1),
('secp521r1', SECP521R1)]
)
def test_revoke_ecdsa_cert_key_delete(
context: IntegrationTestsContext, curve: str, curve_cls: Type[EllipticCurve],
skip_servers: Iterable[str]) -> None:
context: IntegrationTestsContext, curve: str, curve_cls: Type[EllipticCurve]) -> None:
"""Test revoke and deletion for each supported curve type"""
if context.acme_server in skip_servers:
pytest.skip(f'ACME server {context.acme_server} does not support ECDSA curve {curve}')
cert: str = context.get_domain('curve')
context.certbot([
'certonly',
@@ -913,7 +897,7 @@ def test_dry_run_deactivate_authzs(context: IntegrationTestsContext) -> None:
def test_preferred_chain(context: IntegrationTestsContext) -> None:
"""Test that --preferred-chain results in the correct chain.pem being produced"""
try:
issuers = misc.get_acme_issuers(context)
issuers = misc.get_acme_issuers()
except NotImplementedError:
pytest.skip('This ACME server does not support alternative issuers.')
@@ -8,7 +8,6 @@ for a directory a specific configuration using built-in pytest hooks.
See https://docs.pytest.org/en/latest/reference.html#hook-reference
"""
import contextlib
import subprocess
import sys

from certbot_integration_tests.utils import acme_server as acme_lib
@@ -20,10 +19,6 @@ def pytest_addoption(parser):
Standard pytest hook to add options to the pytest parser.
:param parser: current pytest parser that will be used on the CLI
"""
parser.addoption('--acme-server', default='pebble',
choices=['boulder-v2', 'pebble'],
help='select the ACME server to use (boulder-v2, pebble), '
'defaulting to pebble')
parser.addoption('--dns-server', default='challtestsrv',
choices=['bind', 'challtestsrv'],
help='select the DNS server to use (bind, challtestsrv), '
@@ -69,7 +64,7 @@ def _setup_primary_node(config):
Setup the environment for integration tests.

This function will:
- check runtime compatibility (Docker, docker-compose, Nginx)
- check runtime compatibility (Docker, docker compose, Nginx)
- create a temporary workspace and the persistent GIT repositories space
- configure and start a DNS server using Docker, if configured
- configure and start paralleled ACME CA servers using Docker
@@ -80,22 +75,6 @@ def _setup_primary_node(config):

:param config: Configuration of the pytest primary node. Is modified by this function.
"""
# Check for runtime compatibility: some tools are required to be available in PATH
if 'boulder' in config.option.acme_server:
try:
subprocess.check_output(['docker', '-v'], stderr=subprocess.STDOUT)
except (subprocess.CalledProcessError, OSError):
raise ValueError('Error: docker is required in PATH to launch the integration tests on'
'boulder, but is not installed or not available for current user.')

try:
subprocess.check_output(['docker-compose', '-v'], stderr=subprocess.STDOUT)
except (subprocess.CalledProcessError, OSError):
raise ValueError(
'Error: docker-compose is required in PATH to launch the integration tests, '
'but is not installed or not available for current user.'
)

# Parameter numprocesses is added to option by pytest-xdist
workers = ['primary'] if not config.option.numprocesses\
else ['gw{0}'.format(i) for i in range(config.option.numprocesses)]
@@ -115,8 +94,7 @@ def _setup_primary_node(config):

# By calling setup_acme_server we ensure that all necessary acme server instances will be
# fully started. This runtime is reflected by the acme_xdist returned.
acme_server = acme_lib.ACMEServer(config.option.acme_server, workers,
dns_server=acme_dns_server)
acme_server = acme_lib.ACMEServer(workers, dns_server=acme_dns_server)
config.add_cleanup(acme_server.stop)
print('ACME xdist config:\n{0}'.format(acme_server.acme_xdist))
acme_server.start()
@@ -32,13 +32,15 @@ def construct_nginx_config(nginx_root: str, nginx_webroot: str, http_port: int,
if not key_path:
file_manager = ExitStack()
atexit.register(file_manager.close)
ref = importlib_resources.files('certbot_integration_tests').joinpath('assets', 'key.pem')
ref = (importlib_resources.files('certbot_integration_tests').joinpath('assets')
.joinpath('key.pem'))
key_path = str(file_manager.enter_context(importlib_resources.as_file(ref)))

if not cert_path:
file_manager = ExitStack()
atexit.register(file_manager.close)
ref = importlib_resources.files('certbot_integration_tests').joinpath('assets', 'cert.pem')
ref = (importlib_resources.files('certbot_integration_tests').joinpath('assets')
.joinpath('cert.pem'))
cert_path = str(file_manager.enter_context(importlib_resources.as_file(ref)))

return '''\
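This hunk, like the similar ones in the Apache and RFC2136 helpers, replaces a single multi-segment `joinpath('assets', 'key.pem')` call with chained single-segment calls. A standalone sketch, assuming the motivation is compatibility with older `importlib_resources` releases whose `Traversable.joinpath` takes only one segment per call (the diff itself does not state the reason), and assuming `certbot-ci` is installed:

```python
import importlib_resources

# On a recent importlib_resources both spellings resolve the same packaged
# file; only the chained form also works on the older releases.
multi = importlib_resources.files('certbot_integration_tests').joinpath('assets', 'key.pem')
chained = (importlib_resources.files('certbot_integration_tests')
           .joinpath('assets').joinpath('key.pem'))
assert str(multi) == str(chained)
```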
@@ -48,9 +48,8 @@ class IntegrationTestsContext(certbot_context.IntegrationTestsContext):
:yields: Path to credentials file
:rtype: str
"""
src_ref_file = importlib_resources.files('certbot_integration_tests').joinpath(
'assets', 'bind-config', f'rfc2136-credentials-{label}.ini.tpl'
)
src_ref_file = (importlib_resources.files('certbot_integration_tests').joinpath('assets')
.joinpath('bind-config').joinpath(f'rfc2136-credentials-{label}.ini.tpl'))
with importlib_resources.as_file(src_ref_file) as src_file:
with open(src_file, 'r') as f:
contents = f.read().format(
@@ -5,7 +5,6 @@ import argparse
import errno
import json
import os
from os.path import join
import shutil
import subprocess
import sys
@@ -18,14 +17,12 @@ from typing import Dict
from typing import List
from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import Type

import requests

# pylint: disable=wildcard-import,unused-wildcard-import
from certbot_integration_tests.utils import misc
from certbot_integration_tests.utils import pebble_artifacts
from certbot_integration_tests.utils import pebble_ocsp_server
from certbot_integration_tests.utils import proxy
from certbot_integration_tests.utils.constants import *

@@ -42,34 +39,30 @@ class ACMEServer:
ACMEServer is also a context manager, and so can be used to ensure ACME server is
started/stopped upon context enter/exit.
"""
def __init__(self, acme_server: str, nodes: List[str], http_proxy: bool = True,
def __init__(self, nodes: List[str], http_proxy: bool = True,
stdout: bool = False, dns_server: Optional[str] = None,
http_01_port: Optional[int] = None) -> None:
"""
Create an ACMEServer instance.
:param str acme_server: the type of acme server used (boulder-v2 or pebble)
:param list nodes: list of node names that will be setup by pytest xdist
:param bool http_proxy: if False do not start the HTTP proxy
:param bool stdout: if True stream all subprocesses stdout to standard stdout
:param str dns_server: if set, Pebble/Boulder will use it to resolve domains
:param str dns_server: if set, Pebble will use it to resolve domains
:param int http_01_port: port to use for http-01 validation; currently
only supported for pebble without an HTTP proxy
"""
self._construct_acme_xdist(acme_server, nodes)
self._construct_acme_xdist(nodes)

self._acme_type = 'pebble' if acme_server == 'pebble' else 'boulder'
self._proxy = http_proxy
self._workspace = tempfile.mkdtemp()
self._processes: List[subprocess.Popen] = []
self._stdout = sys.stdout if stdout else open(os.devnull, 'w') # pylint: disable=consider-using-with
self._dns_server = dns_server
self._preterminate_cmds_args: List[Tuple[Tuple[Any, ...], Dict[str, Any]]] = []
self._http_01_port = BOULDER_HTTP_01_PORT if self._acme_type == 'boulder' \
else DEFAULT_HTTP_01_PORT
self._http_01_port = DEFAULT_HTTP_01_PORT
if http_01_port:
if (self._acme_type == 'pebble' and self._proxy) or self._acme_type == 'boulder':
if self._proxy:
raise ValueError('Setting http_01_port is not currently supported when '
'using Boulder or the HTTP proxy')
'using the HTTP proxy')
self._http_01_port = http_01_port

def start(self) -> None:
@@ -77,10 +70,7 @@ class ACMEServer:
try:
if self._proxy:
self._prepare_http_proxy()
if self._acme_type == 'pebble':
self._prepare_pebble_server()
if self._acme_type == 'boulder':
self._prepare_boulder_server()
self._prepare_pebble_server()
except BaseException as e:
self.stop()
raise e
@@ -89,7 +79,6 @@ class ACMEServer:
"""Stop the test stack, and clean its resources"""
print('=> Tear down the test infrastructure...')
try:
self._run_preterminate_cmds()
for process in self._processes:
try:
process.terminate()
@@ -115,19 +104,14 @@ class ACMEServer:
traceback: Optional[TracebackType]) -> None:
self.stop()

def _construct_acme_xdist(self, acme_server: str, nodes: List[str]) -> None:
def _construct_acme_xdist(self, nodes: List[str]) -> None:
"""Generate and return the acme_xdist dict"""
acme_xdist: Dict[str, Any] = {'acme_server': acme_server}
acme_xdist: Dict[str, Any] = {}

# Directory and ACME port are set implicitly in the docker-compose.yml
# files of Boulder/Pebble.
if acme_server == 'pebble':
acme_xdist['directory_url'] = PEBBLE_DIRECTORY_URL
acme_xdist['challtestsrv_url'] = PEBBLE_CHALLTESTSRV_URL
else: # boulder
acme_xdist['directory_url'] = BOULDER_V2_DIRECTORY_URL
acme_xdist['challtestsrv_url'] = BOULDER_V2_CHALLTESTSRV_URL

# files of Pebble.
acme_xdist['directory_url'] = PEBBLE_DIRECTORY_URL
acme_xdist['challtestsrv_url'] = PEBBLE_CHALLTESTSRV_URL
acme_xdist['http_port'] = dict(zip(nodes, range(5200, 5200 + len(nodes))))
acme_xdist['https_port'] = dict(zip(nodes, range(5100, 5100 + len(nodes))))
acme_xdist['other_port'] = dict(zip(nodes, range(5300, 5300 + len(nodes))))
@@ -161,11 +145,6 @@ class ACMEServer:
|
||||
[pebble_path, '-config', pebble_config_path, '-dnsserver', dns_server, '-strict'],
|
||||
env=environ)
|
||||
|
||||
# pebble_ocsp_server is imported here and not at the top of module in order to avoid a
|
||||
# useless ImportError, in the case where cryptography dependency is too old to support
|
||||
# ocsp, but Boulder is used instead of Pebble, so pebble_ocsp_server is not used. This is
|
||||
# the typical situation of integration-certbot-oldest tox testenv.
|
||||
from certbot_integration_tests.utils import pebble_ocsp_server
|
||||
self._launch_process([sys.executable, pebble_ocsp_server.__file__])
|
||||
|
||||
# Wait for the ACME CA server to be up.
|
||||
@@ -174,68 +153,6 @@ class ACMEServer:
|
||||
|
||||
print('=> Finished pebble instance deployment.')
|
||||
|
||||
def _prepare_boulder_server(self) -> None:
|
||||
"""Configure and launch the Boulder server"""
|
||||
print('=> Starting boulder instance deployment...')
|
||||
instance_path = join(self._workspace, 'boulder')
|
||||
|
||||
# Load Boulder from git, that includes a docker-compose.yml ready for production.
|
||||
process = self._launch_process(['git', 'clone', 'https://github.com/letsencrypt/boulder',
|
||||
'--single-branch', '--depth=1', instance_path])
|
||||
process.wait(MAX_SUBPROCESS_WAIT)
|
||||
|
||||
# Allow Boulder to ignore usual limit rate policies, useful for tests.
|
||||
os.rename(join(instance_path, 'test/rate-limit-policies-b.yml'),
|
||||
join(instance_path, 'test/rate-limit-policies.yml'))
|
||||
|
||||
if self._dns_server:
|
||||
# Change Boulder config to use the provided DNS server
|
||||
for suffix in ["", "-remote-a", "-remote-b"]:
|
||||
with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'r') as f:
|
||||
config = json.loads(f.read())
|
||||
config['va']['dnsResolvers'] = [self._dns_server]
|
||||
with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'w') as f:
|
||||
f.write(json.dumps(config, indent=2, separators=(',', ': ')))
|
||||
|
||||
# This command needs to be run before we try and terminate running processes because
|
||||
# docker-compose up doesn't always respond to SIGTERM. See
|
||||
# https://github.com/certbot/certbot/pull/9435.
|
||||
self._register_preterminate_cmd(['docker-compose', 'down'], cwd=instance_path)
|
||||
# Boulder docker generates build artifacts owned by root with 0o744 permissions.
|
||||
# If we started the acme server from a normal user that has access to the Docker
|
||||
# daemon, this user will not be able to delete these artifacts from the host.
|
||||
# We need to do it through a docker.
|
||||
self._register_preterminate_cmd(['docker', 'run', '--rm', '-v',
|
||||
'{0}:/workspace'.format(self._workspace), 'alpine', 'rm',
|
||||
'-rf', '/workspace/boulder'])
|
||||
try:
|
||||
# Launch the Boulder server
|
||||
self._launch_process(['docker-compose', 'up', '--force-recreate'], cwd=instance_path)
|
||||
|
||||
# Wait for the ACME CA server to be up.
|
||||
print('=> Waiting for boulder instance to respond...')
|
||||
misc.check_until_timeout(
|
||||
self.acme_xdist['directory_url'], attempts=300)
|
||||
|
||||
if not self._dns_server:
|
||||
# Configure challtestsrv to answer any A record request with ip of the docker host.
|
||||
response = requests.post(
|
||||
f'{BOULDER_V2_CHALLTESTSRV_URL}/set-default-ipv4',
|
||||
json={'ip': '10.77.77.1'},
|
||||
timeout=10
|
||||
)
|
||||
response.raise_for_status()
|
||||
except BaseException:
|
||||
# If we failed to set up boulder, print its logs.
|
||||
print('=> Boulder setup failed. Boulder logs are:')
|
||||
process = self._launch_process([
|
||||
'docker-compose', 'logs'], cwd=instance_path, force_stderr=True
|
||||
)
|
||||
process.wait(MAX_SUBPROCESS_WAIT)
|
||||
raise
|
||||
|
||||
print('=> Finished boulder instance deployment.')
|
||||
|
||||
def _prepare_http_proxy(self) -> None:
|
||||
"""Configure and launch an HTTP proxy"""
|
||||
print(f'=> Configuring the HTTP proxy on port {self._http_01_port}...')
|
||||
@@ -260,26 +177,11 @@ class ACMEServer:
|
||||
self._processes.append(process)
|
||||
return process
|
||||
|
||||
def _register_preterminate_cmd(self, *args: Any, **kwargs: Any) -> None:
|
||||
self._preterminate_cmds_args.append((args, kwargs))
|
||||
|
||||
def _run_preterminate_cmds(self) -> None:
|
||||
for args, kwargs in self._preterminate_cmds_args:
|
||||
process = self._launch_process(*args, **kwargs)
|
||||
process.wait(MAX_SUBPROCESS_WAIT)
|
||||
# It's unlikely to matter, but let's clear the list of cleanup commands
|
||||
# once they've been run.
|
||||
self._preterminate_cmds_args.clear()
|
||||
|
||||
|
||||
def main() -> None:
|
||||
# pylint: disable=missing-function-docstring
|
||||
parser = argparse.ArgumentParser(
|
||||
description='CLI tool to start a local instance of Pebble or Boulder CA server.')
|
||||
parser.add_argument('--server-type', '-s',
|
||||
choices=['pebble', 'boulder-v2'], default='pebble',
|
||||
help='type of CA server to start: can be Pebble or Boulder. '
|
||||
'Pebble is used if not set.')
|
||||
description='CLI tool to start a local instance of Pebble CA server.')
|
||||
parser.add_argument('--dns-server', '-d',
|
||||
help='specify the DNS server as `IP:PORT` to use by '
|
||||
'Pebble; if not specified, a local mock DNS server will be used to '
|
||||
@@ -290,8 +192,8 @@ def main() -> None:
|
||||
args = parser.parse_args()
|
||||
|
||||
acme_server = ACMEServer(
|
||||
args.server_type, [], http_proxy=False, stdout=True,
|
||||
dns_server=args.dns_server, http_01_port=args.http_01_port,
|
||||
[], http_proxy=False, stdout=True, dns_server=args.dns_server,
|
||||
http_01_port=args.http_01_port,
|
||||
)
|
||||
|
||||
try:
|
||||
|
||||
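The class above is documented as a context manager ("started/stopped upon context enter/exit"), so a short usage sketch may help when reading this diff. This is illustrative only and not part of the change set; the module path and the assumption that `__enter__` returns the `acme_xdist` mapping are inferred rather than taken from the diff.

```python
# Illustrative sketch only (not part of this diff). Assumes ACMEServer lives in
# certbot_integration_tests.utils.acme_server and that __enter__ starts the
# stack and returns the acme_xdist mapping, as the docstring above suggests.
from certbot_integration_tests.utils.acme_server import ACMEServer

nodes = ['gw0', 'gw1']  # pytest-xdist worker names

with ACMEServer(nodes, http_proxy=True) as acme_xdist:
    directory_url = acme_xdist['directory_url']   # Pebble directory URL
    http_port = acme_xdist['http_port']['gw0']    # per-node http-01 port
    # ... run certbot integration tests against directory_url here ...
```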
@@ -96,7 +96,6 @@ def _prepare_args_env(certbot_args: List[str], directory_url: str, http_01_port:
'--no-verify-ssl',
'--http-01-port', str(http_01_port),
'--https-port', str(tls_alpn_01_port),
'--manual-public-ip-logging-ok',
'--config-dir', config_dir,
'--work-dir', os.path.join(workspace, 'work'),
'--logs-dir', os.path.join(workspace, 'logs'),

@@ -1,10 +1,7 @@
"""Some useful constants to use throughout certbot-ci integration tests"""
DEFAULT_HTTP_01_PORT = 5002
BOULDER_HTTP_01_PORT = 80
TLS_ALPN_01_PORT = 5001
CHALLTESTSRV_PORT = 8055
BOULDER_V2_CHALLTESTSRV_URL = f'http://10.77.77.77:{CHALLTESTSRV_PORT}'
BOULDER_V2_DIRECTORY_URL = 'http://localhost:4001/directory'
PEBBLE_DIRECTORY_URL = 'https://localhost:14000/dir'
PEBBLE_MANAGEMENT_URL = 'https://localhost:15000'
PEBBLE_CHALLTESTSRV_URL = f'http://localhost:{CHALLTESTSRV_PORT}'
@@ -22,7 +22,7 @@ if sys.version_info >= (3, 9): # pragma: no cover
|
||||
else: # pragma: no cover
|
||||
import importlib_resources
|
||||
|
||||
BIND_DOCKER_IMAGE = "internetsystemsconsortium/bind9:9.16"
|
||||
BIND_DOCKER_IMAGE = "internetsystemsconsortium/bind9:9.20"
|
||||
BIND_BIND_ADDRESS = ("127.0.0.1", 45953)
|
||||
|
||||
# A TCP DNS message which is a query for '. CH A' transaction ID 0xcb37. This is used
|
||||
|
||||
@@ -33,7 +33,6 @@ from cryptography.x509 import load_pem_x509_certificate
|
||||
from OpenSSL import crypto
|
||||
import requests
|
||||
|
||||
from certbot_integration_tests.certbot_tests.context import IntegrationTestsContext
|
||||
from certbot_integration_tests.utils.constants import PEBBLE_ALTERNATE_ROOTS
|
||||
from certbot_integration_tests.utils.constants import PEBBLE_MANAGEMENT_URL
|
||||
|
||||
@@ -125,8 +124,8 @@ def generate_test_file_hooks(config_dir: str, hook_probe: str) -> None:
|
||||
"""
|
||||
file_manager = contextlib.ExitStack()
|
||||
atexit.register(file_manager.close)
|
||||
hook_path_ref = importlib_resources.files('certbot_integration_tests').joinpath(
|
||||
'assets', 'hook.py')
|
||||
hook_path_ref = (importlib_resources.files('certbot_integration_tests').joinpath('assets')
|
||||
.joinpath('hook.py'))
|
||||
hook_path = str(file_manager.enter_context(importlib_resources.as_file(hook_path_ref)))
|
||||
|
||||
for hook_dir in list_renewal_hooks_dirs(config_dir):
|
||||
@@ -262,9 +261,8 @@ def load_sample_data_path(workspace: str) -> str:
|
||||
:returns: the path to the loaded sample data directory
|
||||
:rtype: str
|
||||
"""
|
||||
original_ref = importlib_resources.files('certbot_integration_tests').joinpath(
|
||||
'assets', 'sample-config'
|
||||
)
|
||||
original_ref = (importlib_resources.files('certbot_integration_tests').joinpath('assets')
|
||||
.joinpath('sample-config'))
|
||||
with importlib_resources.as_file(original_ref) as original:
|
||||
copied = os.path.join(workspace, 'sample-config')
|
||||
shutil.copytree(original, copied, symlinks=True)
|
||||
@@ -304,16 +302,12 @@ def echo(keyword: str, path: Optional[str] = None) -> str:
|
||||
os.path.basename(sys.executable), keyword, ' >> "{0}"'.format(path) if path else '')
|
||||
|
||||
|
||||
def get_acme_issuers(context: IntegrationTestsContext) -> List[Certificate]:
|
||||
def get_acme_issuers() -> List[Certificate]:
|
||||
"""Gets the list of one or more issuer certificates from the ACME server used by the
|
||||
context.
|
||||
:param context: the testing context.
|
||||
:return: the `list of x509.Certificate` representing the list of issuers.
|
||||
"""
|
||||
# TODO: in fact, Boulder has alternate chains in config-next/, just not yet in config/.
|
||||
if context.acme_server != "pebble":
|
||||
raise NotImplementedError()
|
||||
|
||||
_suppress_x509_verification_warnings()
|
||||
|
||||
issuers = []
|
||||
|
||||
@@ -1,11 +1,13 @@
# pylint: disable=missing-module-docstring
import atexit
import io
import json
import os
import stat
import sys
import zipfile
from contextlib import ExitStack
from typing import Tuple
from typing import Optional, Tuple

import requests

@@ -17,39 +19,49 @@ if sys.version_info >= (3, 9): # pragma: no cover
else: # pragma: no cover
import importlib_resources

PEBBLE_VERSION = 'v2.3.1'
PEBBLE_VERSION = 'v2.5.1'


def fetch(workspace: str, http_01_port: int = DEFAULT_HTTP_01_PORT) -> Tuple[str, str, str]:
# pylint: disable=missing-function-docstring
suffix = 'linux-amd64' if os.name != 'nt' else 'windows-amd64.exe'

file_manager = ExitStack()
atexit.register(file_manager.close)
pebble_path_ref = importlib_resources.files('certbot_integration_tests') / 'assets'
assets_path = str(file_manager.enter_context(importlib_resources.as_file(pebble_path_ref)))

pebble_path = _fetch_asset('pebble', suffix, assets_path)
challtestsrv_path = _fetch_asset('pebble-challtestsrv', suffix, assets_path)
pebble_path = _fetch_asset('pebble', assets_path)
challtestsrv_path = _fetch_asset('pebble-challtestsrv', assets_path)
pebble_config_path = _build_pebble_config(workspace, http_01_port, assets_path)

return pebble_path, challtestsrv_path, pebble_config_path


def _fetch_asset(asset: str, suffix: str, assets_path: str) -> str:
asset_path = os.path.join(assets_path, '{0}_{1}_{2}'.format(asset, PEBBLE_VERSION, suffix))
def _fetch_asset(asset: str, assets_path: str) -> str:
platform = 'linux-amd64'
base_url = 'https://github.com/letsencrypt/pebble/releases/download'
asset_path = os.path.join(assets_path, f'{asset}_{PEBBLE_VERSION}_{platform}')
if not os.path.exists(asset_path):
asset_url = ('https://github.com/letsencrypt/pebble/releases/download/{0}/{1}_{2}'
.format(PEBBLE_VERSION, asset, suffix))
asset_url = f'{base_url}/{PEBBLE_VERSION}/{asset}-{platform}.zip'
response = requests.get(asset_url, timeout=30)
response.raise_for_status()
asset_data = _unzip_asset(response.content, asset)
if asset_data is None:
raise ValueError(f"zipfile {asset_url} didn't contain file {asset}")
with open(asset_path, 'wb') as file_h:
file_h.write(response.content)
file_h.write(asset_data)
os.chmod(asset_path, os.stat(asset_path).st_mode | stat.S_IEXEC)

return asset_path


def _unzip_asset(zipped_data: bytes, asset_name: str) -> Optional[bytes]:
with zipfile.ZipFile(io.BytesIO(zipped_data)) as zip_file:
for entry in zip_file.filelist:
if not entry.is_dir() and entry.filename.endswith(asset_name):
return zip_file.read(entry)
return None


def _build_pebble_config(workspace: str, http_01_port: int, assets_path: str) -> str:
config_path = os.path.join(workspace, 'pebble-config.json')
with open(config_path, 'w') as file_h:
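For readers unfamiliar with this helper, the sketch below shows how the updated `fetch()` might be called from a test harness. It is illustrative only; the module path is assumed rather than taken from the diff, while the `fetch()` signature matches the one shown above.

```python
# Illustrative sketch only (not part of this diff); the module path is assumed.
import tempfile

from certbot_integration_tests.utils.pebble_artifacts import fetch

workspace = tempfile.mkdtemp()
# Downloads the Pebble v2.5.1 release zips on first use, unpacks the binaries
# into the package assets directory, and writes a pebble-config.json.
pebble_path, challtestsrv_path, pebble_config_path = fetch(workspace, http_01_port=5002)
print(pebble_path, challtestsrv_path, pebble_config_path)
```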
@@ -45,6 +45,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
],
|
||||
|
||||
@@ -75,7 +75,7 @@ def _get_server_root(config: str) -> str:
|
||||
if os.path.isdir(os.path.join(config, name))]
|
||||
|
||||
if len(subdirs) != 1:
|
||||
errors.Error("Malformed configuration directory {0}".format(config))
|
||||
raise errors.Error("Malformed configuration directory {0}".format(config))
|
||||
|
||||
return os.path.join(config, subdirs[0].rstrip())
|
||||
|
||||
|
||||
Binary file not shown.
@@ -19,7 +19,6 @@
|
||||
|
||||
server {
|
||||
listen 80 default_server;
|
||||
listen [::]:80 default_server ipv6only=on;
|
||||
|
||||
root /usr/share/nginx/html;
|
||||
index index.html index.htm;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'certbot',
|
||||
@@ -29,6 +29,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
],
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-cloudflare/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-cloudflare/readthedocs.org.requirements.txt
|
||||
@@ -4,10 +4,12 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'cloudflare>=1.5.1',
|
||||
# for now, do not upgrade to cloudflare>=2.20 to avoid deprecation warnings and the breaking
|
||||
# changes in version 3.0. see https://github.com/certbot/certbot/issues/9938
|
||||
'cloudflare>=1.5.1, <2.20',
|
||||
'setuptools>=41.6.0',
|
||||
]
|
||||
|
||||
@@ -52,6 +54,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-digitalocean/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-digitalocean/readthedocs.org.requirements.txt
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'python-digitalocean>=1.11', # 1.15.0 or newer is recommended for TTL support
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-dnsimple/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-dnsimple/readthedocs.org.requirements.txt
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
# This version of lexicon is required to address the problem described in
|
||||
@@ -54,6 +54,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-dnsmadeeasy/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-dnsmadeeasy/readthedocs.org.requirements.txt
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'dns-lexicon>=3.14.1',
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-gehirn/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-gehirn/readthedocs.org.requirements.txt
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Tests for certbot_dns_gehirn._internal.dns_gehirn."""
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'dns-lexicon>=3.14.1',
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-google/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-google/readthedocs.org.requirements.txt
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'google-api-python-client>=1.6.5',
|
||||
@@ -53,6 +53,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-linode/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-linode/readthedocs.org.requirements.txt
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'dns-lexicon>=3.14.1',
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-luadns/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-luadns/readthedocs.org.requirements.txt
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'dns-lexicon>=3.14.1',
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-nsone/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-nsone/readthedocs.org.requirements.txt
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'dns-lexicon>=3.14.1',
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-ovh/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-ovh/readthedocs.org.requirements.txt
|
||||
@@ -22,6 +22,7 @@ class AuthenticatorTest(test_util.TempDirTestCase,
|
||||
|
||||
DOMAIN_NOT_FOUND = Exception('Domain example.com not found')
|
||||
LOGIN_ERROR = HTTPError('403 Client Error: Forbidden for url: https://eu.api.ovh.com/1.0/...', response=Response())
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'dns-lexicon>=3.15.1',
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-rfc2136/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-rfc2136/readthedocs.org.requirements.txt
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'dnspython>=1.15.0',
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-route53/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-route53/readthedocs.org.requirements.txt
|
||||
@@ -101,13 +101,4 @@ Examples
|
||||
--dns-route53 \\
|
||||
-d example.com \\
|
||||
-d www.example.com
|
||||
|
||||
.. code-block:: bash
|
||||
:caption: To acquire a certificate for ``example.com``, waiting 30 seconds
|
||||
for DNS propagation
|
||||
|
||||
certbot certonly \\
|
||||
--dns-route53 \\
|
||||
--dns-route53-propagation-seconds 30 \\
|
||||
-d example.com
|
||||
"""
|
||||
|
||||
@@ -6,18 +6,20 @@ from typing import Any
|
||||
from typing import Callable
|
||||
from typing import DefaultDict
|
||||
from typing import Dict
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Type
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
from botocore.exceptions import NoCredentialsError
|
||||
|
||||
from acme.challenges import ChallengeResponse
|
||||
from acme import challenges
|
||||
from certbot import achallenges
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot.achallenges import AnnotatedChallenge
|
||||
from certbot.plugins import dns_common
|
||||
from certbot.util import add_deprecated_argument
|
||||
from certbot.plugins import common
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -27,7 +29,7 @@ INSTRUCTIONS = (
|
||||
"and add the necessary permissions for Route53 access.")
|
||||
|
||||
|
||||
class Authenticator(dns_common.DNSAuthenticator):
|
||||
class Authenticator(common.Plugin, interfaces.Authenticator):
|
||||
"""Route53 Authenticator
|
||||
|
||||
This authenticator solves a DNS01 challenge by uploading the answer to AWS
|
||||
@@ -41,6 +43,7 @@ class Authenticator(dns_common.DNSAuthenticator):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.r53 = boto3.client("route53")
|
||||
self._attempt_cleanup = False
|
||||
self._resource_records: DefaultDict[str, List[Dict[str, str]]] = \
|
||||
collections.defaultdict(list)
|
||||
|
||||
@@ -48,9 +51,9 @@ class Authenticator(dns_common.DNSAuthenticator):
|
||||
return "Solve a DNS01 challenge using AWS Route53"
|
||||
|
||||
@classmethod
|
||||
def add_parser_arguments(cls, add: Callable[..., None], # pylint: disable=arguments-differ
|
||||
default_propagation_seconds: int = 10) -> None:
|
||||
add_deprecated_argument(add, 'propagation-seconds', 1)
|
||||
def add_parser_arguments(cls, add: Callable[..., None]) -> None:
|
||||
# This authenticator currently adds no extra arguments.
|
||||
pass
|
||||
|
||||
def auth_hint(self, failed_achalls: List[achallenges.AnnotatedChallenge]) -> str:
|
||||
return (
|
||||
@@ -58,13 +61,13 @@ class Authenticator(dns_common.DNSAuthenticator):
|
||||
'--dns-route53. Ensure the above domains have their DNS hosted by AWS Route53.'
|
||||
)
|
||||
|
||||
def _setup_credentials(self) -> None:
|
||||
def prepare(self) -> None:
|
||||
pass
|
||||
|
||||
def _perform(self, domain: str, validation_name: str, validation: str) -> None:
|
||||
pass
|
||||
def get_chall_pref(self, unused_domain: str) -> Iterable[Type[challenges.Challenge]]:
|
||||
return [challenges.DNS01]
|
||||
|
||||
def perform(self, achalls: List[AnnotatedChallenge]) -> List[ChallengeResponse]:
|
||||
def perform(self, achalls: List[AnnotatedChallenge]) -> List[challenges.ChallengeResponse]:
|
||||
self._attempt_cleanup = True
|
||||
|
||||
try:
|
||||
@@ -82,7 +85,16 @@ class Authenticator(dns_common.DNSAuthenticator):
|
||||
raise errors.PluginError("\n".join([str(e), INSTRUCTIONS]))
|
||||
return [achall.response(achall.account_key) for achall in achalls]
|
||||
|
||||
def _cleanup(self, domain: str, validation_name: str, validation: str) -> None:
|
||||
def cleanup(self, achalls: List[achallenges.AnnotatedChallenge]) -> None:
|
||||
if self._attempt_cleanup:
|
||||
for achall in achalls:
|
||||
domain = achall.domain
|
||||
validation_domain_name = achall.validation_domain_name(domain)
|
||||
validation = achall.validation(achall.account_key)
|
||||
|
||||
self._cleanup(validation_domain_name, validation)
|
||||
|
||||
def _cleanup(self, validation_name: str, validation: str) -> None:
|
||||
try:
|
||||
self._change_txt_record("DELETE", validation_name, validation)
|
||||
except (NoCredentialsError, ClientError) as e:
|
||||
@@ -166,3 +178,13 @@ class Authenticator(dns_common.DNSAuthenticator):
|
||||
raise errors.PluginError(
|
||||
"Timed out waiting for Route53 change. Current status: %s" %
|
||||
response["ChangeInfo"]["Status"])
|
||||
|
||||
|
||||
# Our route53 plugin was initially a 3rd party plugin named `certbot-route53:auth` as described at
|
||||
# https://github.com/certbot/certbot/issues/4688. This shim exists to allow installations using the
|
||||
# old plugin name of `certbot-route53:auth` to continue to work without cluttering things like
|
||||
# Certbot's help output with two route53 plugins.
|
||||
class HiddenAuthenticator(Authenticator):
|
||||
"""A hidden shim around certbot-dns-route53 for backwards compatibility."""
|
||||
|
||||
hidden = True
|
||||
|
||||
@@ -6,17 +6,27 @@ from unittest import mock
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
from botocore.exceptions import NoCredentialsError
|
||||
import josepy as jose
|
||||
import pytest
|
||||
|
||||
from acme import challenges
|
||||
from certbot import achallenges
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
from certbot.plugins import dns_test_common
|
||||
from certbot.plugins.dns_test_common import DOMAIN
|
||||
from certbot.tests import acme_util
|
||||
from certbot.tests import util as test_util
|
||||
|
||||
DOMAIN = 'example.com'
|
||||
KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem"))
|
||||
|
||||
|
||||
class AuthenticatorTest(unittest.TestCase, dns_test_common.BaseAuthenticatorTest):
|
||||
class AuthenticatorTest(unittest.TestCase):
|
||||
# pylint: disable=protected-access
|
||||
|
||||
achall = achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.DNS01, domain=DOMAIN, account_key=KEY)
|
||||
|
||||
def setUp(self):
|
||||
from certbot_dns_route53._internal.dns_route53 import Authenticator
|
||||
|
||||
@@ -35,6 +45,12 @@ class AuthenticatorTest(unittest.TestCase, dns_test_common.BaseAuthenticatorTest
|
||||
del os.environ["AWS_ACCESS_KEY_ID"]
|
||||
del os.environ["AWS_SECRET_ACCESS_KEY"]
|
||||
|
||||
def test_more_info(self) -> None:
|
||||
self.assertTrue(isinstance(self.auth.more_info(), str))
|
||||
|
||||
def test_get_chall_pref(self) -> None:
|
||||
self.assertEqual(self.auth.get_chall_pref("example.org"), [challenges.DNS01])
|
||||
|
||||
def test_perform(self):
|
||||
self.auth._change_txt_record = mock.MagicMock()
|
||||
self.auth._wait_for_change = mock.MagicMock()
|
||||
@@ -85,13 +101,6 @@ class AuthenticatorTest(unittest.TestCase, dns_test_common.BaseAuthenticatorTest
|
||||
|
||||
self.auth.cleanup([self.achall])
|
||||
|
||||
def test_parser_arguments(self) -> None:
|
||||
from certbot.util import DeprecatedArgumentAction
|
||||
m = mock.MagicMock()
|
||||
self.auth.add_parser_arguments(m) # pylint: disable=no-member
|
||||
m.assert_any_call('propagation-seconds', action=DeprecatedArgumentAction,
|
||||
help=mock.ANY, nargs=1)
|
||||
|
||||
|
||||
class ClientTest(unittest.TestCase):
|
||||
# pylint: disable=protected-access
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
"""Shim around `~certbot_dns_route53._internal.dns_route53` for backwards compatibility."""
|
||||
from typing import Any
|
||||
import warnings
|
||||
|
||||
from certbot_dns_route53._internal import dns_route53
|
||||
|
||||
|
||||
class Authenticator(dns_route53.Authenticator):
|
||||
"""Shim around `~certbot_dns_route53._internal.dns_route53.Authenticator`
|
||||
for backwards compatibility."""
|
||||
|
||||
hidden = True
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
warnings.warn("The 'authenticator' module was renamed 'dns_route53'",
|
||||
DeprecationWarning)
|
||||
super().__init__(*args, **kwargs)
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'boto3>=1.15.15',
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
@@ -70,7 +71,7 @@ setup(
|
||||
entry_points={
|
||||
'certbot.plugins': [
|
||||
'dns-route53 = certbot_dns_route53._internal.dns_route53:Authenticator',
|
||||
'certbot-route53:auth = certbot_dns_route53.authenticator:Authenticator'
|
||||
'certbot-route53:auth = certbot_dns_route53._internal.dns_route53:HiddenAuthenticator',
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot-dns-sakuracloud/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot-dns-sakuracloud/readthedocs.org.requirements.txt
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'dns-lexicon>=3.14.1',
|
||||
@@ -52,6 +52,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -18,7 +18,6 @@ from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import Set
|
||||
from typing import Text
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import Union
|
||||
@@ -172,8 +171,8 @@ class NginxConfigurator(common.Configurator):
|
||||
|
||||
file_manager = ExitStack()
|
||||
atexit.register(file_manager.close)
|
||||
ref = importlib_resources.files("certbot_nginx").joinpath(
|
||||
"_internal", "tls_configs", config_filename)
|
||||
ref = (importlib_resources.files("certbot_nginx").joinpath("_internal")
|
||||
.joinpath("tls_configs").joinpath(config_filename))
|
||||
|
||||
return str(file_manager.enter_context(importlib_resources.as_file(ref)))
|
||||
|
||||
@@ -702,7 +701,7 @@ class NginxConfigurator(common.Configurator):
|
||||
# TODO: generate only once
|
||||
tmp_dir = os.path.join(self.config.work_dir, "snakeoil")
|
||||
le_key = crypto_util.generate_key(
|
||||
key_size=1024, key_dir=tmp_dir, keyname="key.pem",
|
||||
key_size=2048, key_dir=tmp_dir, keyname="key.pem",
|
||||
strict_permissions=self.config.strict_permissions)
|
||||
assert le_key.file is not None
|
||||
key = OpenSSL.crypto.load_privatekey(
|
||||
@@ -1275,7 +1274,7 @@ def nginx_restart(nginx_ctl: str, nginx_conf: str, sleep_duration: int) -> None:
|
||||
|
||||
"""
|
||||
try:
|
||||
reload_output: Text = ""
|
||||
reload_output: str = ""
|
||||
with tempfile.TemporaryFile() as out:
|
||||
proc = subprocess.run([nginx_ctl, "-c", nginx_conf, "-s", "reload"],
|
||||
env=util.env_no_snap_for_external_calls(),
|
||||
|
||||
@@ -294,7 +294,7 @@ def dumps(blocks: UnspacedList) -> str:
|
||||
"""Dump to a Unicode string.
|
||||
|
||||
:param UnspacedList blocks: The parsed tree
|
||||
:rtype: six.text_type
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
return str(RawNginxDumper(blocks.spaced))
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '2.8.0.dev0'
|
||||
version = '3.1.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
# We specify the minimum acme and certbot version as the current plugin
|
||||
@@ -41,6 +41,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
|
||||
@@ -14,14 +14,14 @@ build:
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: certbot/docs/conf.py
|
||||
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
|
||||
# builder: "dirhtml"
|
||||
# Fail on all warnings to avoid broken references
|
||||
fail_on_warning: true
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
formats:
|
||||
- pdf
|
||||
- epub
|
||||
|
||||
@@ -30,4 +30,4 @@ sphinx:
|
||||
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
|
||||
python:
|
||||
install:
|
||||
- requirements: ../tools/requirements.txt
|
||||
- requirements: certbot/readthedocs.org.requirements.txt
|
||||
@@ -1,8 +1,9 @@

# Certbot change log

Certbot adheres to [Semantic Versioning](https://semver.org/).

## 2.8.0 - master
## 3.1.0 - main

### Added

@@ -10,10 +11,121 @@ Certbot adheres to [Semantic Versioning](https://semver.org/).

### Changed

*

### Fixed

*

More details about these changes can be found on our GitHub repo.

## 3.0.0 - 2024-11-05

### Added

*

### Changed

* The update_symlinks command was removed.
* The `csr_dir` and `key_dir` attributes on
`certbot.configuration.NamespaceConfig` were removed.
* The `--manual-public-ip-logging-ok` command line flag was removed.
* The `--dns-route53-propagation-seconds` command line flag was removed.
* The `certbot_dns_route53.authenticator` module has been removed. This should
not affect any users of the plugin and instead would only affect developers
trying to develop on top of the old code.
* Support for Python 3.8 was deprecated and will be removed in our next planned
release.

### Fixed

*

More details about these changes can be found on our GitHub repo.

## 2.11.0 - 2024-06-05

### Added

*

### Changed

* In anticipation of backwards incompatible changes, certbot-dns-cloudflare now
requires less than version 2.20 of Cloudflare's python library.

### Fixed

* Fixed a bug in Certbot where a CSR's SANs did not always follow the order of
the domain names that the user requested interactively. In some cases, the
resulting cert's common name might seem picked up randomly from the SANs
when it should be the first item the user had in mind.

More details about these changes can be found on our GitHub repo.

## 2.10.0 - 2024-04-02

### Added

* The Python source packages which we upload to [PyPI](https://pypi.org/) are
now also being uploaded to
[our releases on GitHub](https://github.com/certbot/certbot/releases) where
we now also include a SHA256SUMS checksum file and a PGP signature for that
file.

### Changed

* We no longer publish our beta Windows installer as was originally announced
[here](https://community.letsencrypt.org/t/certbot-discontinuing-windows-beta-support-in-2024/208101).

### Fixed

*

More details about these changes can be found on our GitHub repo.

## 2.9.0 - 2024-02-08

### Added

* Support for Python 3.12 was added.

### Changed

*

### Fixed

* Updates `joinpath` syntax to only use one addition per call, because the multiple inputs
version was causing mypy errors on Python 3.10.
* Makes the `reconfigure` verb actually use the staging server for the dry run to check the new
configuration.

More details about these changes can be found on our GitHub repo.

## 2.8.0 - 2023-12-05

### Added

* Added support for [Alpine Linux](https://www.alpinelinux.org) distribution when is used the apache plugin

### Changed

* Support for Python 3.7 was removed.

### Fixed

* Stop using the deprecated `pkg_resources` API included in `setuptools`.

More details about these changes can be found on our GitHub repo.

## 2.7.4 - 2023-11-01

### Fixed

* Fixed a bug introduced in version 2.7.0 that caused interactively entered
webroot plugin values to not be saved for renewal.
* Fixed a bug introduced in version 2.7.0 of our Lexicon based DNS plugins that
caused them to fail to find the DNS zone that needs to be modified in some
cases.

@@ -2,10 +2,10 @@

|build-status|

.. |build-status| image:: https://img.shields.io/azure-devops/build/certbot/ba534f81-a483-4b9b-9b4e-a60bec8fee72/5/master
.. |build-status| image:: https://img.shields.io/azure-devops/build/certbot/ba534f81-a483-4b9b-9b4e-a60bec8fee72/5/main
:target: https://dev.azure.com/certbot/certbot/_build?definitionId=5
:alt: Azure Pipelines CI status

.. image:: https://raw.githubusercontent.com/EFForg/design/master/logos/eff-certbot-lockup.png
:width: 200
:alt: EFF Certbot Logo
@@ -39,7 +39,7 @@ Documentation: https://certbot.eff.org/docs

Software project: https://github.com/certbot/certbot

Changelog: https://github.com/certbot/certbot/blob/master/certbot/CHANGELOG.md
Changelog: https://github.com/certbot/certbot/blob/main/certbot/CHANGELOG.md

For Contributors: https://certbot.eff.org/docs/contributing.html

@@ -1,4 +1,13 @@
"""Certbot client."""
import sys
import warnings

# version number like 1.2.3a0, must have at least 2 parts, like 1.2
__version__ = '2.8.0.dev0'
__version__ = '3.1.0.dev0'

if sys.version_info[:2] == (3, 8):
warnings.warn(
"Python 3.8 support will be dropped in the next planned release of "
"certbot. Please upgrade your Python version.",
PendingDeprecationWarning,
) # pragma: no cover
||||
@@ -30,22 +30,6 @@ logger = logging.getLogger(__name__)
|
||||
###################
|
||||
|
||||
|
||||
def update_live_symlinks(config: configuration.NamespaceConfig) -> None:
|
||||
"""Update the certificate file family symlinks to use archive_dir.
|
||||
|
||||
Use the information in the config file to make symlinks point to
|
||||
the correct archive directory.
|
||||
|
||||
.. note:: This assumes that the installation is using a Reverter object.
|
||||
|
||||
:param config: Configuration.
|
||||
:type config: :class:`certbot._internal.configuration.NamespaceConfig`
|
||||
|
||||
"""
|
||||
for renewal_file in storage.renewal_conf_files(config):
|
||||
storage.RenewableCert(renewal_file, config, update_symlinks=True)
|
||||
|
||||
|
||||
def rename_lineage(config: configuration.NamespaceConfig) -> None:
|
||||
"""Rename the specified lineage to the new name.
|
||||
|
||||
|
||||
@@ -36,6 +36,7 @@ from certbot._internal.cli.cli_utils import HelpfulArgumentGroup
|
||||
from certbot._internal.cli.cli_utils import nonnegative_int
|
||||
from certbot._internal.cli.cli_utils import parse_preferred_challenges
|
||||
from certbot._internal.cli.cli_utils import read_file
|
||||
from certbot._internal.cli.cli_utils import set_test_server_options
|
||||
from certbot._internal.cli.group_adder import _add_all_groups
|
||||
from certbot._internal.cli.helpful import HelpfulArgumentParser
|
||||
from certbot._internal.cli.paths_parser import _paths_parser
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Certbot command line util function"""
|
||||
import argparse
|
||||
import copy
|
||||
import glob
|
||||
import inspect
|
||||
from typing import Any
|
||||
from typing import Iterable
|
||||
@@ -250,3 +251,48 @@ def nonnegative_int(value: str) -> int:
|
||||
if int_value < 0:
|
||||
raise argparse.ArgumentTypeError("value must be non-negative")
|
||||
return int_value
|
||||
|
||||
def set_test_server_options(verb: str, config: configuration.NamespaceConfig) -> None:
"""Updates server, break_my_certs, staging, tos, and
register_unsafely_without_email in config as necessary to prepare
to use the test server.

We have --staging/--dry-run; perform sanity check and set config.server

:param str verb: subcommand called

:param config: parsed command line arguments
:type config: configuration.NamespaceConfig

:raises errors.Error: if non-default server is used and --staging is set
:raises errors.Error: if inapplicable verb is used and --dry-run is set
"""

# Flag combinations should produce these results:
# | --staging | --dry-run |
# ------------------------------------------------------------
# | --server acme-v02 | Use staging | Use staging |
# | --server acme-staging-v02 | Use staging | Use staging |
# | --server <other> | Conflict error | Use <other> |

default_servers = (flag_default("server"), constants.STAGING_URI)

if config.staging and config.server not in default_servers:
raise errors.Error("--server value conflicts with --staging")

if config.server == flag_default("server"):
config.server = constants.STAGING_URI
# If the account has already been loaded (such as by calling reconstitute before this),
# clear it so that we don't try to use the prod account on the staging server.
config.account = None

if config.dry_run:
if verb not in ["certonly", "renew", "reconfigure"]:
raise errors.Error("--dry-run currently only works with the "
"'certonly' or 'renew' subcommands (%r)" % verb)
config.break_my_certs = config.staging = True
if glob.glob(os.path.join(config.config_dir, constants.ACCOUNTS_DIR, "*")):
# The user has a prod account, but might not have a staging
# one; we don't want to start trying to perform interactive registration
config.tos = True
config.register_unsafely_without_email = True
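Since the helper is now a free function, it can be exercised directly. The sketch below is illustrative only: it drives the flag table above with a `SimpleNamespace` standing in for the parsed `NamespaceConfig`, which is a simplification rather than how Certbot itself calls it.

```python
# Illustrative sketch only (not part of this diff): exercising the helper with a
# minimal stand-in for NamespaceConfig. Certbot itself passes the parsed config.
import tempfile
from types import SimpleNamespace

from certbot._internal import constants
from certbot._internal.cli.cli_utils import flag_default, set_test_server_options

config = SimpleNamespace(
    server=flag_default("server"), staging=True, dry_run=False, account=None,
    config_dir=tempfile.mkdtemp(), break_my_certs=False, tos=False,
    register_unsafely_without_email=False,
)
set_test_server_options("certonly", config)
assert config.server == constants.STAGING_URI  # --staging switched to the staging endpoint
# Combining --staging with a non-default --server raises errors.Error instead.
```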
||||
@@ -2,7 +2,6 @@

import argparse
import functools
import glob
import sys
from typing import Any
from typing import Dict
@@ -26,11 +25,11 @@ from certbot._internal.cli.cli_utils import add_domains
from certbot._internal.cli.cli_utils import CustomHelpFormatter
from certbot._internal.cli.cli_utils import flag_default
from certbot._internal.cli.cli_utils import HelpfulArgumentGroup
from certbot._internal.cli.cli_utils import set_test_server_options
from certbot._internal.cli.verb_help import VERB_HELP
from certbot._internal.cli.verb_help import VERB_HELP_MAP
from certbot._internal.display import obj as display_obj
from certbot._internal.plugins import disco
from certbot.compat import os
from certbot.configuration import ArgumentSource
from certbot.configuration import NamespaceConfig

@@ -59,7 +58,6 @@ class HelpfulArgumentParser:
        "revoke": main.revoke,
        "rollback": main.rollback,
        "everything": main.run,
        "update_symlinks": main.update_symlinks,
        "certificates": main.certificates,
        "delete": main.delete,
        "enhance": main.enhance,
@@ -165,6 +163,7 @@ class HelpfulArgumentParser:
    def remove_config_file_domains_for_renewal(self, config: NamespaceConfig) -> None:
        """Make "certbot renew" safe if domains are set in cli.ini."""
        # Works around https://github.com/certbot/certbot/issues/4096
        assert config.argument_sources is not None
        if (config.argument_sources['domains'] == ArgumentSource.CONFIG_FILE and
                self.verb == "renew"):
            config.domains = []
@@ -317,33 +316,10 @@ class HelpfulArgumentParser:
        return config

    def set_test_server(self, config: NamespaceConfig) -> None:
        """We have --staging/--dry-run; perform sanity check and set config.server"""

        # Flag combinations should produce these results:
        #                              | --staging      | --dry-run   |
        # ------------------------------------------------------------
        # | --server acme-v02         | Use staging    | Use staging |
        # | --server acme-staging-v02 | Use staging    | Use staging |
        # | --server <other>          | Conflict error | Use <other> |

        default_servers = (flag_default("server"), constants.STAGING_URI)

        if config.staging and config.server not in default_servers:
            raise errors.Error("--server value conflicts with --staging")

        if config.server == flag_default("server"):
            config.server = constants.STAGING_URI

        if config.dry_run:
            if self.verb not in ["certonly", "renew"]:
                raise errors.Error("--dry-run currently only works with the "
                                   "'certonly' or 'renew' subcommands (%r)" % self.verb)
            config.break_my_certs = config.staging = True
            if glob.glob(os.path.join(config.config_dir, constants.ACCOUNTS_DIR, "*")):
                # The user has a prod account, but might not have a staging
                # one; we don't want to start trying to perform interactive registration
                config.tos = True
                config.register_unsafely_without_email = True
        """Updates server, break_my_certs, staging, tos, and
        register_unsafely_without_email in config as necessary to prepare
        to use the test server."""
        return set_test_server_options(self.verb, config)

    def handle_csr(self, config: NamespaceConfig) -> None:
        """Process a --csr flag."""

@@ -21,7 +21,7 @@ SETUPTOOLS_PLUGINS_ENTRY_POINT = "certbot.plugins"
OLD_SETUPTOOLS_PLUGINS_ENTRY_POINT = "letsencrypt.plugins"
"""Plugins Setuptools entry point before rename."""

CLI_DEFAULTS: Dict[str, Any] = dict(  # noqa
CLI_DEFAULTS: Dict[str, Any] = dict(  # pylint: disable=use-dict-literal
    config_files=[
        os.path.join(misc.get_default_folder('config'), 'cli.ini'),
        # https://freedesktop.org/wiki/Software/xdg-user-dirs/
@@ -182,9 +182,6 @@ BACKUP_DIR = "backups"
"""Directory (relative to `certbot.configuration.NamespaceConfig.work_dir`)
where backups are kept."""

CSR_DIR = "csr"
"""See `certbot.configuration.NamespaceConfig.csr_dir`."""

IN_PROGRESS_DIR = "IN_PROGRESS"
"""Directory used before a permanent checkpoint is finalized (relative to
`certbot.configuration.NamespaceConfig.work_dir`)."""

@@ -328,8 +328,9 @@ class FileDisplay:
        except ValueError:
            return []

        # Remove duplicates
        indices_int = list(set(indices_int))
        # Remove duplicates. dict is used instead of set, since dict preserves
        # insertion order as of Python 3.7
        indices_int = list(dict.fromkeys(indices_int).keys())

        # Check all input is within range
        for index in indices_int:

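In case the order-preserving dedup trick is unfamiliar, here is a quick standalone illustration of why `dict.fromkeys` is used instead of `set` (plain Python, independent of certbot):

```python
indices = [3, 1, 3, 2, 1]
print(list(set(indices)))            # deduplicated, but order is not guaranteed
print(list(dict.fromkeys(indices)))  # [3, 1, 2] -- keeps first-seen order (Python 3.7+)
```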
@@ -15,7 +15,6 @@ from typing import Optional
from typing import Tuple
from typing import TypeVar
from typing import Union
import warnings

import configobj
import josepy as jose
@@ -1266,27 +1265,6 @@ def rollback(config: configuration.NamespaceConfig, plugins: plugins_disco.Plugi
    client.rollback(config.installer, config.checkpoints, config, plugins)


def update_symlinks(config: configuration.NamespaceConfig,
                    unused_plugins: plugins_disco.PluginsRegistry) -> None:
    """Update the certificate file family symlinks

    Use the information in the config file to make symlinks point to
    the correct archive directory.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None`
    :rtype: None

    """
    warnings.warn("update_symlinks is deprecated and will be removed", PendingDeprecationWarning)
    cert_manager.update_live_symlinks(config)


def rename(config: configuration.NamespaceConfig,
           unused_plugins: plugins_disco.PluginsRegistry) -> None:
    """Rename a certificate

@@ -1727,10 +1705,8 @@ def reconfigure(config: configuration.NamespaceConfig,
    # to say nothing of the difficulty in explaining what exactly this subcommand can modify


    # To make sure that the requested changes work, do a dry run. While setting up the dry run,
    # we will set all the needed fields in config, which will then be saved upon success.
    config.dry_run = True

    # To make sure that the requested changes work, we're going to do a dry run, and only save
    # upon success. First, modify the config as the user requested.
    if not config.certname:
        certname_question = "Which certificate would you like to reconfigure?"
        config.certname = cert_manager.get_certnames(
@@ -1772,17 +1748,44 @@ def reconfigure(config: configuration.NamespaceConfig,
    if not renewal_candidate:
        raise errors.ConfigurationError("Could not load certificate. See logs for errors.")

    renewalparams = orig_renewal_conf['renewalparams']
    # If server was set but hasn't changed and no account is loaded,
    # load the old account because reconstitute won't have
    if lineage_config.set_by_user('server') and lineage_config.server == renewalparams['server']\
            and lineage_config.account is None:
        lineage_config.account = renewalparams['account']
    for param in ('account', 'server',):
        if getattr(lineage_config, param) != renewalparams.get(param):
            msg = ("Using reconfigure to change the ACME account or server is not supported. "
                   "If you would like to do so, use renew with the --force-renewal flag instead "
                   "of reconfigure. Note that doing so will count against any rate limits. For "
                   "more information on this method, see "
                   "https://certbot.org/renew-reconfiguration")
            raise errors.ConfigurationError(msg)

    # this is where lineage_config gets fully filled out (e.g. --apache will set auth and installer)
    installer, auth = plug_sel.choose_configurator_plugins(lineage_config, plugins, "certonly")
    le_client = _init_le_client(lineage_config, auth, installer)

    # make a deep copy of lineage_config because we're about to modify it for a test dry run
    dry_run_lineage_config = copy.deepcopy(lineage_config)

    # we also set noninteractive_mode to more accurately simulate renewal (since `certbot renew`
    # implies noninteractive mode) and to avoid prompting the user as changes made to
    # dry_run_lineage_config beyond this point will not be applied to the original lineage_config
    dry_run_lineage_config.noninteractive_mode = True
    dry_run_lineage_config.dry_run = True
    cli.set_test_server_options("reconfigure", dry_run_lineage_config)

    le_client = _init_le_client(dry_run_lineage_config, auth, installer)

    # renews cert as dry run to test that the new values are ok
    # at this point, renewal_candidate.configuration has the old values, but will use
    # the values from lineage_config when doing the dry run
    _get_and_save_cert(le_client, lineage_config, certname=certname,
    _get_and_save_cert(le_client, dry_run_lineage_config, certname=certname,
                       lineage=renewal_candidate)

    # this function will update lineage.configuration with the new values, and save it to disk
    # use the pre-dry-run version
    renewal_candidate.save_new_config_values(lineage_config)

    _report_reconfigure_results(renewal_file, orig_renewal_conf)
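The comments above describe a copy-then-mutate pattern: the dry run tweaks a throwaway copy of the configuration, so the original stays pristine for saving. A minimal, self-contained sketch of that pattern (using a hypothetical stand-in class, not certbot's NamespaceConfig):

```python
import copy
from dataclasses import dataclass, field

@dataclass
class FakeConfig:  # hypothetical stand-in, not certbot's NamespaceConfig
    dry_run: bool = False
    domains: list = field(default_factory=list)

original = FakeConfig(domains=["example.com"])
trial = copy.deepcopy(original)   # deep copy: nested lists are duplicated too
trial.dry_run = True
trial.domains.append("www.example.com")

# The dry-run mutations never leak back into the config that will be saved.
assert original.dry_run is False
assert original.domains == ["example.com"]
```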
@@ -1863,6 +1866,10 @@ def main(cli_args: Optional[List[str]] = None) -> Optional[Union[str, int]]:
        if config.func != plugins_cmd:  # pylint: disable=comparison-with-callable
            raise

    if sys.version_info[:2] == (3, 8):
        logger.warning("Python 3.8 support will be dropped in the next planned release "
                       "of Certbot - please upgrade your Python version.")

    with make_displayer(config) as displayer:
        display_obj.set_display(displayer)

@@ -207,6 +207,7 @@ class PluginsRegistry(Mapping):
            plugin2_dist = other_ep.entry_point.dist
            plugin1 = plugin1_dist.name.lower() if plugin1_dist else "unknown"
            plugin2 = plugin2_dist.name.lower() if plugin2_dist else "unknown"
            # pylint: disable=broad-exception-raised
            raise Exception("Duplicate plugin name {0} from {1} and {2}.".format(
                plugin_ep.name, plugin1, plugin2))
        if issubclass(plugin_ep.plugin_cls, interfaces.Plugin):

@@ -108,7 +108,6 @@ permitted by DNS standards.)
            help='Path or command to execute for the authentication script')
        add('cleanup-hook',
            help='Path or command to execute for the cleanup script')
        util.add_deprecated_argument(add, 'public-ip-logging-ok', 0)

    def prepare(self) -> None:  # pylint: disable=missing-function-docstring
        if self.config.noninteractive_mode and not self.conf('auth-hook'):

@@ -58,7 +58,7 @@ class Authenticator(common.Plugin, interfaces.Authenticator):

    description = """\
Saves the necessary validation files to a .well-known/acme-challenge/ directory within the \
nominated webroot path. A seperate HTTP server must be running and serving files from the \
nominated webroot path. A separate HTTP server must be running and serving files from the \
webroot path. HTTP challenge only (wildcards not supported)."""

    MORE_INFO = """\

@@ -4,8 +4,10 @@ import socket
from typing import Iterable
from typing import List
from typing import Optional
from typing import Tuple
from typing import Union

from requests import Session
from requests import PreparedRequest, Session
from requests.adapters import HTTPAdapter
from requests.exceptions import HTTPError
from requests.exceptions import RequestException
@@ -31,6 +33,7 @@ _ARCH_TRIPLET_MAP = {
    'amd64': 'x86_64-linux-gnu',
    's390x': 's390x-linux-gnu',
}
CURRENT_PYTHON_VERSION_STRING = 'python3.12'

LOGGER = logging.getLogger(__name__)

@@ -69,10 +72,35 @@ def prepare_env(cli_args: List[str]) -> List[str]:
        raise e

    data = response.json()
    connections = ['/snap/{0}/current/lib/python3.8/site-packages/'.format(item['slot']['snap'])
                   for item in data.get('result', {}).get('established', [])
                   if item.get('plug', {}).get('plug') == 'plugin'
                   and item.get('plug-attrs', {}).get('content') == 'certbot-1']
    connections = []
    outdated_plugins = []
    for plugin in data.get('result', {}).get('established', []):
        plug: str = plugin.get('plug', {}).get('plug')
        plug_content: str = plugin.get('plug-attrs', {}).get('content')
        if plug == 'plugin' and plug_content == 'certbot-1':
            plugin_name: str = plugin['slot']['snap']
            # First, check that the plugin is using our expected python version,
            # i.e. its "read" slot is something like
            # "$SNAP/lib/python3.12/site-packages". If not, skip it and print an
            # error.
            slot_read: str = plugin.get('slot-attrs', {}).get('read', [])
            if len(slot_read) != 0 and CURRENT_PYTHON_VERSION_STRING not in slot_read[0]:
                outdated_plugins.append(plugin_name)
                continue

            connections.append('/snap/{0}/current/lib/{1}/site-packages/'.format(
                plugin_name,
                CURRENT_PYTHON_VERSION_STRING
            ))

    if outdated_plugins:
        LOGGER.warning('The following plugins are using an outdated python version and must be '
                       'updated to be compatible with Certbot 3.0. Please see '
                       'https://community.letsencrypt.org/t/'
                       'certbot-3-0-could-have-potential-third-party-snap-breakages/226940 '
                       'for more information:')
        plugin_list = '\n'.join(' * {}'.format(plugin) for plugin in outdated_plugins)
        LOGGER.warning(plugin_list)

    os.environ['CERTBOT_PLUGIN_PATH'] = ':'.join(connections)

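For readers unfamiliar with the snapd API, the loop above expects the connections response to look roughly like the following. This is a hypothetical payload: the snap name and paths are illustrative, not taken from a real system.

```python
data = {
    "result": {
        "established": [
            {
                "plug": {"plug": "plugin"},
                "plug-attrs": {"content": "certbot-1"},
                "slot": {"snap": "certbot-dns-example"},  # hypothetical plugin snap
                "slot-attrs": {"read": ["$SNAP/lib/python3.12/site-packages"]},
            }
        ]
    }
}

# An entry like this passes the version check and contributes this path:
print('/snap/{0}/current/lib/{1}/site-packages/'.format(
    data["result"]["established"][0]["slot"]["snap"], "python3.12"))
# -> /snap/certbot-dns-example/current/lib/python3.12/site-packages/
```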
@@ -100,6 +128,21 @@ class _SnapdConnectionPool(HTTPConnectionPool):


class _SnapdAdapter(HTTPAdapter):
    # get_connection is used with versions of requests before 2.32.2 and
    # get_connection_with_tls_context is used instead in versions after that. as of
    # writing this, Certbot in EPEL 9 is still seeing updates and they have requests 2.25.1, so to
    # help out those packagers while ensuring this code works reliably, we offer custom versions of
    # both functions for now. when certbot does declare a dependency on requests>=2.32.2 in its
    # setup.py files, get_connection can be deleted
    def get_connection(self, url: str,
                       proxies: Optional[Iterable[str]] = None) -> _SnapdConnectionPool:
        return _SnapdConnectionPool()

    def get_connection_with_tls_context(self, request: PreparedRequest,
                                        verify: bool,
                                        proxies: Optional[Iterable[str]] = None,
                                        cert: Optional[Union[str, Tuple[str, str]]] = None
                                        ) -> _SnapdConnectionPool:
        """Required method for creating a new connection pool. Simply return our
        shim that forces a UNIX socket connection to snapd."""
        return _SnapdConnectionPool()

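To see how an adapter like this is meant to be used, here is a rough usage sketch. The mount prefix and query string are illustrative assumptions, not quoted from certbot: mounting the adapter on a requests Session reroutes any matching request through the UNIX-socket connection pool instead of TCP.

```python
from requests import Session

session = Session()
session.mount('http://snapd/', _SnapdAdapter())  # assumes the adapter class shown above

# The hostname is only a routing key; the adapter's connection pool talks to
# the snapd UNIX socket regardless of it.
response = session.get('http://snapd/v2/connections?snap=certbot&interface=content')
data = response.json()
```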
@@ -455,8 +455,7 @@ class RenewableCert(interfaces.RenewableCert):
    renewal configuration file and/or systemwide defaults.

    """
    def __init__(self, config_filename: str, cli_config: configuration.NamespaceConfig,
                 update_symlinks: bool = False) -> None:
    def __init__(self, config_filename: str, cli_config: configuration.NamespaceConfig) -> None:
        """Instantiate a RenewableCert object from an existing lineage.

        :param str config_filename: the path to the renewal config file
@@ -505,8 +504,6 @@ class RenewableCert(interfaces.RenewableCert):
        self.live_dir = os.path.dirname(self.cert)

        self._fix_symlinks()
        if update_symlinks:
            self._update_symlinks()
        self._check_symlinks()

    @property
@@ -593,17 +590,6 @@ class RenewableCert(interfaces.RenewableCert):
            raise errors.CertStorageError("target {0} of symlink {1} does "
                                          "not exist".format(target, link))

    def _update_symlinks(self) -> None:
        """Updates symlinks to use archive_dir"""
        for kind in ALL_FOUR:
            link = getattr(self, kind)
            previous_link = get_link_target(link)
            new_link = os.path.join(self.relative_archive_dir(link),
                                    os.path.basename(previous_link))

            os.unlink(link)
            os.symlink(new_link, link)

    def _consistent(self) -> bool:
        """Are the files associated with this lineage self-consistent?

@@ -636,10 +622,7 @@ class RenewableCert(interfaces.RenewableCert):
                         "cert lineage's directory within the "
                         "official archive directory. Link: %s, "
                         "target directory: %s, "
                         "archive directory: %s. If you've specified "
                         "the archive directory in the renewal configuration "
                         "file, you may need to update links by running "
                         "certbot update_symlinks.",
                         "archive directory: %s.",
                         link, os.path.dirname(target), self.archive_dir)
            return False

@@ -65,44 +65,6 @@ class BaseCertManagerTest(test_util.ConfigTestCase):
        return config_file


class UpdateLiveSymlinksTest(BaseCertManagerTest):
    """Tests for certbot._internal.cert_manager.update_live_symlinks
    """
    def test_update_live_symlinks(self):
        """Test update_live_symlinks"""
        # create files with incorrect symlinks
        from certbot._internal import cert_manager
        archive_paths = {}
        for domain in self.domains:
            custom_archive = self.domains[domain]
            if custom_archive is not None:
                archive_dir_path = custom_archive
            else:
                archive_dir_path = os.path.join(self.config.default_archive_dir, domain)
            archive_paths[domain] = {kind:
                os.path.join(archive_dir_path, kind + "1.pem") for kind in ALL_FOUR}
            for kind in ALL_FOUR:
                live_path = self.config_files[domain][kind]
                archive_path = archive_paths[domain][kind]
                open(archive_path, 'a').close()
                # path is incorrect but base must be correct
                os.symlink(os.path.join(self.config.config_dir, kind + "1.pem"), live_path)

        # run update symlinks
        cert_manager.update_live_symlinks(self.config)

        # check that symlinks go where they should
        prev_dir = os.getcwd()
        try:
            for domain in self.domains:
                for kind in ALL_FOUR:
                    os.chdir(os.path.dirname(self.config_files[domain][kind]))
                    assert filesystem.realpath(filesystem.readlink(self.config_files[domain][kind])) == \
                        filesystem.realpath(archive_paths[domain][kind])
        finally:
            os.chdir(prev_dir)


class DeleteTest(storage_test.BaseRenewableCertTest):
    """Tests for certbot._internal.cert_manager.delete
    """

@@ -594,7 +594,7 @@ class ParseTest(unittest.TestCase):
        assert_set_by_user_with_value(namespace, 'text_mode', True)
        assert_set_by_user_with_value(namespace, 'verbose_count', 1)
        assert_set_by_user_with_value(namespace, 'email', 'foo@example.com')


    def test_arg_with_contained_spaces(self):
        # This can happen if a user specifies an arg like "-d foo.com" enclosed
        # in double quotes, or as its own line in a docker-compose.yml file (as

@@ -48,7 +48,6 @@ class NamespaceConfigTest(test_util.ConfigTestCase):
    def test_dynamic_dirs(self, mock_constants):
        mock_constants.ACCOUNTS_DIR = 'acc'
        mock_constants.BACKUP_DIR = 'backups'
        mock_constants.CSR_DIR = 'csr'

        mock_constants.IN_PROGRESS_DIR = '../p'
        mock_constants.KEY_DIR = 'keys'
@@ -60,12 +59,6 @@ class NamespaceConfigTest(test_util.ConfigTestCase):
            os.path.normpath(os.path.join(self.config.config_dir, ref_path))
        assert os.path.normpath(self.config.backup_dir) == \
            os.path.normpath(os.path.join(self.config.work_dir, 'backups'))
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            assert os.path.normpath(self.config.csr_dir) == \
                os.path.normpath(os.path.join(self.config.config_dir, 'csr'))
        assert os.path.normpath(self.config.key_dir) == \
            os.path.normpath(os.path.join(self.config.config_dir, 'keys'))
        assert os.path.normpath(self.config.in_progress_dir) == \
            os.path.normpath(os.path.join(self.config.work_dir, '../p'))
        assert os.path.normpath(self.config.temp_checkpoint_dir) == \
@@ -100,10 +93,6 @@ class NamespaceConfigTest(test_util.ConfigTestCase):
            os.path.join(os.getcwd(), logs_base)
        assert os.path.isabs(config.accounts_dir)
        assert os.path.isabs(config.backup_dir)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            assert os.path.isabs(config.csr_dir)
        assert os.path.isabs(config.key_dir)
        assert os.path.isabs(config.in_progress_dir)
        assert os.path.isabs(config.temp_checkpoint_dir)

@@ -165,17 +154,22 @@ class NamespaceConfigTest(test_util.ConfigTestCase):

    def test_set_by_user_exception(self):
        from certbot.configuration import NamespaceConfig

        # a newly created NamespaceConfig has no argument sources dict, so an
        # exception is raised
        config = NamespaceConfig(self.config.namespace)
        with pytest.raises(RuntimeError):
            config.set_by_user('whatever')

        # now set an argument sources dict
        config.set_argument_sources({})
        assert not config.set_by_user('whatever')

    def test_set_by_user_mutables(self):
        assert not self.config.set_by_user('domains')
        self.config.domains.append('example.org')
        assert self.config.set_by_user('domains')

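The mutables test checks that mutating a default value in place (appending to `domains`) is enough for `set_by_user` to report it as user-set. Purely as an illustration of that kind of detection (this is not certbot's implementation; the class below is hypothetical), one way to do it is to snapshot the defaults and compare on lookup:

```python
import copy

class TrackedConfig:  # hypothetical illustration, not certbot's NamespaceConfig
    def __init__(self, **defaults):
        self._defaults = copy.deepcopy(defaults)  # frozen snapshot of the defaults
        self.__dict__.update(defaults)            # live, possibly mutated values

    def set_by_user(self, name: str) -> bool:
        return getattr(self, name) != self._defaults[name]

cfg = TrackedConfig(domains=[])
assert not cfg.set_by_user('domains')
cfg.domains.append('example.org')  # in-place mutation of the default list
assert cfg.set_by_user('domains')
```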

if __name__ == '__main__':
    sys.exit(pytest.main(sys.argv[1:] + [__file__]))  # pragma: no cover
Some files were not shown because too many files have changed in this diff.