Compare commits
5 commits: limit-cove... ... test-windo...
| Author | SHA1 | Date |
|---|---|---|
| | c52be1eeb5 | |
| | 39e786405d | |
| | 1b89ea773c | |
| | 590ad226cb | |
| | 9dfe0cd547 | |
@@ -9,7 +9,6 @@ variables:
  # We don't publish our Docker images in this pipeline, but when building them
  # for testing, let's use the nightly tag.
  dockerTag: nightly
  snapBuildTimeout: 5400

stages:
- template: templates/stages/test-and-package-stage.yml
@@ -1,18 +1,7 @@
# We run the test suite on commits to master so codecov gets coverage data
# about the master branch and can use it to track coverage changes.
trigger:
- master
trigger: none
pr:
- master
- '*.x'

variables:
  # We set this here to avoid coverage data being uploaded from things like our
  # nightly pipeline. This is done because codecov (helpfully) keeps track of
  # the number of coverage uploads for a commit and displays a warning when
  # comparing two commits with an unequal number of uploads. Only uploading
  # coverage here should keep the number of uploads it sees consistent.
  uploadCoverage: true

jobs:
- template: templates/jobs/standard-tests-jobs.yml
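The `uploadCoverage` variable set above only matters because a later step keys off it at runtime. A minimal sketch of that guard pattern in isolation (the echo body is a placeholder, not the real uploader, which appears in the tox-steps template further down):

```yaml
variables:
  uploadCoverage: true

steps:
# Runs only in pipelines that explicitly opted into coverage uploads.
- bash: echo "would upload coverage here"   # placeholder body
  condition: eq(variables['uploadCoverage'], true)
```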
@@ -11,7 +11,6 @@ schedules:

variables:
  dockerTag: nightly
  snapBuildTimeout: 19800

stages:
- template: templates/stages/test-and-package-stage.yml
@@ -8,18 +8,11 @@ pr: none

variables:
  dockerTag: ${{variables['Build.SourceBranchName']}}
  snapBuildTimeout: 19800

stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/changelog-stage.yml
- template: templates/stages/deploy-stage.yml
  parameters:
    ${{ if startsWith(variables['Build.SourceBranchName'], 'v2') }}:
      snapReleaseChannel: beta
    ${{ elseif startsWith(variables['Build.SourceBranchName'], 'v1') }}:
      snapReleaseChannel: candidate
    ${{ else }}:
      # This should never happen
      snapReleaseChannel: somethingInvalid
    snapReleaseChannel: beta
- template: templates/stages/notify-failure-stage.yml
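The `${{ if }}`/`${{ elseif }}`/`${{ else }}` blocks above are template expressions, resolved when the pipeline YAML is compiled rather than at runtime. A stripped-down sketch of the same pattern on its own (the variable name and channel values here are illustrative, not taken from the diff):

```yaml
variables:
  # Exactly one of these branches is kept at template compile time,
  # based on the branch name that triggered the run.
  ${{ if startsWith(variables['Build.SourceBranchName'], 'v2') }}:
    releaseChannel: beta
  ${{ elseif startsWith(variables['Build.SourceBranchName'], 'v1') }}:
    releaseChannel: candidate
  ${{ else }}:
    releaseChannel: edge   # fallback, not expected to be used

steps:
- bash: echo "deploying to $(releaseChannel)"
```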
@@ -2,67 +2,100 @@ jobs:
- job: extended_test
  variables:
  - name: IMAGE_NAME
    value: ubuntu-22.04
    value: ubuntu-18.04
  - name: PYTHON_VERSION
    value: 3.11
    value: 3.9
  - group: certbot-common
  strategy:
    matrix:
      linux-py36:
        PYTHON_VERSION: 3.6
        TOXENV: py36
      linux-py37:
        PYTHON_VERSION: 3.7
        TOXENV: py37
      linux-py38:
        PYTHON_VERSION: 3.8
        TOXENV: py38
      linux-py39:
        PYTHON_VERSION: 3.9
        TOXENV: py39
      linux-py310:
        PYTHON_VERSION: 3.10
        TOXENV: py310
      linux-py37-nopin:
        PYTHON_VERSION: 3.7
        TOXENV: py37
        CERTBOT_NO_PIN: 1
      linux-boulder-v1-integration-certbot-oldest:
        TOXENV: integration-certbot-oldest
        ACME_SERVER: boulder-v1
      linux-boulder-v2-integration-certbot-oldest:
        PYTHON_VERSION: 3.7
        TOXENV: integration-certbot-oldest
        ACME_SERVER: boulder-v2
      linux-boulder-v1-integration-nginx-oldest:
        TOXENV: integration-nginx-oldest
        ACME_SERVER: boulder-v1
      linux-boulder-v2-integration-nginx-oldest:
        PYTHON_VERSION: 3.7
        TOXENV: integration-nginx-oldest
        ACME_SERVER: boulder-v2
      linux-boulder-v1-py27-integration:
        PYTHON_VERSION: 2.7
        TOXENV: integration
        ACME_SERVER: boulder-v1
      linux-boulder-v2-py27-integration:
        PYTHON_VERSION: 2.7
        TOXENV: integration
        ACME_SERVER: boulder-v2
      linux-boulder-v1-py36-integration:
        PYTHON_VERSION: 3.6
        TOXENV: integration
        ACME_SERVER: boulder-v1
      linux-boulder-v2-py36-integration:
        PYTHON_VERSION: 3.6
        TOXENV: integration
        ACME_SERVER: boulder-v2
      linux-boulder-v1-py37-integration:
        PYTHON_VERSION: 3.7
        TOXENV: integration
        ACME_SERVER: boulder-v1
      linux-boulder-v2-py37-integration:
        PYTHON_VERSION: 3.7
        TOXENV: integration
        ACME_SERVER: boulder-v2
      linux-boulder-v1-py38-integration:
        PYTHON_VERSION: 3.8
        TOXENV: integration
        ACME_SERVER: boulder-v1
      linux-boulder-v2-py38-integration:
        PYTHON_VERSION: 3.8
        TOXENV: integration
        ACME_SERVER: boulder-v2
      linux-boulder-v2-py39-integration:
      linux-boulder-v1-py39-integration:
        PYTHON_VERSION: 3.9
        TOXENV: integration
        ACME_SERVER: boulder-v2
      linux-boulder-v2-py310-integration:
        PYTHON_VERSION: 3.10
        TOXENV: integration
        ACME_SERVER: boulder-v2
      linux-boulder-v2-py311-integration:
        PYTHON_VERSION: 3.11
        ACME_SERVER: boulder-v1
      linux-boulder-v2-py39-integration:
        PYTHON_VERSION: 3.9
        TOXENV: integration
        ACME_SERVER: boulder-v2
      nginx-compat:
        TOXENV: nginx_compat
      linux-integration-rfc2136:
        IMAGE_NAME: ubuntu-22.04
        IMAGE_NAME: ubuntu-18.04
        PYTHON_VERSION: 3.8
        TOXENV: integration-dns-rfc2136
      docker-dev:
        TOXENV: docker_dev
      le-modification:
        IMAGE_NAME: ubuntu-22.04
        TOXENV: modification
      farmtest-apache2:
      macos-farmtest-apache2:
        # We run one of these test farm tests on macOS to help ensure the
        # tests continue to work on the platform.
        IMAGE_NAME: macOS-10.15
        PYTHON_VERSION: 3.8
        TOXENV: test-farm-apache2
      farmtest-leauto-upgrades:
        PYTHON_VERSION: 3.7
        TOXENV: test-farm-leauto-upgrades
      farmtest-certonly-standalone:
        PYTHON_VERSION: 3.7
        TOXENV: test-farm-certonly-standalone
      farmtest-sdists:
        PYTHON_VERSION: 3.7
        TOXENV: test-farm-sdists
  pool:
    vmImage: $(IMAGE_NAME)
  steps:
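The `strategy.matrix` block above fans a single job definition out into many parallel copies; job-level `variables` supply defaults that individual matrix entries override. A stripped-down sketch of the mechanism (job and entry names here are invented for illustration):

```yaml
jobs:
- job: demo
  variables:
  - name: PYTHON_VERSION
    value: 3.9                 # default, overridden per matrix entry
  strategy:
    matrix:
      linux-old:
        PYTHON_VERSION: 3.6    # this copy of the job sees 3.6
      linux-new:
        PYTHON_VERSION: 3.9    # this copy keeps the default explicitly
  pool:
    vmImage: ubuntu-22.04
  steps:
  - bash: echo "testing on Python $(PYTHON_VERSION)"
```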
@@ -1,72 +1,65 @@
jobs:
- job: docker_build
  pool:
    vmImage: ubuntu-22.04
  strategy:
    matrix:
      amd64:
        DOCKER_ARCH: amd64
      arm32v6:
        DOCKER_ARCH: arm32v6
      arm64v8:
        DOCKER_ARCH: arm64v8
  # The default timeout of 60 minutes is a little low for compiling
  # cryptography on ARM architectures.
  timeoutInMinutes: 180
  steps:
  - bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
    displayName: Build the Docker images
  # We don't filter for the Docker Hub organization to continue to allow
  # easy testing of these scripts on forks.
  - bash: |
      set -e
      DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
      docker save --output images.tar $DOCKER_IMAGES
    displayName: Save the Docker images
  # If the name of the tar file or artifact changes, the deploy stage will
  # also need to be updated.
  - bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
    displayName: Prepare Docker artifact
  - task: PublishPipelineArtifact@1
    inputs:
      path: $(Build.ArtifactStagingDirectory)
      artifact: docker_$(DOCKER_ARCH)
    displayName: Store Docker artifact
- job: docker_run
  dependsOn: docker_build
  pool:
    vmImage: ubuntu-22.04
  steps:
  - task: DownloadPipelineArtifact@2
    inputs:
      artifact: docker_amd64
      path: $(Build.SourcesDirectory)
    displayName: Retrieve Docker images
  - bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
    displayName: Load Docker images
  - bash: |
      set -ex
      DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
      for DOCKER_IMAGE in ${DOCKER_IMAGES}
      do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
      done
    displayName: Run integration tests for Docker images
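The `docker_build` and `docker_run` jobs above communicate only through a pipeline artifact: `dependsOn` sequences the jobs, which otherwise run on separate fresh agents, and the artifact is the sole hand-off. A distilled sketch of that pattern (file and artifact names here are invented):

```yaml
jobs:
- job: build
  pool:
    vmImage: ubuntu-22.04
  steps:
  - bash: echo data > $(Build.ArtifactStagingDirectory)/out.txt
  - task: PublishPipelineArtifact@1
    inputs:
      path: $(Build.ArtifactStagingDirectory)
      artifact: demo
- job: run
  dependsOn: build            # ensures the artifact exists before this job starts
  pool:
    vmImage: ubuntu-22.04
  steps:
  - task: DownloadPipelineArtifact@2
    inputs:
      artifact: demo
      path: $(Build.SourcesDirectory)
  - bash: cat $(Build.SourcesDirectory)/out.txt
```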
# - job: docker_build
#   pool:
#     vmImage: ubuntu-18.04
#   strategy:
#     matrix:
#       amd64:
#         DOCKER_ARCH: amd64
#       # Do not run the heavy non-amd64 builds for test branches
#       ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
#         arm32v6:
#           DOCKER_ARCH: arm32v6
#         arm64v8:
#           DOCKER_ARCH: arm64v8
#   steps:
#   - bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
#     displayName: Build the Docker images
#   # We don't filter for the Docker Hub organization to continue to allow
#   # easy testing of these scripts on forks.
#   - bash: |
#       set -e
#       DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
#       docker save --output images.tar $DOCKER_IMAGES
#     displayName: Save the Docker images
#   # If the name of the tar file or artifact changes, the deploy stage will
#   # also need to be updated.
#   - bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
#     displayName: Prepare Docker artifact
#   - task: PublishPipelineArtifact@1
#     inputs:
#       path: $(Build.ArtifactStagingDirectory)
#       artifact: docker_$(DOCKER_ARCH)
#     displayName: Store Docker artifact
# - job: docker_run
#   dependsOn: docker_build
#   pool:
#     vmImage: ubuntu-18.04
#   steps:
#   - task: DownloadPipelineArtifact@2
#     inputs:
#       artifact: docker_amd64
#       path: $(Build.SourcesDirectory)
#     displayName: Retrieve Docker images
#   - bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
#     displayName: Load Docker images
#   - bash: |
#       set -ex
#       DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
#       for DOCKER_IMAGE in ${DOCKER_IMAGES}
#       do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
#       done
#     displayName: Run integration tests for Docker images
- job: installer_build
  pool:
    vmImage: windows-2019
    vmImage: vs2017-win2016
  steps:
  - task: UsePythonVersion@0
    inputs:
      versionSpec: 3.9
      architecture: x64
      versionSpec: 3.8
      architecture: x86
      addToPath: true
  - script: |
      python -m venv venv
      venv\Scripts\python tools\pipstrap.py
      venv\Scripts\python tools\pip_install.py -e windows-installer
    displayName: Prepare Windows installer build environment
  - script: |
      venv\Scripts\construct-windows-installer
  - script: python windows-installer/construct.py
    displayName: Build Certbot installer
  - task: CopyFiles@2
    inputs:
@@ -85,12 +78,20 @@ jobs:
    matrix:
      win2019:
        imageName: windows-2019
      win2016:
        imageName: vs2017-win2016
  pool:
    vmImage: $(imageName)
  steps:
  - powershell: |
      if ($PSVersionTable.PSVersion.Major -ne 5) {
        throw "Powershell version is not 5.x"
      }
    condition: eq(variables['imageName'], 'vs2017-win2016')
    displayName: Check Powershell 5.x is used in vs2017-win2016
  - task: UsePythonVersion@0
    inputs:
      versionSpec: 3.9
      versionSpec: 3.8
      addToPath: true
  - task: DownloadPipelineArtifact@2
    inputs:
@@ -105,114 +106,112 @@ jobs:
      PIP_NO_BUILD_ISOLATION: no
    displayName: Prepare Certbot-CI
  - script: |
      set PATH=%ProgramFiles%\Certbot\bin;%PATH%
      venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win_amd64.exe
      set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
      venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
    displayName: Run windows installer integration tests
  - script: |
      set PATH=%ProgramFiles%\Certbot\bin;%PATH%
      set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
      venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
    displayName: Run certbot integration tests
- job: snaps_build
  pool:
    vmImage: ubuntu-22.04
  strategy:
    matrix:
      amd64:
        SNAP_ARCH: amd64
      armhf:
        SNAP_ARCH: armhf
      arm64:
        SNAP_ARCH: arm64
  timeoutInMinutes: 0
  steps:
  - script: |
      set -e
      sudo apt-get update
      sudo apt-get install -y --no-install-recommends snapd
      sudo snap install --classic snapcraft
    displayName: Install dependencies
  - task: UsePythonVersion@0
    inputs:
      versionSpec: 3.8
      addToPath: true
  - task: DownloadSecureFile@1
    name: credentials
    inputs:
      secureFile: launchpad-credentials
  - script: |
      set -e
      git config --global user.email "$(Build.RequestedForEmail)"
      git config --global user.name "$(Build.RequestedFor)"
      mkdir -p ~/.local/share/snapcraft/provider/launchpad
      cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
      python3 tools/snap/build_remote.py ALL --archs ${SNAP_ARCH} --timeout $(snapBuildTimeout)
    displayName: Build snaps
  - script: |
      set -e
      mv *.snap $(Build.ArtifactStagingDirectory)
      mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
    displayName: Prepare artifacts
  - task: PublishPipelineArtifact@1
    inputs:
      path: $(Build.ArtifactStagingDirectory)
      artifact: snaps_$(SNAP_ARCH)
    displayName: Store snaps artifacts
- job: snap_run
  dependsOn: snaps_build
  pool:
    vmImage: ubuntu-22.04
  steps:
  - task: UsePythonVersion@0
    inputs:
      versionSpec: 3.8
      addToPath: true
  - script: |
      set -e
      sudo apt-get update
      sudo apt-get install -y --no-install-recommends nginx-light snapd
      python3 -m venv venv
      venv/bin/python tools/pipstrap.py
      venv/bin/python tools/pip_install.py -U tox
    displayName: Install dependencies
  - task: DownloadPipelineArtifact@2
    inputs:
      artifact: snaps_amd64
      path: $(Build.SourcesDirectory)/snap
    displayName: Retrieve Certbot snaps
  - script: |
      set -e
      sudo snap install --dangerous --classic snap/certbot_*.snap
    displayName: Install Certbot snap
  - script: |
      set -e
      venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
    displayName: Run tox
- job: snap_dns_run
  dependsOn: snaps_build
  pool:
    vmImage: ubuntu-22.04
  steps:
  - script: |
      set -e
      sudo apt-get update
      sudo apt-get install -y --no-install-recommends snapd
    displayName: Install dependencies
  - task: UsePythonVersion@0
    inputs:
      versionSpec: 3.8
      addToPath: true
  - task: DownloadPipelineArtifact@2
    inputs:
      artifact: snaps_amd64
      path: $(Build.SourcesDirectory)/snap
    displayName: Retrieve Certbot snaps
  - script: |
      set -e
      python3 -m venv venv
      venv/bin/python tools/pipstrap.py
      venv/bin/python tools/pip_install.py -e certbot-ci
    displayName: Prepare Certbot-CI
  - script: |
      set -e
      sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
    displayName: Test DNS plugins snaps
# - job: snaps_build
#   pool:
#     vmImage: ubuntu-18.04
#   timeoutInMinutes: 0
#   variables:
#     # Do not run the heavy non-amd64 builds for test branches
#     ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
#       ARCHS: amd64 arm64 armhf
#     ${{ if startsWith(variables['Build.SourceBranchName'], 'test-') }}:
#       ARCHS: amd64
#   steps:
#   - script: |
#       set -e
#       sudo apt-get update
#       sudo apt-get install -y --no-install-recommends snapd
#       sudo snap install --classic snapcraft
#     displayName: Install dependencies
#   - task: UsePythonVersion@0
#     inputs:
#       versionSpec: 3.8
#       addToPath: true
#   - task: DownloadSecureFile@1
#     name: credentials
#     inputs:
#       secureFile: launchpad-credentials
#   - script: |
#       set -e
#       git config --global user.email "$(Build.RequestedForEmail)"
#       git config --global user.name "$(Build.RequestedFor)"
#       mkdir -p ~/.local/share/snapcraft/provider/launchpad
#       cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
#       python3 tools/snap/build_remote.py ALL --archs ${ARCHS} --timeout 19800
#     displayName: Build snaps
#   - script: |
#       set -e
#       mv *.snap $(Build.ArtifactStagingDirectory)
#       mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
#     displayName: Prepare artifacts
#   - task: PublishPipelineArtifact@1
#     inputs:
#       path: $(Build.ArtifactStagingDirectory)
#       artifact: snaps
#     displayName: Store snaps artifacts
# - job: snap_run
#   dependsOn: snaps_build
#   pool:
#     vmImage: ubuntu-18.04
#   steps:
#   - task: UsePythonVersion@0
#     inputs:
#       versionSpec: 3.8
#       addToPath: true
#   - script: |
#       set -e
#       sudo apt-get update
#       sudo apt-get install -y --no-install-recommends nginx-light snapd
#       python3 -m venv venv
#       venv/bin/python tools/pipstrap.py
#       venv/bin/python tools/pip_install.py -U tox
#     displayName: Install dependencies
#   - task: DownloadPipelineArtifact@2
#     inputs:
#       artifact: snaps
#       path: $(Build.SourcesDirectory)/snap
#     displayName: Retrieve Certbot snaps
#   - script: |
#       set -e
#       sudo snap install --dangerous --classic snap/certbot_*_amd64.snap
#     displayName: Install Certbot snap
#   - script: |
#       set -e
#       venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
#     displayName: Run tox
# - job: snap_dns_run
#   dependsOn: snaps_build
#   pool:
#     vmImage: ubuntu-18.04
#   steps:
#   - script: |
#       set -e
#       sudo apt-get update
#       sudo apt-get install -y --no-install-recommends snapd
#     displayName: Install dependencies
#   - task: UsePythonVersion@0
#     inputs:
#       versionSpec: 3.8
#       addToPath: true
#   - task: DownloadPipelineArtifact@2
#     inputs:
#       artifact: snaps
#       path: $(Build.SourcesDirectory)/snap
#     displayName: Retrieve Certbot snaps
#   - script: |
#       set -e
#       python3 -m venv venv
#       venv/bin/python tools/pipstrap.py
#       venv/bin/python tools/pip_install.py -e certbot-ci
#     displayName: Prepare Certbot-CI
#   - script: |
#       set -e
#       sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
#     displayName: Test DNS plugins snaps
@@ -1,75 +0,0 @@
# As (somewhat) described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/process/templates?view=azure-devops#context,
# each template only has access to the parameters passed into it. To help make
# use of this design, we define snapReleaseChannel without a default value
# which requires the user of this template to define it as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/parameters-name?view=azure-pipelines#remarks.
# This makes the user of this template be explicit while allowing them to
# define their own parameters with defaults that make sense for that context.
parameters:
- name: snapReleaseChannel
  type: string
  values:
  - edge
  - beta
  - candidate

jobs:
# This job relies on credentials used to publish the Certbot snaps. This
# credential file was created by running:
#
# snapcraft logout
# snapcraft export-login --channels=candidate,beta,edge snapcraft.cfg
# (provide the shared snapcraft credentials when prompted)
#
# Then the file was added as a secure file in Azure pipelines
# with the name snapcraft.cfg by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
# including authorizing the file for use in the "nightly" and "release"
# pipelines as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#q-how-do-i-authorize-a-secure-file-for-use-in-a-specific-pipeline.
#
# This file has a maximum lifetime of one year and the current file will
# expire on 2023-09-06. The file will need to be updated before then to
# prevent automated deploys from breaking.
#
# Revoking these credentials can be done by changing the password of the
# account used to generate the credentials. See
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
# more info.
- job: publish_snap
  pool:
    vmImage: ubuntu-22.04
  variables:
  - group: certbot-common
  strategy:
    matrix:
      amd64:
        SNAP_ARCH: amd64
      arm32v6:
        SNAP_ARCH: armhf
      arm64v8:
        SNAP_ARCH: arm64
  steps:
  - bash: |
      set -e
      sudo apt-get update
      sudo apt-get install -y --no-install-recommends snapd
      sudo snap install --classic snapcraft
    displayName: Install dependencies
  - task: DownloadPipelineArtifact@2
    inputs:
      artifact: snaps_$(SNAP_ARCH)
      path: $(Build.SourcesDirectory)/snap
    displayName: Retrieve Certbot snaps
  - task: DownloadSecureFile@1
    name: snapcraftCfg
    inputs:
      secureFile: snapcraft.cfg
  - bash: |
      set -e
      export SNAPCRAFT_STORE_CREDENTIALS=$(cat "$(snapcraftCfg.secureFilePath)")
      for SNAP_FILE in snap/*.snap; do
        tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
      done
    displayName: Publish to Snap store
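Because `snapReleaseChannel` above declares `values:` but no `default:`, any pipeline that includes this template must pass the parameter explicitly or template compilation fails. A minimal sketch of a caller, assuming the template lives at templates/jobs/snap-deploy-job.yml as the repo's other template references suggest:

```yaml
jobs:
- template: templates/jobs/snap-deploy-job.yml
  parameters:
    snapReleaseChannel: edge   # required: the template declares no default
```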
@@ -1,69 +1,75 @@
jobs:
- job: test
  variables:
    PYTHON_VERSION: 3.11
    PYTHON_VERSION: 3.9
  strategy:
    matrix:
      macos-py37-cover:
        IMAGE_NAME: macOS-12
        PYTHON_VERSION: 3.7
        TOXENV: cover
      macos-cover:
        IMAGE_NAME: macOS-12
        TOXENV: cover
      windows-py37:
        IMAGE_NAME: windows-2019
        PYTHON_VERSION: 3.7
        TOXENV: py37-win
      windows-py39-cover:
        IMAGE_NAME: windows-2019
      macos-py27:
        IMAGE_NAME: macOS-10.15
        PYTHON_VERSION: 2.7
        TOXENV: py27
      macos-py39:
        IMAGE_NAME: macOS-10.15
        PYTHON_VERSION: 3.9
        TOXENV: cover-win
        TOXENV: py39
      windows-py36:
        IMAGE_NAME: vs2017-win2016
        PYTHON_VERSION: 3.6
        TOXENV: py36
      windows-py38-cover:
        IMAGE_NAME: vs2017-win2016
        PYTHON_VERSION: 3.8
        TOXENV: py38-cover
      windows-integration-certbot:
        IMAGE_NAME: windows-2019
        PYTHON_VERSION: 3.9
        IMAGE_NAME: vs2017-win2016
        PYTHON_VERSION: 3.8
        TOXENV: integration-certbot
      linux-oldest-tests-1:
        IMAGE_NAME: ubuntu-22.04
        PYTHON_VERSION: 3.7
        TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
        IMAGE_NAME: ubuntu-18.04
        TOXENV: py27-{acme,apache,apache-v2,certbot}-oldest
      linux-oldest-tests-2:
        IMAGE_NAME: ubuntu-22.04
        IMAGE_NAME: ubuntu-18.04
        TOXENV: py27-{dns,nginx}-oldest
      linux-py27:
        IMAGE_NAME: ubuntu-18.04
        PYTHON_VERSION: 2.7
        TOXENV: py27
      linux-py36:
        IMAGE_NAME: ubuntu-18.04
        PYTHON_VERSION: 3.6
        TOXENV: py36
      linux-py39-cover:
        IMAGE_NAME: ubuntu-18.04
        PYTHON_VERSION: 3.9
        TOXENV: py39-cover
      linux-py37-lint:
        IMAGE_NAME: ubuntu-18.04
        PYTHON_VERSION: 3.7
        TOXENV: '{dns,nginx}-oldest'
      linux-py37:
        IMAGE_NAME: ubuntu-22.04
        PYTHON_VERSION: 3.7
        TOXENV: py37
      linux-cover:
        IMAGE_NAME: ubuntu-22.04
        TOXENV: cover
      linux-lint:
        IMAGE_NAME: ubuntu-22.04
        TOXENV: lint-posix
      linux-mypy:
        IMAGE_NAME: ubuntu-22.04
        TOXENV: mypy-posix
        TOXENV: lint
      linux-py36-mypy:
        IMAGE_NAME: ubuntu-18.04
        PYTHON_VERSION: 3.6
        TOXENV: mypy
      linux-integration:
        IMAGE_NAME: ubuntu-22.04
        IMAGE_NAME: ubuntu-18.04
        PYTHON_VERSION: 3.8
        TOXENV: integration
        ACME_SERVER: pebble
      apache-compat:
        IMAGE_NAME: ubuntu-22.04
        IMAGE_NAME: ubuntu-18.04
        TOXENV: apache_compat
      le-modification:
        IMAGE_NAME: ubuntu-18.04
        TOXENV: modification
      apacheconftest:
        IMAGE_NAME: ubuntu-22.04
        IMAGE_NAME: ubuntu-18.04
        PYTHON_VERSION: 2.7
        TOXENV: apacheconftest-with-pebble
      nginxroundtrip:
        IMAGE_NAME: ubuntu-22.04
        IMAGE_NAME: ubuntu-18.04
        PYTHON_VERSION: 2.7
        TOXENV: nginxroundtrip
  pool:
    vmImage: $(IMAGE_NAME)
  steps:
  - template: ../steps/tox-steps.yml
- job: test_sphinx_builds
  pool:
    vmImage: ubuntu-20.04
  steps:
  - template: ../steps/sphinx-steps.yml
@@ -3,7 +3,7 @@ stages:
jobs:
- job: prepare
  pool:
    vmImage: windows-2019
    vmImage: vs2017-win2016
  steps:
  # If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
  - bash: |
@@ -1,19 +1,69 @@
parameters:
# We do not define acceptable values for this parameter here as it is passed
# through to ../jobs/snap-deploy-job.yml which does its own sanity checking.
- name: snapReleaseChannel
  type: string
  default: edge
  values:
  - edge
  - beta

stages:
- stage: Deploy
  jobs:
  - template: ../jobs/snap-deploy-job.yml
    parameters:
      snapReleaseChannel: ${{ parameters.snapReleaseChannel }}
  # This job relies on credentials used to publish the Certbot snaps. This
  # credential file was created by running:
  #
  # snapcraft logout
  # snapcraft login (provide the shared snapcraft credentials when prompted)
  # snapcraft export-login --channels=beta,edge snapcraft.cfg
  #
  # Then the file was added as a secure file in Azure pipelines
  # with the name snapcraft.cfg by following the instructions at
  # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
  # including authorizing the file in all pipelines as described at
  # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#how-do-i-authorize-a-secure-file-for-use-in-all-pipelines.
  #
  # This file has a maximum lifetime of one year and the current
  # file will expire on 2021-07-28 which is also tracked by
  # https://github.com/certbot/certbot/issues/7931. The file will
  # need to be updated before then to prevent automated deploys
  # from breaking.
  #
  # Revoking these credentials can be done by changing the password of the
  # account used to generate the credentials. See
  # https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
  # more info.
  - job: publish_snap
    pool:
      vmImage: ubuntu-18.04
    variables:
    - group: certbot-common
    steps:
    - bash: |
        set -e
        sudo apt-get update
        sudo apt-get install -y --no-install-recommends snapd
        sudo snap install --classic snapcraft
      displayName: Install dependencies
    - task: DownloadPipelineArtifact@2
      inputs:
        artifact: snaps
        path: $(Build.SourcesDirectory)/snap
      displayName: Retrieve Certbot snaps
    - task: DownloadSecureFile@1
      name: snapcraftCfg
      inputs:
        secureFile: snapcraft.cfg
    - bash: |
        set -e
        mkdir -p .snapcraft
        ln -s $(snapcraftCfg.secureFilePath) .snapcraft/snapcraft.cfg
        for SNAP_FILE in snap/*.snap; do
          tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
        done
      displayName: Publish to Snap store
  - job: publish_docker
    pool:
      vmImage: ubuntu-22.04
      vmImage: ubuntu-18.04
    strategy:
      matrix:
        amd64:
@@ -38,16 +88,11 @@ stages:
        # which was created by following the instructions at
        # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
        # The name given to this service account must match the value
        # given to containerRegistry below. The authentication used when
        # creating this service account was a personal access token
        # rather than a password to bypass 2FA. When Brad set this up,
        # Azure Pipelines failed to verify the credentials with an error
        # like "access is forbidden with a JWT issued from a personal
        # access token", but after saving them without verification, the
        # access token worked when the pipeline actually ran. "Grant
        # access to all pipelines" should also be checked on the service
        # account. The access token can be deleted on Docker Hub if
        # these credentials need to be revoked.
        # given to containerRegistry below. "Grant access to all
        # pipelines" should also be checked. To revoke these
        # credentials, we can change the password on the certbotbot
        # Docker Hub account or remove the account from the
        # Certbot organization on Docker Hub.
        containerRegistry: docker-hub
      displayName: Login to Docker Hub
    - bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
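The "Login to Docker Hub" step above feeds `containerRegistry: docker-hub` to what is presumably the built-in Docker@2 task, naming a Docker Registry service connection configured in the Azure DevOps project. A minimal sketch, assuming a service connection named docker-hub exists and using an invented image name:

```yaml
steps:
- task: Docker@2
  inputs:
    command: login
    containerRegistry: docker-hub   # name of the service connection, not a URL
  displayName: Login to Docker Hub
# Once logged in, ordinary docker CLI commands in later steps are authenticated.
- bash: docker push example/image:tag   # illustrative image name
```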
@@ -5,7 +5,7 @@ stages:
    variables:
    - group: certbot-common
    pool:
      vmImage: ubuntu-20.04
      vmImage: ubuntu-latest
    steps:
    - bash: |
        set -e
@@ -1,6 +1,6 @@
stages:
- stage: TestAndPackage
  jobs:
  - template: ../jobs/standard-tests-jobs.yml
  - template: ../jobs/extended-tests-jobs.yml
  # - template: ../jobs/standard-tests-jobs.yml
  # - template: ../jobs/extended-tests-jobs.yml
  - template: ../jobs/packaging-jobs.yml
@@ -1,24 +0,0 @@
steps:
- bash: |
    set -e
    sudo apt-get update
    sudo apt-get install -y --no-install-recommends libaugeas0
    FINAL_STATUS=0
    declare -a FAILED_BUILDS
    tools/venv.py
    source venv/bin/activate
    for doc_path in */docs
    do
      echo ""
      echo "##[group]Building $doc_path"
      if ! sphinx-build -W --keep-going -b html $doc_path $doc_path/_build/html; then
        FINAL_STATUS=1
        FAILED_BUILDS[${#FAILED_BUILDS[@]}]="${doc_path%/docs}"
      fi
      echo "##[endgroup]"
    done
    if [[ $FINAL_STATUS -ne 0 ]]; then
      echo "##[error]The following builds failed: ${FAILED_BUILDS[*]}"
      exit 1
    fi
  displayName: Build Sphinx Documentation
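The `##[group]`, `##[endgroup]`, and `##[error]` strings echoed in the script above are Azure Pipelines logging commands: the agent folds grouped output into a collapsible section in the log view and surfaces error lines in the run summary. A tiny standalone sketch:

```yaml
steps:
- bash: |
    echo "##[group]Collapsible section"
    echo "details hidden until the section is expanded"
    echo "##[endgroup]"
    echo "##[error]Shown prominently in the run summary"
  displayName: Demonstrate logging commands
```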
@@ -1,10 +1,6 @@
steps:
# We run brew update because we've seen attempts to install an older version
# of a package fail. See
# https://github.com/actions/virtual-environments/issues/3165.
- bash: |
    set -e
    brew update
    brew install augeas
  condition: startswith(variables['IMAGE_NAME'], 'macOS')
  displayName: Install MacOS dependencies
@@ -12,7 +8,7 @@ steps:
    set -e
    sudo apt-get update
    sudo apt-get install -y --no-install-recommends \
      python3-dev \
      python-dev \
      gcc \
      libaugeas0 \
      libssl-dev \
@@ -36,8 +32,8 @@ steps:
  # problems with its lack of real dependency resolution.
- bash: |
    set -e
    python3 tools/pipstrap.py
    python3 tools/pip_install.py -I tox virtualenv
    python tools/pipstrap.py
    python tools/pip_install.py -I tox virtualenv
  displayName: Install runtime dependencies
- task: DownloadSecureFile@1
  name: testFarmPem
@@ -49,34 +45,13 @@ steps:
    export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
    [ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
    env
    python3 -m tox
    if [[ "${TOXENV}" == *"oldest"* ]]; then
      tools/run_oldest_tests.sh
    else
      python -m tox
    fi
  env:
    AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
    AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
    AWS_EC2_PEM_FILE: $(testFarmPem.secureFilePath)
  displayName: Run tox
# For now, let's omit `set -e` and avoid the script exiting with a nonzero
# status code to prevent problems here from causing build failures. If
# this turns out to work well, we can change this.
- bash: |
    python3 tools/pip_install.py -I coverage
    case "$AGENT_OS" in
      Darwin)
        CODECOV_URL="https://uploader.codecov.io/latest/macos/codecov"
        ;;
      Linux)
        CODECOV_URL="https://uploader.codecov.io/latest/linux/codecov"
        ;;
      Windows_NT)
        CODECOV_URL="https://uploader.codecov.io/latest/windows/codecov.exe"
        ;;
      *)
        echo "Unexpected OS"
        exit 0
    esac
    curl --retry 3 -o codecov "$CODECOV_URL"
    chmod +x codecov
    coverage xml
    ./codecov || echo "Uploading coverage data failed"
  condition: and(eq(variables['uploadCoverage'], true), or(startsWith(variables['TOXENV'], 'cover'), startsWith(variables['TOXENV'], 'integration')))
  displayName: Upload coverage data
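The coverage-upload `condition` above composes runtime expression functions; unlike `${{ }}` template expressions, it is evaluated per run, so the same template behaves differently depending on the pipeline's variables. The same guard, restated with comments for readability (variable names as in the templates above; the echo body is a placeholder):

```yaml
steps:
# Runs only when this pipeline opted into coverage uploads AND the tox
# environment is one that actually produces coverage data.
- bash: echo "upload step would run"   # placeholder body
  condition: and(eq(variables['uploadCoverage'], true), or(startsWith(variables['TOXENV'], 'cover'), startsWith(variables['TOXENV'], 'integration')))
```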
@@ -8,4 +8,5 @@
.git
.tox
venv
venv3
docs
.envrc (2 changes)
@@ -3,7 +3,7 @@
# activated and then deactivated when you cd elsewhere. Developers have to have
# direnv set up and run `direnv allow` to allow this file to execute on their
# system. You can find more information at https://direnv.net/.
. venv/bin/activate
. venv3/bin/activate
# direnv doesn't support modifying PS1 so we unset it to squelch the error
# it'll otherwise print about this being done in the activate script. See
# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
.github/FUNDING.yml (vendored, 1 change)
@@ -1 +0,0 @@
custom: https://supporters.eff.org/donate/support-work-on-certbot
.github/codecov.yml (vendored, 7 changes)
@@ -1,7 +0,0 @@
# This disables all reporting from codecov. Let's just set it up to collect
# data for now and then we can play with the settings here.
comment: false
coverage:
  status:
    project: off
    patch: off
.github/stale.yml (vendored, new file, 35 changes)
@@ -0,0 +1,35 @@
# Configuration for https://github.com/marketplace/stale

# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 365

# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
# When changing this value, be sure to also update markComment below.
daysUntilClose: 30

# Ignore issues with an assignee (defaults to false)
exemptAssignees: true

# Label to use when marking as stale
staleLabel: needs-update

# Comment to post when marking as stale. Set to `false` to disable
markComment: >
  We've made a lot of changes to Certbot since this issue was opened. If you
  still have this issue with an up-to-date version of Certbot, can you please
  add a comment letting us know? This helps us to better see what issues are
  still affecting our users. If there is no activity in the next 30 days, this
  issue will be automatically closed.

# Comment to post when closing a stale Issue or Pull Request.
closeComment: >
  This issue has been closed due to lack of activity, but if you think it
  should be reopened, please open a new issue with a link to this one and we'll
  take a look.

# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 1

# Don't mark pull requests as stale.
only: issues
.github/workflows/stale.yml (vendored, 42 changes)
@@ -1,42 +0,0 @@
name: Update Stale Issues
on:
  schedule:
    # Run at midnight every night
    - cron: '24 1 * * *'
permissions:
  issues: write
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/stale@v6
      with:
        # REMOVEME: dry run to see if this works
        debug-only: true

        # Idle number of days before marking issues stale
        days-before-issue-stale: 365

        # Idle number of days before closing stale issues
        days-before-issue-close: 30

        # Ignore issues with an assignee
        exempt-all-issue-assignees: true

        # Label to use when marking as stale
        stale-issue-label: needs-update

        stale-issue-message: >
          We've made a lot of changes to Certbot since this issue was opened. If you
          still have this issue with an up-to-date version of Certbot, can you please
          add a comment letting us know? This helps us to better see what issues are
          still affecting our users. If there is no activity in the next 30 days, this
          issue will be automatically closed.

        close-issue-message: >
          This issue has been closed due to lack of activity, but if you think it
          should be reopened, please open a new issue with a link to this one and we'll
          take a look.

        # Limit the number of actions per hour, from 1-30. Default is 30
        operations-per-run: 1
.gitignore (vendored, 11 changes)
@@ -4,16 +4,16 @@
build/
dist*/
/venv*/
/kgs/
/.tox/
/releases*/
/log*
letsencrypt.log
certbot.log
poetry.lock
letsencrypt-auto-source/letsencrypt-auto.sig.lzma.base64

# coverage
.coverage
.coverage.*
/htmlcov/

/.vagrant
@@ -31,6 +31,12 @@ tags
# auth --cert-path --chain-path
/*.pem

# letstest
tests/letstest/letest-*/
tests/letstest/*.pem
tests/letstest/venv/
tests/letstest/venv3/

.venv

# pytest cache
@@ -59,4 +65,3 @@ certbot-dns*/certbot-dns*_arm*.txt
/certbot_amd64*.txt
/certbot_arm*.txt
certbot-dns*/snap
snapcraft.cfg
@@ -1,5 +1,6 @@
[settings]
skip_glob=venv*
skip=letsencrypt-auto-source
force_sort_within_sections=True
force_single_line=True
order_by_type=False
793
.pylintrc
793
.pylintrc
@@ -1,370 +1,51 @@
|
||||
[MAIN]
|
||||
[MASTER]
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
# use as many jobs as there are cores
|
||||
jobs=0
|
||||
|
||||
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||
# all available extensions.
|
||||
#enable-all-extensions=
|
||||
|
||||
# In error mode, messages with a category besides ERROR or FATAL are
|
||||
# suppressed, and no reports are done by default. Error mode is compatible with
|
||||
# disabling specific errors.
|
||||
#errors-only=
|
||||
|
||||
# Always return a 0 (non-error) status code, even if lint errors are found.
|
||||
# This is primarily useful in continuous integration scripts.
|
||||
#exit-zero=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-allow-list=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
|
||||
# for backward compatibility.)
|
||||
extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
|
||||
|
||||
# Return non-zero exit code if any of these messages/categories are detected,
|
||||
# even if score is above --fail-under value. Syntax same as enable. Messages
|
||||
# specified are enabled, while categories only check already-enabled messages.
|
||||
fail-on=
|
||||
|
||||
# Specify a score threshold under which the program will exit with error.
|
||||
fail-under=10
|
||||
|
||||
# Interpret the stdin as a python script, whose filename needs to be passed as
|
||||
# the module_or_package argument.
|
||||
#from-stdin=
|
||||
|
||||
# Files or directories to be skipped. They should be base names, not paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regular expressions patterns to the
|
||||
# ignore-list. The regex matches against paths and can be in Posix or Windows
|
||||
# format. Because '\' represents the directory delimiter on Windows systems, it
|
||||
# can't be used as an escape character.
|
||||
ignore-paths=
|
||||
|
||||
# Files or directories matching the regular expression patterns are skipped.
|
||||
# The regex matches against base names, not paths. The default value ignores
|
||||
# Emacs file locks
|
||||
ignore-patterns=^\.#
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
# Specify a configuration file.
|
||||
#rcfile=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
# CERTBOT COMMENT
|
||||
# This is needed for pylint to import linter_plugin.py since
|
||||
# https://github.com/PyCQA/pylint/pull/3396.
|
||||
init-hook="import pylint.config, os, sys; sys.path.append(os.path.dirname(pylint.config.PYLINTRC))"
|
||||
#init-hook=
|
||||
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use, and will cap the count on Windows to
|
||||
# avoid hangs.
|
||||
jobs=0
|
||||
# Profiled execution.
|
||||
profile=no
|
||||
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
# object. This can help the performance when dealing with large functions or
|
||||
# complex, nested conditions.
|
||||
limit-inference-results=100
|
||||
|
||||
# List of plugins (as comma separated values of python module names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=linter_plugin
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
ignore=CVS
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Minimum Python version to use for version dependent checks. Will default to
|
||||
# the version used to run pylint.
|
||||
py-version=3.10
|
||||
|
||||
# Discover python modules and packages in the file system subtree.
|
||||
recursive=no
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
# In verbose mode, extra non-checker-related info will be displayed.
|
||||
#verbose=
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming style matching correct argument names.
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma.
|
||||
bad-names=foo,
|
||||
bar,
|
||||
baz,
|
||||
toto,
|
||||
tutu,
|
||||
tata
|
||||
|
||||
# Bad variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be refused
|
||||
bad-names-rgxs=
|
||||
|
||||
# Naming style matching correct class attribute names.
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming style matching correct function names.
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
function-rgx=[a-z_][a-z0-9_]{2,40}$
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=i,
|
||||
j,
|
||||
k,
|
||||
ex,
|
||||
Run,
|
||||
_,
|
||||
fd,
|
||||
logger
|
||||
|
||||
# Good variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be accepted
|
||||
good-names-rgxs=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name.
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming style matching correct inline iteration names.
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
method-rgx=[a-z_][a-z0-9_]{2,50}$
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=(__.*__)|(test_[A-Za-z0-9_]*)|(_.*)|(.*Test$)
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
variable-rgx=[a-z_][a-z0-9_]{1,30}$
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
__post_init__
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=cls
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when caught.
|
||||
overgeneral-exceptions=BaseException,
|
||||
Exception
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
# git history told me that "This does something silly/broken..."
|
||||
#indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1250
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of external dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||
# external) dependencies to the given file (report RP0402 must not be
|
||||
# disabled).
|
||||
import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging,logger
|
||||
# List of plugins (as comma separated values of python modules names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=linter_plugin
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||
# UNDEFINED.
|
||||
confidence=HIGH,
|
||||
CONTROL_FLOW,
|
||||
INFERENCE,
|
||||
INFERENCE_FAILURE,
|
||||
UNDEFINED
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time. See also the "--disable" option for examples.
#enable=

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
# CERTBOT COMMENT
# 1) Once certbot codebase is claimed to be compatible exclusively with Python 3,
# the useless-object-inheritance check can be enabled again, and code fixed accordingly.
@@ -372,203 +53,261 @@ confidence=HIGH,
# See https://github.com/PyCQA/pylint/issues/1498.
# 3) Same as point 2 for no-value-for-parameter.
# See https://github.com/PyCQA/pylint/issues/2820.
# 4) raise-missing-from makes it an error to raise an exception from an except
# block without using explicit exception chaining. While explicit exception
# chaining results in a slightly more informative traceback, I don't think
# it's beneficial enough for us to change all of our current instances and
# give Certbot developers errors about this when they're working on new code
# in the future. You can read more about exception chaining and this pylint
# check at
# https://blog.ram.rachum.com/post/621791438475296768/improving-python-exception-chaining-with.
# (a short illustration follows the disable lists below)
# 5) wrong-import-order generates false positives and a pylint developer
# suggests that people using isort should disable this check at
# https://github.com/PyCQA/pylint/issues/3817#issuecomment-687892090.
# 6) unspecified-encoding generates errors when encoding is not specified
# in a call to the built-in open function. This relates more to a design decision
# (unspecified encoding makes the open function use the default encoding of the system)
# than a clear flaw on which a check should be enforced. Anyway the project does
# not need to enforce encoding on files so we disable this check.
# 7) consider-using-f-string is "suggesting" to move to f-string when possible with an error. This
# clearly relates to code design and not to potential defects in the code, let's just ignore that.
disable=fixme,locally-disabled,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue,raise-missing-from,wrong-import-order,unspecified-encoding,consider-using-f-string,raw-checker-failed,bad-inline-option,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,use-symbolic-message-instead
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue
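To make point 4 above concrete, here is a hedged, hypothetical sketch (not code from this repository) of the implicit versus explicit exception chaining that raise-missing-from distinguishes:

# Hypothetical example, not from the Certbot tree: pylint's raise-missing-from
# flags the first function and accepts the second.
def parse_port(value):
    try:
        return int(value)
    except ValueError:
        # Implicit chaining: __context__ is set, but raise-missing-from warns here.
        raise RuntimeError("invalid port: %s" % value)

def parse_port_chained(value):
    try:
        return int(value)
    except ValueError as error:
        # Explicit chaining: "from error" sets __cause__ and satisfies the check.
        raise RuntimeError("invalid port: %s" % value) from error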

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]

# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text

# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no

# Tells whether to display a full report or only the messages
reports=yes

# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)

# Add a comment according to your evaluation note. This is used by the global
# evaluation report (RP0004).
comment=no

# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=


[METHOD_ARGS]
[BASIC]

# List of qualified names (i.e., library.method) which require a timeout
# parameter e.g. 'requests.api.get,requests.api.post'
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
# Required attributes for module, separated by a comma
required-attributes=

# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input,file

# Good variable names which should always be accepted, separated by a comma
good-names=f,i,j,k,ex,Run,_,fd,logger

# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata

# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=

# Include a hint for the correct naming format with invalid-name
include-naming-hint=no

# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,40}$

# Naming hint for function names
function-name-hint=[a-z_][a-z0-9_]{2,40}$

# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{1,30}$

# Naming hint for variable names
variable-name-hint=[a-z_][a-z0-9_]{2,30}$

# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$

# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$

# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$

# Naming hint for attribute names
attr-name-hint=[a-z_][a-z0-9_]{2,30}$

# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$

# Naming hint for argument names
argument-name-hint=[a-z_][a-z0-9_]{2,30}$

# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$

# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$

# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$

# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$

# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$

# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$

# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$

# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$

# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,50}$

# Naming hint for method names
method-name-hint=[a-z_][a-z0-9_]{2,50}$

# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=(__.*__)|(test_[A-Za-z0-9_]*)|(_.*)|(.*Test$)

# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1


[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
      XXX,
      TODO

# Regular expression of note tags to take in consideration.
notes-rgx=
notes=FIXME,XXX,TODO


[REFACTORING]
[LOGGING]

# Maximum number of nested blocks for function / method body
max-nested-blocks=5

# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error


[REPORTS]

# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
# 'convention', and 'info' which contain the number of messages in each
# category, as well as 'statement' which is the total number of statements
# analyzed. This score is used by the global evaluation report (RP0004).
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
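A minimal sketch of how pylint evaluates the expression above, with made-up message counts:

# Hypothetical totals; pylint substitutes the real counts for these names.
fatal, error, warning, refactor, convention = 0, 2, 5, 3, 10
statement = 400

score = max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
print(score)  # 9.3 for these sample counts; any fatal message forces the score to 0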

# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
msg-template=

# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
#output-format=

# Tells whether to display a full report or only the messages.
reports=no

# Activate the evaluation score.
score=yes


[SIMILARITIES]

# Comments are removed from the similarity computation
ignore-comments=yes

# Docstrings are removed from the similarity computation
ignore-docstrings=yes

# Imports are removed from the similarity computation
ignore-imports=yes

# Signatures are removed from the similarity computation
ignore-signatures=yes

# Minimum lines number of a similarity.
min-similarity-lines=6


[STRING]

# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no

# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no


[TYPECHECK]

# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager

# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=

# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes

# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes

# List of symbolic message names to ignore for Mixin members.
ignored-checks-for-mixins=no-member,
                          not-async-context-manager,
                          not-context-manager,
                          attribute-defined-outside-init

# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace,Field,Header,JWS,closing

# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis
ignored-modules=pkg_resources,confargparse,argparse

# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes

# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1

# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1

# Regex pattern to define which classes are considered mixins.
mixin-class-rgx=.*[Mm]ixin

# List of decorators that change the signature of a decorated function.
signature-mutators=
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging,logger


[VARIABLES]

# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=

# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes

# List of names allowed to shadow builtins
allowed-redefined-builtins=

# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
          _cb

# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
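A quick sketch of how this pattern classifies names (the sample names are illustrative only):

import re

# The dummy-variables-rgx value from above.
DUMMY_RGX = re.compile(r'_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_')

# '_', '_leftover', 'unused_result', and 'dummy' count as dummy variables;
# 'result' does not, so leaving it unused still triggers a warning.
for name in ('_', '_leftover', 'unused_result', 'dummy', 'result'):
    print(name, bool(DUMMY_RGX.match(name)))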

# Argument names that match this expression will be ignored.
ignored-argument-names=_.*|^ignored_|^unused_

# Tells whether we should check for unused import in __init__ files.
init-import=no

# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=(unused)?_.*|dummy

# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=


[SIMILARITIES]

# Minimum lines number of a similarity.
min-similarity-lines=6

# Ignore comments when computing similarities.
ignore-comments=yes

# Ignore docstrings when computing similarities.
ignore-docstrings=yes

# Ignore imports when computing similarities.
ignore-imports=yes


[FORMAT]

# Maximum number of characters on a single line.
max-line-length=100

# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$

# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no

# List of optional constructs for which whitespace checking is disabled
no-space-check=trailing-comma

# Maximum number of lines in a module
max-module-lines=1250

# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '

# Number of spaces of indent required inside a hanging or continued line.
# This does something silly/broken...
#indent-after-paren=4


[TYPECHECK]

# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes

# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis
ignored-modules=pkg_resources,confargparse,argparse,six.moves,six.moves.urllib
# import errors ignored only in 1.4.4
# https://bitbucket.org/logilab/pylint/commits/cd000904c9e2

# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=Field,Header,JWS,closing

# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=yes

# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent


[IMPORTS]

# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec

# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=

# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=

# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=


[CLASSES]

# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defined in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by,implementedBy,providedBy

# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp

# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls

# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs


[EXCEPTIONS]

# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

AUTHORS.md
@@ -1,7 +1,6 @@
Authors
=======

* [Aaron Gable](https://github.com/aarongable)
* [Aaron Zirbes](https://github.com/aaronzirbes)
* Aaron Zuehlke
* Ada Lovelace
@@ -17,10 +16,7 @@ Authors
* [Alex Halderman](https://github.com/jhalderm)
* [Alex Jordan](https://github.com/strugee)
* [Alex Zorin](https://github.com/alexzorin)
* [Alexis Hancock](https://github.com/zoracon)
* [Amir Omidi](https://github.com/aaomidi)
* [Amjad Mashaal](https://github.com/TheNavigat)
* [amplifi](https://github.com/amplifi)
* [Andrew Murray](https://github.com/radarhere)
* [Andrzej Górski](https://github.com/andrzej3393)
* [Anselm Levskaya](https://github.com/levskaya)
@@ -64,7 +60,6 @@ Authors
* [DanCld](https://github.com/DanCld)
* [Daniel Albers](https://github.com/AID)
* [Daniel Aleksandersen](https://github.com/da2x)
* [Daniel Almasi](https://github.com/almasen)
* [Daniel Convissor](https://github.com/convissor)
* [Daniel "Drex" Drexler](https://github.com/aeturnum)
* [Daniel Huang](https://github.com/dhuang)
@@ -118,7 +113,6 @@ Authors
* [Jacob Sachs](https://github.com/jsachs)
* [Jairo Llopis](https://github.com/Yajo)
* [Jakub Warmuz](https://github.com/kuba)
* [James Balazs](https://github.com/jamesbalazs)
* [James Kasten](https://github.com/jdkasten)
* [Jason Grinblat](https://github.com/ptychomancer)
* [Jay Faulkner](https://github.com/jayofdoom)
@@ -142,7 +136,6 @@ Authors
* [Joubin Jabbari](https://github.com/joubin)
* [Juho Juopperi](https://github.com/jkjuopperi)
* [Kane York](https://github.com/riking)
* [Katsuyoshi Ozaki](https://github.com/moratori)
* [Kenichi Maehashi](https://github.com/kmaehashi)
* [Kenneth Skovhede](https://github.com/kenkendk)
* [Kevin Burke](https://github.com/kevinburke)
@@ -178,7 +171,6 @@ Authors
* [Mathieu Leduc-Hamel](https://github.com/mlhamel)
* [Matt Bostock](https://github.com/mattbostock)
* [Matthew Ames](https://github.com/SuperMatt)
* [Matthew W. Thomas](https://github.com/mwt)
* [Michael Schumacher](https://github.com/schumaml)
* [Michael Strache](https://github.com/Jarodiv)
* [Michael Sverdlin](https://github.com/sveder)
@@ -203,21 +195,18 @@ Authors
* [osirisinferi](https://github.com/osirisinferi)
* Patrick Figel
* [Patrick Heppler](https://github.com/PatrickHeppler)
* [Paul Buonopane](https://github.com/Zenexer)
* [Paul Feitzinger](https://github.com/pfeyz)
* [Pavan Gupta](https://github.com/pavgup)
* [Pavel Pavlov](https://github.com/ghost355)
* [Peter Conrad](https://github.com/pconrad-fb)
* [Peter Eckersley](https://github.com/pde)
* [Peter Mosmans](https://github.com/PeterMosmans)
* [Phil Martin](https://github.com/frillip)
* [Philippe Langlois](https://github.com/langloisjp)
* [Philipp Spitzer](https://github.com/spitza)
* [Piero Steinger](https://github.com/Jadaw1n)
* [Pierre Jaury](https://github.com/kaiyou)
* [Piotr Kasprzyk](https://github.com/kwadrat)
* [Prayag Verma](https://github.com/pra85)
* [Preston Locke](https://github.com/Preston12321)
* [Rasesh Patel](https://github.com/raspat1)
* [Reinaldo de Souza Jr](https://github.com/juniorz)
* [Remi Rampin](https://github.com/remram44)
@@ -281,7 +270,6 @@ Authors
* [Wilfried Teiken](https://github.com/wteiken)
* [Willem Fibbe](https://github.com/fibbers)
* [William Budington](https://github.com/Hainish)
* [Will Greenberg](https://github.com/wgreenberg)
* [Will Newby](https://github.com/willnewby)
* [Will Oller](https://github.com/willoller)
* [Yan](https://github.com/diracdeltas)
@@ -292,4 +280,3 @@ Authors
* [Yuseong Cho](https://github.com/g6123)
* [Zach Shepherd](https://github.com/zjs)
* [陈三](https://github.com/chenxsan)
* [Shahar Naveh](https://github.com/ShaharNaveh)

@@ -1,5 +1,5 @@
# This Dockerfile builds an image for development.
FROM ubuntu:focal
FROM debian:buster

# Note: this only exposes the port to other docker containers.
EXPOSE 80 443
@@ -8,14 +8,13 @@ WORKDIR /opt/certbot/src

COPY . .
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install apache2 git python3-dev \
        python3-venv gcc libaugeas0 libssl-dev libffi-dev ca-certificates \
        openssl nginx-light -y --no-install-recommends && \
    apt-get install apache2 git python3-dev python3-venv gcc libaugeas0 \
        libssl-dev libffi-dev ca-certificates openssl nginx-light -y && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* \
        /tmp/* \
        /var/tmp/*

RUN VENV_NAME="../venv" python3 tools/venv.py
RUN VENV_NAME="../venv3" python3 tools/venv3.py

ENV PATH /opt/certbot/venv/bin:$PATH
ENV PATH /opt/certbot/venv3/bin:$PATH

@@ -7,7 +7,7 @@ questions.
## My operating system is (include version):


## I installed Certbot with (snap, OS package manager, pip, certbot-auto, etc):
## I installed Certbot with (certbot-auto, OS package manager, pip, etc):


## I ran this command and it produced this output:
@@ -4,6 +4,5 @@ include pytest.ini
recursive-include docs *
recursive-include examples *
recursive-include tests *
include acme/py.typed
global-exclude __pycache__
global-exclude *.py[cod]

@@ -2,10 +2,11 @@

This module is an implementation of the `ACME protocol`_.

.. _`ACME protocol`: https://datatracker.ietf.org/doc/html/rfc8555
.. _`ACME protocol`: https://ietf-wg-acme.github.io/acme

"""
import sys
import warnings

# This code exists to keep backwards compatibility with people using acme.jose
# before it became the standalone josepy package.
@@ -19,3 +20,10 @@ for mod in list(sys.modules):
    # preserved (acme.jose.* is josepy.*)
    if mod == 'josepy' or mod.startswith('josepy.'):
        sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]
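A hedged sketch of what this aliasing loop achieves (it assumes josepy is installed; the acme package imports it itself before the loop runs):

import sys

import josepy
import acme  # importing the package executes the aliasing loop above

# The legacy dotted name now resolves to the very same module object.
assert sys.modules['acme.jose'] is josepy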

if sys.version_info[0] == 2:
    warnings.warn(
        "Python 2 support will be dropped in the next release of acme. "
        "Please upgrade your Python version.",
        PendingDeprecationWarning,
    )  # pragma: no cover

@@ -5,56 +5,42 @@ import functools
import hashlib
import logging
import socket
from typing import cast
from typing import Any
from typing import Dict
from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TypeVar
from typing import Union

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import hashes  # type: ignore
import josepy as jose
from OpenSSL import crypto
from OpenSSL import SSL
import requests
import six
from OpenSSL import SSL  # type: ignore # https://github.com/python/typeshed/issues/2052
from OpenSSL import crypto

from acme import crypto_util
from acme import errors
from acme import fields
from acme.mixins import ResourceMixin, TypeMixin

logger = logging.getLogger(__name__)

GenericChallenge = TypeVar('GenericChallenge', bound='Challenge')


class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json
    """ACME challenge."""
    TYPES: Dict[str, Type['Challenge']] = {}
    TYPES = {}  # type: dict

    @classmethod
    def from_json(cls: Type[GenericChallenge],
                  jobj: Mapping[str, Any]) -> Union[GenericChallenge, 'UnrecognizedChallenge']:
    def from_json(cls, jobj):
        try:
            return cast(GenericChallenge, super().from_json(jobj))
            return super(Challenge, cls).from_json(jobj)
        except jose.UnrecognizedTypeError as error:
            logger.debug(error)
            return UnrecognizedChallenge.from_json(jobj)


class ChallengeResponse(jose.TypedJSONObjectWithFields):
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json
    """ACME challenge response."""
    TYPES: Dict[str, Type['ChallengeResponse']] = {}

    def to_partial_json(self) -> Dict[str, Any]:
        # Removes the `type` field which is inserted by TypedJSONObjectWithFields.to_partial_json.
        # This field breaks RFC8555 compliance.
        jobj = super().to_partial_json()
        jobj.pop(self.type_field_name, None)
        return jobj
    TYPES = {}  # type: dict
    resource_type = 'challenge'
    resource = fields.Resource(resource_type)


class UnrecognizedChallenge(Challenge):
@@ -69,17 +55,16 @@ class UnrecognizedChallenge(Challenge):
    :ivar jobj: Original JSON decoded object.

    """
    jobj: Dict[str, Any]

    def __init__(self, jobj: Mapping[str, Any]) -> None:
        super().__init__()
    def __init__(self, jobj):
        super(UnrecognizedChallenge, self).__init__()
        object.__setattr__(self, "jobj", jobj)

    def to_partial_json(self) -> Dict[str, Any]:
    def to_partial_json(self):
        return self.jobj  # pylint: disable=no-member

    @classmethod
    def from_json(cls, jobj: Mapping[str, Any]) -> 'UnrecognizedChallenge':
    def from_json(cls, jobj):
        return cls(jobj)


@@ -93,13 +78,13 @@ class _TokenChallenge(Challenge):
    """Minimum size of the :attr:`token` in bytes."""

    # TODO: acme-spec doesn't specify token as base64-encoded value
    token: bytes = jose.field(
    token = jose.Field(
        "token", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))

    # XXX: rename to ~token_good_for_url
    @property
    def good_token(self) -> bool:  # XXX: @token.decoder
    def good_token(self):  # XXX: @token.decoder
        """Is `token` good?

        .. todo:: acme-spec wants "It MUST NOT contain any non-ASCII
@@ -116,13 +101,13 @@ class _TokenChallenge(Challenge):
class KeyAuthorizationChallengeResponse(ChallengeResponse):
    """Response to Challenges based on Key Authorization.

    :param str key_authorization:
    :param unicode key_authorization:

    """
    key_authorization: str = jose.field("keyAuthorization")
    key_authorization = jose.Field("keyAuthorization")
    thumbprint_hash_function = hashes.SHA256

    def verify(self, chall: 'KeyAuthorizationChallenge', account_public_key: jose.JWK) -> bool:
    def verify(self, chall, account_public_key):
        """Verify the key authorization.

        :param KeyAuthorization chall: Challenge that corresponds to
@@ -134,7 +119,7 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
        :rtype: bool

        """
        parts = self.key_authorization.split('.')  # pylint: disable=no-member
        parts = self.key_authorization.split('.')
        if len(parts) != 2:
            logger.debug("Key authorization (%r) is not well formed",
                         self.key_authorization)
@@ -154,39 +139,37 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):

        return True

    def to_partial_json(self) -> Dict[str, Any]:
        jobj = super().to_partial_json()
    def to_partial_json(self):
        jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
        jobj.pop('keyAuthorization', None)
        return jobj


# TODO: Make this method a generic of K (bound=KeyAuthorizationChallenge), response_cls of type
# Type[K] and use it in response/response_and_validation return types once Python 3.6 support is
# dropped (do not support generic ABC classes, see https://github.com/python/typing/issues/449).
class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class KeyAuthorizationChallenge(_TokenChallenge):
    """Challenge based on Key Authorization.

    :param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
        that will be used to generate ``response``.
        that will be used to generate `response`.
    :param str typ: type of the challenge
    """
    typ: str = NotImplemented
    response_cls: Type[KeyAuthorizationChallengeResponse] = NotImplemented
    typ = NotImplemented
    response_cls = NotImplemented
    thumbprint_hash_function = (
        KeyAuthorizationChallengeResponse.thumbprint_hash_function)

    def key_authorization(self, account_key: jose.JWK) -> str:
    def key_authorization(self, account_key):
        """Generate Key Authorization.

        :param JWK account_key:
        :rtype str:
        :rtype unicode:

        """
        return self.encode("token") + "." + jose.b64encode(
            account_key.thumbprint(
                hash_function=self.thumbprint_hash_function)).decode()
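A hedged sketch of the computation above using josepy directly, with a throwaway key and a made-up token (illustration only, not code from this diff):

import josepy as jose
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa

# Throwaway account key; recent cryptography releases need no backend argument.
account_key = jose.JWKRSA(key=rsa.generate_private_key(public_exponent=65537, key_size=2048))

# self.encode("token") base64url-encodes the raw token bytes.
token = jose.b64encode(b'made-up-token-bytes').decode()
key_authorization = token + '.' + jose.b64encode(
    account_key.thumbprint(hash_function=hashes.SHA256)).decode()
print(key_authorization)  # <token>.<account key thumbprint>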

    def response(self, account_key: jose.JWK) -> KeyAuthorizationChallengeResponse:
    def response(self, account_key):
        """Generate response to the challenge.

        :param JWK account_key:
@@ -199,7 +182,7 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
            key_authorization=self.key_authorization(account_key))

    @abc.abstractmethod
    def validation(self, account_key: jose.JWK, **kwargs: Any) -> Any:
    def validation(self, account_key, **kwargs):
        """Generate validation for the challenge.

        Subclasses must implement this method, but they are likely to
@@ -213,8 +196,7 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
        """
        raise NotImplementedError()  # pragma: no cover

    def response_and_validation(self, account_key: jose.JWK, *args: Any, **kwargs: Any
                                ) -> Tuple[KeyAuthorizationChallengeResponse, Any]:
    def response_and_validation(self, account_key, *args, **kwargs):
        """Generate response and validation.

        Convenience function that return results of `response` and
@@ -233,14 +215,14 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
    """ACME dns-01 challenge response."""
    typ = "dns-01"

    def simple_verify(self, chall: 'DNS01', domain: str, account_public_key: jose.JWK) -> bool:  # pylint: disable=unused-argument
    def simple_verify(self, chall, domain, account_public_key):  # pylint: disable=unused-argument
        """Simple verify.

        This method no longer checks DNS records and is a simple wrapper
        around `KeyAuthorizationChallengeResponse.verify`.

        :param challenges.DNS01 chall: Corresponding challenge.
        :param str domain: Domain name being verified.
        :param unicode domain: Domain name being verified.
        :param JWK account_public_key: Public key for the key pair
            being authorized.

@@ -264,24 +246,23 @@ class DNS01(KeyAuthorizationChallenge):
    LABEL = "_acme-challenge"
    """Label clients prepend to the domain name being validated."""

    def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
    def validation(self, account_key, **unused_kwargs):
        """Generate validation.

        :param JWK account_key:
        :rtype: str
        :rtype: unicode

        """
        return jose.b64encode(hashlib.sha256(self.key_authorization(
            account_key).encode("utf-8")).digest()).decode()
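The same digest can be reproduced with the standard library alone; a sketch with a made-up key authorization:

import base64
import hashlib

key_authorization = 'made-up-token.made-up-thumbprint'  # placeholder value

digest = hashlib.sha256(key_authorization.encode('utf-8')).digest()
# jose.b64encode is base64url without padding, hence the stripped '='.
txt_value = base64.urlsafe_b64encode(digest).rstrip(b'=').decode()
print(txt_value)  # the value to publish in the _acme-challenge TXT record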

    def validation_domain_name(self, name: str) -> str:
    def validation_domain_name(self, name):
        """Domain name for TXT validation record.

        :param str name: Domain name being validated.
        :rtype: str
        :param unicode name: Domain name being validated.

        """
        return f"{self.LABEL}.{name}"
        return "{0}.{1}".format(self.LABEL, name)


@ChallengeResponse.register
@@ -300,16 +281,14 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
    WHITESPACE_CUTSET = "\n\r\t "
    """Whitespace characters which should be ignored at the end of the body."""

    def simple_verify(self, chall: 'HTTP01', domain: str, account_public_key: jose.JWK,
                      port: Optional[int] = None, timeout: int = 30) -> bool:
    def simple_verify(self, chall, domain, account_public_key, port=None):
        """Simple verify.

        :param challenges.SimpleHTTP chall: Corresponding challenge.
        :param str domain: Domain name being verified.
        :param unicode domain: Domain name being verified.
        :param JWK account_public_key: Public key for the key pair
            being authorized.
        :param int port: Port used in the validation.
        :param int timeout: Timeout in seconds.

        :returns: ``True`` iff validation with the files currently served by the
            HTTP server is successful.
@@ -331,19 +310,10 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
        uri = chall.uri(domain)
        logger.debug("Verifying %s at %s...", chall.typ, uri)
        try:
            http_response = requests.get(uri, verify=False, timeout=timeout)
            http_response = requests.get(uri, verify=False)
        except requests.exceptions.RequestException as error:
            logger.error("Unable to reach %s: %s", uri, error)
            return False
        # By default, http_response.text will try to guess the encoding to use
        # when decoding the response to Python unicode strings. This guesswork
        # is error prone. RFC 8555 specifies that HTTP-01 responses should be
        # key authorizations with possible trailing whitespace. Since key
        # authorizations must be composed entirely of the base64url alphabet
        # plus ".", we tell requests that the response should be ASCII. See
        # https://datatracker.ietf.org/doc/html/rfc8555#section-8.3 for more
        # info.
        http_response.encoding = "ascii"
        logger.debug("Received %s: %s. Headers: %s", http_response,
                     http_response.text, http_response.headers)

@@ -367,31 +337,31 @@ class HTTP01(KeyAuthorizationChallenge):
    """URI root path for the server provisioned resource."""

    @property
    def path(self) -> str:
    def path(self):
        """Path (starting with '/') for provisioned resource.

        :rtype: str
        :rtype: string

        """
        return '/' + self.URI_ROOT_PATH + '/' + self.encode('token')

    def uri(self, domain: str) -> str:
    def uri(self, domain):
        """Create an URI to the provisioned resource.

        Forms an URI to the HTTPS server provisioned resource
        (containing :attr:`~SimpleHTTP.token`).

        :param str domain: Domain name being verified.
        :rtype: str
        :param unicode domain: Domain name being verified.
        :rtype: string

        """
        return "http://" + domain + self.path

    def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
    def validation(self, account_key, **unused_kwargs):
        """Generate validation.

        :param JWK account_key:
        :rtype: str
        :rtype: unicode

        """
        return self.key_authorization(account_key)
@@ -411,18 +381,17 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
    """

    ID_PE_ACME_IDENTIFIER_V1 = b"1.3.6.1.5.5.7.1.30.1"
    ACME_TLS_1_PROTOCOL = b"acme-tls/1"
    ACME_TLS_1_PROTOCOL = "acme-tls/1"

    @property
    def h(self) -> bytes:
    def h(self):
        """Hash value stored in challenge certificate"""
        return hashlib.sha256(self.key_authorization.encode('utf-8')).digest()

    def gen_cert(self, domain: str, key: Optional[crypto.PKey] = None, bits: int = 2048
                 ) -> Tuple[crypto.X509, crypto.PKey]:
    def gen_cert(self, domain, key=None, bits=2048):
        """Generate tls-alpn-01 certificate.

        :param str domain: Domain verified by the challenge.
        :param unicode domain: Domain verified by the challenge.
        :param OpenSSL.crypto.PKey key: Optional private key used in
            certificate generation. If not provided (``None``), then
            fresh key will be generated.
@@ -435,19 +404,19 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
            key = crypto.PKey()
            key.generate_key(crypto.TYPE_RSA, bits)


        der_value = b"DER:" + codecs.encode(self.h, 'hex')
        acme_extension = crypto.X509Extension(self.ID_PE_ACME_IDENTIFIER_V1,
                                              critical=True, value=der_value)
                                              critical=True, value=der_value)

        return crypto_util.gen_ss_cert(key, [domain], force_san=True,
                                       extensions=[acme_extension]), key
                                       extensions=[acme_extension]), key
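A sketch of how the extension payload above is derived, using only the standard library (made-up key authorization):

import codecs
import hashlib

key_authorization = 'made-up-token.made-up-thumbprint'  # placeholder value

# TLSALPN01Response.h: SHA-256 digest of the key authorization.
h = hashlib.sha256(key_authorization.encode('utf-8')).digest()

# The acmeIdentifier extension carries the digest as a DER hex payload.
der_value = b'DER:' + codecs.encode(h, 'hex')
print(der_value)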

    def probe_cert(self, domain: str, host: Optional[str] = None,
                   port: Optional[int] = None) -> crypto.X509:
    def probe_cert(self, domain, host=None, port=None):
        """Probe tls-alpn-01 challenge certificate.

        :param str domain: domain being validated, required.
        :param str host: IP address used to probe the certificate.
        :param unicode domain: domain being validated, required.
        :param string host: IP address used to probe the certificate.
        :param int port: Port used to probe the certificate.

        """
@@ -457,13 +426,13 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
        if port is None:
            port = self.PORT

        return crypto_util.probe_sni(host=host.encode(), port=port, name=domain.encode(),
                                     alpn_protocols=[self.ACME_TLS_1_PROTOCOL])
        return crypto_util.probe_sni(host=host, port=port, name=domain,
                                     alpn_protocols=[self.ACME_TLS_1_PROTOCOL])

    def verify_cert(self, domain: str, cert: crypto.X509) -> bool:
    def verify_cert(self, domain, cert):
        """Verify tls-alpn-01 challenge certificate.

        :param str domain: Domain name being validated.
        :param unicode domain: Domain name being validated.
        :param OpensSSL.crypto.X509 cert: Challenge certificate.

        :returns: Whether the certificate was successfully verified.
@@ -472,10 +441,7 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
        """
        # pylint: disable=protected-access
        names = crypto_util._pyopenssl_cert_or_req_all_names(cert)
        # Type ignore needed due to
        # https://github.com/pyca/pyopenssl/issues/730.
        logger.debug('Certificate %s. SANs: %s',
                     cert.digest('sha256'), names)
        logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), names)
        if len(names) != 1 or names[0].lower() != domain.lower():
            return False

@@ -490,9 +456,8 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
            return False

    # pylint: disable=too-many-arguments
    def simple_verify(self, chall: 'TLSALPN01', domain: str, account_public_key: jose.JWK,
                      cert: Optional[crypto.X509] = None, host: Optional[str] = None,
                      port: Optional[int] = None) -> bool:
    def simple_verify(self, chall, domain, account_public_key,
                      cert=None, host=None, port=None):
        """Simple verify.

        Verify ``validation`` using ``account_public_key``, optionally
@@ -532,11 +497,11 @@ class TLSALPN01(KeyAuthorizationChallenge):
    response_cls = TLSALPN01Response
    typ = response_cls.typ

    def validation(self, account_key: jose.JWK, **kwargs: Any) -> Tuple[crypto.X509, crypto.PKey]:
    def validation(self, account_key, **kwargs):
        """Generate validation.

        :param JWK account_key:
        :param str domain: Domain verified by the challenge.
        :param unicode domain: Domain verified by the challenge.
        :param OpenSSL.crypto.PKey cert_key: Optional private key used
            in certificate generation. If not provided (``None``), then
            fresh key will be generated.
@@ -544,13 +509,12 @@ class TLSALPN01(KeyAuthorizationChallenge):
        :rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`

        """
        # TODO: Remove cast when response() is generic.
        return cast(TLSALPN01Response, self.response(account_key)).gen_cert(
        return self.response(account_key).gen_cert(
            key=kwargs.get('cert_key'),
            domain=cast(str, kwargs.get('domain')))
            domain=kwargs.get('domain'))

    @staticmethod
    def is_supported() -> bool:
    def is_supported():
        """
        Check if TLS-ALPN-01 challenge is supported on this machine.
        This implies that a recent version of OpenSSL is installed (>= 1.0.2),
@@ -572,8 +536,7 @@ class DNS(_TokenChallenge):
    LABEL = "_acme-challenge"
    """Label clients prepend to the domain name being validated."""

    def gen_validation(self, account_key: jose.JWK, alg: jose.JWASignature = jose.RS256,
                       **kwargs: Any) -> jose.JWS:
    def gen_validation(self, account_key, alg=jose.RS256, **kwargs):
        """Generate validation.

        :param .JWK account_key: Private account key.
@@ -587,7 +550,7 @@ class DNS(_TokenChallenge):
            payload=self.json_dumps(sort_keys=True).encode('utf-8'),
            key=account_key, alg=alg, **kwargs)

    def check_validation(self, validation: jose.JWS, account_public_key: jose.JWK) -> bool:
    def check_validation(self, validation, account_public_key):
        """Check validation.

        :param JWS validation:
@@ -604,7 +567,7 @@ class DNS(_TokenChallenge):
            logger.debug("Checking validation for DNS failed: %s", error)
            return False

    def gen_response(self, account_key: jose.JWK, **kwargs: Any) -> 'DNSResponse':
    def gen_response(self, account_key, **kwargs):
        """Generate response.

        :param .JWK account_key: Private account key.
@@ -613,12 +576,13 @@ class DNS(_TokenChallenge):
        :rtype: DNSResponse

        """
        return DNSResponse(validation=self.gen_validation(account_key, **kwargs))
        return DNSResponse(validation=self.gen_validation(
            account_key, **kwargs))

    def validation_domain_name(self, name: str) -> str:
    def validation_domain_name(self, name):
        """Domain name for TXT validation record.

        :param str name: Domain name being validated.
        :param unicode name: Domain name being validated.

        """
        return "{0}.{1}".format(self.LABEL, name)
@@ -633,9 +597,9 @@ class DNSResponse(ChallengeResponse):
    """
    typ = "dns"

    validation: jose.JWS = jose.field("validation", decoder=jose.JWS.from_json)
    validation = jose.Field("validation", decoder=jose.JWS.from_json)

    def check_validation(self, chall: 'DNS', account_public_key: jose.JWK) -> bool:
    def check_validation(self, chall, account_public_key):
        """Check validation.

        :param challenges.DNS chall:

acme/acme/client.py
File diff suppressed because it is too large
@@ -1,26 +1,19 @@
"""Crypto utilities."""
import binascii
import contextlib
import ipaddress
import logging
import os
import re
import socket
from typing import Any
from typing import Callable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Tuple
from typing import Union

import josepy as jose
from OpenSSL import crypto
from OpenSSL import SSL
from OpenSSL import SSL  # type: ignore # https://github.com/python/typeshed/issues/2052

from acme import errors
from acme.magic_typing import Callable
from acme.magic_typing import Tuple
from acme.magic_typing import Union

logger = logging.getLogger(__name__)

@@ -31,21 +24,19 @@ logger = logging.getLogger(__name__)
# https://www.openssl.org/docs/ssl/SSLv23_method.html). _serve_sni
# should be changed to use "set_options" to disable SSLv2 and SSLv3,
# in case it's used for things other than probing/serving!
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD  # type: ignore


class _DefaultCertSelection:
    def __init__(self, certs: Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]):
class _DefaultCertSelection(object):
    def __init__(self, certs):
        self.certs = certs

    def __call__(self, connection: SSL.Connection) -> Optional[Tuple[crypto.PKey, crypto.X509]]:
    def __call__(self, connection):
        server_name = connection.get_servername()
        if server_name:
            return self.certs.get(server_name, None)
        return None  # pragma: no cover
        return self.certs.get(server_name, None)


class SSLSocket:  # pylint: disable=too-few-public-methods
class SSLSocket(object):  # pylint: disable=too-few-public-methods
    """SSL wrapper for sockets.

    :ivar socket sock: Original wrapped socket.
@@ -58,14 +49,9 @@ class SSLSocket:  # pylint: disable=too-few-public-methods
        `certs` parameter would be ignored, and therefore must be empty.

    """
    def __init__(self, sock: socket.socket,
                 certs: Optional[Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]] = None,
                 method: int = _DEFAULT_SSL_METHOD,
                 alpn_selection: Optional[Callable[[SSL.Connection, List[bytes]], bytes]] = None,
                 cert_selection: Optional[Callable[[SSL.Connection],
                                                   Optional[Tuple[crypto.PKey,
                                                                  crypto.X509]]]] = None
                 ) -> None:
    def __init__(self, sock, certs=None,
                 method=_DEFAULT_SSL_METHOD, alpn_selection=None,
                 cert_selection=None):
        self.sock = sock
        self.alpn_selection = alpn_selection
        self.method = method
@@ -73,18 +59,14 @@ class SSLSocket:  # pylint: disable=too-few-public-methods
            raise ValueError("Neither cert_selection or certs specified.")
        if cert_selection and certs:
            raise ValueError("Both cert_selection and certs specified.")
        actual_cert_selection: Union[_DefaultCertSelection,
                                     Optional[Callable[[SSL.Connection],
                                                       Optional[Tuple[crypto.PKey,
                                                                      crypto.X509]]]]] = cert_selection
        if actual_cert_selection is None:
            actual_cert_selection = _DefaultCertSelection(certs if certs else {})
        self.cert_selection = actual_cert_selection
        if cert_selection is None:
            cert_selection = _DefaultCertSelection(certs)
        self.cert_selection = cert_selection

    def __getattr__(self, name: str) -> Any:
    def __getattr__(self, name):
        return getattr(self.sock, name)

    def _pick_certificate_cb(self, connection: SSL.Connection) -> None:
    def _pick_certificate_cb(self, connection):
        """SNI certificate callback.

        This method will set a new OpenSSL context object for this
@@ -111,29 +93,22 @@ class SSLSocket:  # pylint: disable=too-few-public-methods
            new_context.set_alpn_select_callback(self.alpn_selection)
        connection.set_context(new_context)

    class FakeConnection:
    class FakeConnection(object):
        """Fake OpenSSL.SSL.Connection."""

        # pylint: disable=missing-function-docstring

        def __init__(self, connection: SSL.Connection) -> None:
        def __init__(self, connection):
            self._wrapped = connection

        def __getattr__(self, name: str) -> Any:
        def __getattr__(self, name):
            return getattr(self._wrapped, name)

        def shutdown(self, *unused_args: Any) -> bool:
        def shutdown(self, *unused_args):
            # OpenSSL.SSL.Connection.shutdown doesn't accept any args
            try:
                return self._wrapped.shutdown()
            except SSL.Error as error:
                # We wrap the error so we raise the same error type as sockets
                # in the standard library. This is useful when this object is
                # used by code which expects a standard socket such as
                # socketserver in the standard library.
                raise socket.error(error)
            return self._wrapped.shutdown()

    def accept(self) -> Tuple[FakeConnection, Any]:  # pylint: disable=missing-function-docstring
    def accept(self):  # pylint: disable=missing-function-docstring
        sock, addr = self.sock.accept()

        context = SSL.Context(self.method)
@@ -146,8 +121,6 @@ class SSLSocket:  # pylint: disable=too-few-public-methods
        ssl_sock = self.FakeConnection(SSL.Connection(context, sock))
        ssl_sock.set_accept_state()

        # This log line is especially desirable because without it requests to
        # our standalone TLSALPN server would not be logged.
        logger.debug("Performing handshake with %s", addr)
        try:
            ssl_sock.do_handshake()
@@ -159,9 +132,9 @@ class SSLSocket:  # pylint: disable=too-few-public-methods
        return ssl_sock, addr


def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300,  # pylint: disable=too-many-arguments
              method: int = _DEFAULT_SSL_METHOD, source_address: Tuple[str, int] = ('', 0),
              alpn_protocols: Optional[Sequence[bytes]] = None) -> crypto.X509:
def probe_sni(name, host, port=443, timeout=300,  # pylint: disable=too-many-arguments
              method=_DEFAULT_SSL_METHOD, source_address=('', 0),
              alpn_protocols=None):
    """Probe SNI server for SSL certificate.

    :param bytes name: Byte string to send as the server name in the
@@ -174,7 +147,7 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300,  #
        of source interface). See `socket.creation_connection` for more
        info. Available only in Python 2.7+.
    :param alpn_protocols: Protocols to request using ALPN.
    :type alpn_protocols: `Sequence` of `bytes`
    :type alpn_protocols: `list` of `bytes`

    :raises acme.errors.Error: In case of any problems.

@@ -193,10 +166,10 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300,  #
            " from {0}:{1}".format(
                source_address[0],
                source_address[1]
            ) if any(source_address) else ""
            ) if socket_kwargs else ""
        )
        socket_tuple: Tuple[bytes, int] = (host, port)
        sock = socket.create_connection(socket_tuple, **socket_kwargs)  # type: ignore[arg-type]
        socket_tuple = (host, port)  # type: Tuple[str, int]
        sock = socket.create_connection(socket_tuple, **socket_kwargs)  # type: ignore
    except socket.error as error:
        raise errors.Error(error)

@@ -211,50 +184,26 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300,  #
            client_ssl.shutdown()
    except SSL.Error as error:
        raise errors.Error(error)
    cert = client_ssl.get_peer_certificate()
    assert cert  # Appease mypy. We would have crashed out by now if there was no certificate.
    return cert
    return client_ssl.get_peer_certificate()
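A hedged usage sketch of probe_sni as typed above (placeholders throughout; it performs a live TLS handshake, so it only works against a listening server):

from acme import crypto_util

# name and host are bytes in the annotated signature above.
cert = crypto_util.probe_sni(
    name=b'example.com',
    host=b'127.0.0.1',
    port=443,
    alpn_protocols=[b'acme-tls/1'],  # what probe_cert requests for TLS-ALPN-01
)
print(cert.digest('sha256'))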


def make_csr(private_key_pem: bytes, domains: Optional[Union[Set[str], List[str]]] = None,
             must_staple: bool = False,
             ipaddrs: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
             ) -> bytes:
    """Generate a CSR containing domains or IPs as subjectAltNames.
def make_csr(private_key_pem, domains, must_staple=False):
    """Generate a CSR containing a list of domains as subjectAltNames.

    :param buffer private_key_pem: Private key, in PEM PKCS#8 format.
    :param list domains: List of DNS names to include in subjectAltNames of CSR.
    :param bool must_staple: Whether to include the TLS Feature extension (aka
        OCSP Must Staple: https://tools.ietf.org/html/rfc7633).
    :param list ipaddrs: List of IP addresses (type ipaddress.IPv4Address or ipaddress.IPv6Address)
        to include in subjectAltNames of CSR.
        Params are ordered this way for backward compatibility when called by positional argument.
    :returns: buffer PEM-encoded Certificate Signing Request.
    """
    private_key = crypto.load_privatekey(
        crypto.FILETYPE_PEM, private_key_pem)
    csr = crypto.X509Req()
    sanlist = []
    # if the domain or IP list was not supplied, make it an empty list so it's easier to iterate
    if domains is None:
        domains = []
    if ipaddrs is None:
        ipaddrs = []
    if len(domains) + len(ipaddrs) == 0:
        raise ValueError("At least one of the domains or ipaddrs parameters must be non-empty")
    for address in domains:
        sanlist.append('DNS:' + address)
    for ips in ipaddrs:
        sanlist.append('IP:' + ips.exploded)
    # make sure it's ASCII encoded
    san_string = ', '.join(sanlist).encode('ascii')
    # an IP SAN actually needs to be an octet string,
    # but somewhere downstream thankfully handles it for us
    extensions = [
        crypto.X509Extension(
            b'subjectAltName',
            critical=False,
            value=san_string
            value=', '.join('DNS:' + d for d in domains).encode('ascii')
        ),
    ]
    if must_staple:
@@ -264,16 +213,13 @@ def make_csr(private_key_pem: bytes, domains: Optional[Union[Set[str], List[str]
        value=b"DER:30:03:02:01:05"))
    csr.add_extensions(extensions)
    csr.set_pubkey(private_key)
    # RFC 2986 Section 4.1 only defines version 0
    csr.set_version(0)
    csr.set_version(2)
    csr.sign(private_key, 'sha256')
    return crypto.dump_certificate_request(
        crypto.FILETYPE_PEM, csr)
|
||||
|
||||
|
||||
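A short sketch of calling the make_csr variant that accepts IP addresses (the side of this diff with the ipaddrs parameter); the key is freshly generated for illustration:

    import ipaddress

    from OpenSSL import crypto
    from acme import crypto_util

    # Generate a throwaway RSA key and serialize it to PEM, as make_csr expects.
    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 2048)
    key_pem = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)

    # Build a CSR whose subjectAltName covers both a DNS name and an IP address.
    csr_pem = crypto_util.make_csr(
        key_pem,
        domains=['example.com'],
        ipaddrs=[ipaddress.ip_address('192.0.2.1')],
    )
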
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req: Union[crypto.X509, crypto.X509Req]
) -> List[str]:
# despite its name, this only outputs DNS names; other identifier types are ignored
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
common_name = loaded_cert_or_req.get_subject().CN
sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)

@@ -282,7 +228,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req: Union[crypto.X509, cryp
return [common_name] + [d for d in sans if d != common_name]


def _pyopenssl_cert_or_req_san(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
def _pyopenssl_cert_or_req_san(cert_or_req):
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.

.. todo:: Implement directly in PyOpenSSL!
@@ -293,87 +239,47 @@ def _pyopenssl_cert_or_req_san(cert_or_req: Union[crypto.X509, crypto.X509Req])
:param cert_or_req: Certificate or CSR.
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.

:returns: A list of Subject Alternative Names that are DNS names.
:rtype: `list` of `str`
:returns: A list of Subject Alternative Names.
:rtype: `list` of `unicode`

"""
# This function finds SANs with a DNS name
# This function finds SANs by dumping the certificate/CSR to text and
# searching for "X509v3 Subject Alternative Name" in the text. This method
# is used to support PyOpenSSL version 0.13 where the
# `_subjectAltNameString` and `get_extensions` methods are not available
# for CSRs.

# constants based on PyOpenSSL certificate/CSR text dump
part_separator = ":"
parts_separator = ", "
prefix = "DNS" + part_separator

sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
if isinstance(cert_or_req, crypto.X509):
# pylint: disable=line-too-long
func = crypto.dump_certificate  # type: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]]
else:
func = crypto.dump_certificate_request
text = func(crypto.FILETYPE_TEXT, cert_or_req).decode("utf-8")
# WARNING: this function does not support multiple SANs extensions.
# Multiple X509v3 extensions of the same type are disallowed by RFC 5280.
match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
# WARNING: this function assumes that no SAN can include
# parts_separator, hence the split!
sans_parts = [] if match is None else match.group(1).split(parts_separator)

return [part.split(part_separator)[1]
for part in sans_parts if part.startswith(prefix)]


def _pyopenssl_cert_or_req_san_ip(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
"""Get Subject Alternative Names IPs from certificate or CSR using pyOpenSSL.

:param cert_or_req: Certificate or CSR.
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.

:returns: A list of Subject Alternative Names that are IP addresses.
:rtype: `list` of `str`. Note that this returns strings, not ipaddress objects.

"""

# constants based on PyOpenSSL certificate/CSR text dump
part_separator = ":"
prefix = "IP Address" + part_separator

sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)

return [part[len(prefix):] for part in sans_parts if part.startswith(prefix)]


def _pyopenssl_extract_san_list_raw(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
"""Get the raw SAN string from a cert or CSR, parse it as UTF-8 and return.

:param cert_or_req: Certificate or CSR.
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.

:returns: raw SAN strings, with bytes decoded as UTF-8
:rtype: `list` of `str`

"""
# This function finds SANs by dumping the certificate/CSR to text and
# searching for "X509v3 Subject Alternative Name" in the text. This method
# is used because in PyOpenSSL versions <0.17 the `_subjectAltNameString` method is
# not able to parse IP addresses in the subjectAltName string.

if isinstance(cert_or_req, crypto.X509):
# pylint: disable=line-too-long
text = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
else:
text = crypto.dump_certificate_request(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
# WARNING: this function does not support multiple SANs extensions.
# Multiple X509v3 extensions of the same type are disallowed by RFC 5280.
raw_san = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)

parts_separator = ", "
# WARNING: this function assumes that no SAN can include
# parts_separator, hence the split!
sans_parts = [] if raw_san is None else raw_san.group(1).split(parts_separator)
return sans_parts

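The dump-to-text-and-regex strategy shared by the helpers above can be condensed into a few lines; this standalone sketch mirrors their logic rather than reproducing either branch exactly:

    import re

    from OpenSSL import crypto

    def san_entries(cert):
        # Dump the certificate in OpenSSL's text form and grab the single
        # "X509v3 Subject Alternative Name" line, exactly as the helpers do.
        text = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert).decode('utf-8')
        match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
        # Entries look like "DNS:example.com" or "IP Address:192.0.2.1".
        return [] if match is None else match.group(1).split(', ')
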
def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
not_before: Optional[int] = None,
validity: int = (7 * 24 * 60 * 60), force_san: bool = True,
extensions: Optional[List[crypto.X509Extension]] = None,
ips: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
) -> crypto.X509:
def gen_ss_cert(key, domains, not_before=None,
validity=(7 * 24 * 60 * 60), force_san=True, extensions=None):
"""Generate new self-signed certificate.

:type domains: `list` of `str`
:type domains: `list` of `unicode`
:param OpenSSL.crypto.PKey key:
:param bool force_san:
:param extensions: List of additional extensions to include in the cert.
:type extensions: `list` of `OpenSSL.crypto.X509Extension`
:type ips: `list` of (`ipaddress.IPv4Address` or `ipaddress.IPv6Address`)

If more than one domain is provided, all of the domains are put into the
``subjectAltName`` X.509 extension and the first domain is set as the
@@ -381,39 +287,28 @@ def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
extension is used, unless `force_san` is ``True``.

"""
assert domains or ips, "Must provide one or more hostnames or IPs for the cert."

assert domains, "Must provide one or more hostnames for the cert."
cert = crypto.X509()
cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
cert.set_version(2)

if extensions is None:
extensions = []
if domains is None:
domains = []
if ips is None:
ips = []

extensions.append(
crypto.X509Extension(
b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
)

if len(domains) > 0:
cert.get_subject().CN = domains[0]
cert.get_subject().CN = domains[0]
# TODO: what to put into cert.get_subject()?
cert.set_issuer(cert.get_subject())

sanlist = []
for address in domains:
sanlist.append('DNS:' + address)
for ip in ips:
sanlist.append('IP:' + ip.exploded)
san_string = ', '.join(sanlist).encode('ascii')
if force_san or len(domains) > 1 or len(ips) > 0:
if force_san or len(domains) > 1:
extensions.append(crypto.X509Extension(
b"subjectAltName",
critical=False,
value=san_string
value=b", ".join(b"DNS:" + d.encode() for d in domains)
))

cert.add_extensions(extensions)
@@ -426,8 +321,7 @@ def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
return cert


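For reference, a minimal call to gen_ss_cert; 'localhost' is an arbitrary example name:

    from OpenSSL import crypto
    from acme import crypto_util

    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 2048)

    # Self-signed certificate valid for seven days (the default), with the
    # single domain placed in the CN and, since force_san defaults to True,
    # in the subjectAltName extension as well.
    cert = crypto_util.gen_ss_cert(key, domains=['localhost'])
    print(cert.get_subject().CN)  # localhost
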
def dump_pyopenssl_chain(chain: Union[List[jose.ComparableX509], List[crypto.X509]],
filetype: int = crypto.FILETYPE_PEM) -> bytes:
def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
"""Dump certificate chain into a bundle.

:param list chain: List of `OpenSSL.crypto.X509` (or wrapped in
@@ -440,10 +334,8 @@ def dump_pyopenssl_chain(chain: Union[List[jose.ComparableX509], List[crypto.X50
# XXX: returns empty string when no chain is available, which
# shuts up RenewableCert, but might not be the best solution...

def _dump_cert(cert: Union[jose.ComparableX509, crypto.X509]) -> bytes:
def _dump_cert(cert):
if isinstance(cert, jose.ComparableX509):
if isinstance(cert.wrapped, crypto.X509Req):
raise errors.Error("Unexpected CSR provided.")  # pragma: no cover
cert = cert.wrapped
return crypto.dump_certificate(filetype, cert)

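Continuing the sketch above, the chain dumper simply concatenates PEM blobs; a one-element "chain" is enough to see the output shape:

    from acme import crypto_util

    # `cert` is the self-signed certificate from the gen_ss_cert sketch.
    bundle = crypto_util.dump_pyopenssl_chain([cert])
    assert bundle.startswith(b'-----BEGIN CERTIFICATE-----')
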
@@ -1,17 +1,5 @@
"""ACME errors."""
import typing
from typing import Any
from typing import List
from typing import Mapping
from typing import Set

from josepy import errors as jose_errors
import requests

# We import acme.messages only during type check to avoid circular dependencies. Type references
# to acme.message.* must be quoted to be lazily initialized and avoid compilation errors.
if typing.TYPE_CHECKING:
from acme import messages  # pragma: no cover


class Error(Exception):
@@ -40,12 +28,17 @@ class NonceError(ClientError):

class BadNonce(NonceError):
"""Bad nonce error."""
def __init__(self, nonce: str, error: Exception, *args: Any) -> None:
super().__init__(*args)
def __init__(self, nonce, error, *args, **kwargs):
# MyPy complains here that there are too many arguments for the BaseException constructor.
# This is an error fixed in typeshed, see https://github.com/python/mypy/issues/4183
# The fix is included in MyPy>=0.740, but upgrading it would bring dozens of errors due to
# new type definitions. So we ignore the error until the code base is fixed to match
# the MyPy>=0.740 referential.
super(BadNonce, self).__init__(*args, **kwargs)  # type: ignore
self.nonce = nonce
self.error = error

def __str__(self) -> str:
def __str__(self):
return 'Invalid nonce ({0!r}): {1}'.format(self.nonce, self.error)


@@ -56,14 +49,15 @@ class MissingNonce(NonceError):
Replay-Nonce header field in each successful response to a POST it
provides to a client (...)".

:ivar requests.Response ~.response: HTTP Response
:ivar requests.Response response: HTTP Response

"""
def __init__(self, response: requests.Response, *args: Any) -> None:
super().__init__(*args)
def __init__(self, response, *args, **kwargs):
# See comment in BadNonce constructor above for an explanation of type: ignore here.
super(MissingNonce, self).__init__(*args, **kwargs)  # type: ignore
self.response = response

def __str__(self) -> str:
def __str__(self):
return ('Server {0} response did not include a replay '
'nonce, headers: {1} (This may be a service outage)'.format(
self.response.request.method, self.response.headers))
@@ -81,20 +75,17 @@ class PollError(ClientError):
to the most recently updated one

"""
def __init__(self, exhausted: Set['messages.AuthorizationResource'],
updated: Mapping['messages.AuthorizationResource',
'messages.AuthorizationResource']
) -> None:
def __init__(self, exhausted, updated):
self.exhausted = exhausted
self.updated = updated
super().__init__()
super(PollError, self).__init__()

@property
def timeout(self) -> bool:
def timeout(self):
"""Was the error caused by timeout?"""
return bool(self.exhausted)

def __repr__(self) -> str:
def __repr__(self):
return '{0}(exhausted={1!r}, updated={2!r})'.format(
self.__class__.__name__, self.exhausted, self.updated)

@@ -103,9 +94,9 @@ class ValidationError(Error):
"""Error for authorization failures. Contains a list of authorization
resources, each of which is invalid and should have an error field.
"""
def __init__(self, failed_authzrs: List['messages.AuthorizationResource']) -> None:
def __init__(self, failed_authzrs):
self.failed_authzrs = failed_authzrs
super().__init__()
super(ValidationError, self).__init__()


class TimeoutError(Error):  # pylint: disable=redefined-builtin
@@ -115,13 +106,13 @@ class TimeoutError(Error):  # pylint: disable=redefined-builtin
class IssuanceError(Error):
"""Error sent by the server after requesting issuance of a certificate."""

def __init__(self, error: 'messages.Error') -> None:
def __init__(self, error):
"""Initialize.

:param messages.Error error: The error provided by the server.
"""
self.error = error
super().__init__()
super(IssuanceError, self).__init__()


class ConflictError(ClientError):
@@ -132,9 +123,9 @@ class ConflictError(ClientError):

Also used in V2 of the ACME client for the same purpose.
"""
def __init__(self, location: str) -> None:
def __init__(self, location):
self.location = location
super().__init__()
super(ConflictError, self).__init__()


class WildcardUnsupportedError(Error):

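Both nonce errors keep their context around purely for __str__; a quick sketch of how that renders (the nonce value is made up):

    from acme import errors

    try:
        raise errors.BadNonce(nonce='abc', error=ValueError('bad base64'))
    except errors.NonceError as err:
        print(err)  # Invalid nonce ('abc'): bad base64
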
@@ -1,7 +1,4 @@
"""ACME JSON fields."""
import datetime
from typing import Any

import logging

import josepy as jose
@@ -13,17 +10,17 @@ logger = logging.getLogger(__name__)
class Fixed(jose.Field):
"""Fixed field."""

def __init__(self, json_name: str, value: Any) -> None:
def __init__(self, json_name, value):
self.value = value
super().__init__(
super(Fixed, self).__init__(
json_name=json_name, default=value, omitempty=False)

def decode(self, value: Any) -> Any:
def decode(self, value):
if value != self.value:
raise jose.DeserializationError('Expected {0!r}'.format(self.value))
return self.value

def encode(self, value: Any) -> Any:
def encode(self, value):
if value != self.value:
logger.warning(
'Overriding fixed field (%s) with %r', self.json_name, value)
@@ -40,22 +37,28 @@ class RFC3339Field(jose.Field):
"""

@classmethod
def default_encoder(cls, value: datetime.datetime) -> str:
def default_encoder(cls, value):
return pyrfc3339.generate(value)

@classmethod
def default_decoder(cls, value: str) -> datetime.datetime:
def default_decoder(cls, value):
try:
return pyrfc3339.parse(value)
except ValueError as error:
raise jose.DeserializationError(error)


def fixed(json_name: str, value: Any) -> Any:
"""Generates a type-friendly Fixed field."""
return Fixed(json_name, value)
class Resource(jose.Field):
"""Resource MITM field."""

def __init__(self, resource_type, *args, **kwargs):
self.resource_type = resource_type
super(Resource, self).__init__(
'resource', default=resource_type, *args, **kwargs)

def rfc3339(json_name: str, omitempty: bool = False) -> Any:
"""Generates a type-friendly RFC3339 field."""
return RFC3339Field(json_name, omitempty=omitempty)
def decode(self, value):
if value != self.resource_type:
raise jose.DeserializationError(
'Wrong resource type: {0} instead of {1}'.format(
value, self.resource_type))
return value

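To illustrate how these field helpers are consumed, here is a toy josepy object with one timestamp field; note that the fixed()/rfc3339() wrappers exist only on the typed side of this diff, while the other side uses RFC3339Field directly:

    import datetime

    import josepy as jose

    from acme import fields

    class Dated(jose.JSONObjectWithFields):
        """Toy object with a single RFC 3339 timestamp field."""
        when = fields.rfc3339('when')  # the other branch would use fields.RFC3339Field('when')

    obj = Dated(when=datetime.datetime(2023, 1, 1, tzinfo=datetime.timezone.utc))
    print(obj.to_json())  # {'when': '2023-01-01T00:00:00Z'}
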
@@ -4,22 +4,18 @@ The JWS implementation in josepy only implements the base JOSE standard. In
order to support the new header fields defined in ACME, this module defines some
ACME-specific classes that layer on top of josepy.
"""
from typing import Optional

import josepy as jose


class Header(jose.Header):
"""ACME-specific JOSE Header. Implements nonce, kid, and url.
"""
nonce: Optional[bytes] = jose.field('nonce', omitempty=True, encoder=jose.encode_b64jose)
kid: Optional[str] = jose.field('kid', omitempty=True)
url: Optional[str] = jose.field('url', omitempty=True)
nonce = jose.Field('nonce', omitempty=True, encoder=jose.encode_b64jose)
kid = jose.Field('kid', omitempty=True)
url = jose.Field('url', omitempty=True)

# Mypy does not understand the josepy magic happening here, and falsely claims
# that nonce is redefined. Let's ignore the type check here.
@nonce.decoder  # type: ignore[no-redef,union-attr]
def nonce(value: str) -> bytes:  # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
@nonce.decoder
def nonce(value):  # pylint: disable=no-self-argument,missing-function-docstring
try:
return jose.decode_b64jose(value)
except jose.DeserializationError as error:
@@ -29,12 +25,12 @@ class Header(jose.Header):

class Signature(jose.Signature):
"""ACME-specific Signature. Uses the ACME-specific Header for custom fields."""
__slots__ = jose.Signature._orig_slots  # type: ignore[attr-defined] # pylint: disable=protected-access,no-member
__slots__ = jose.Signature._orig_slots  # pylint: disable=no-member

# TODO: decoder/encoder should accept cls? Otherwise, subclassing
# JSONObjectWithFields is tricky...
header_cls = Header
header: Header = jose.field(
header = jose.Field(
'header', omitempty=True, default=header_cls(),
decoder=header_cls.from_json)

@@ -44,16 +40,15 @@ class Signature(jose.Signature):
class JWS(jose.JWS):
"""ACME-specific JWS. Includes nonce, url, and kid in the protected header."""
signature_cls = Signature
__slots__ = jose.JWS._orig_slots  # type: ignore[attr-defined] # pylint: disable=protected-access
__slots__ = jose.JWS._orig_slots

@classmethod
# type: ignore[override] # pylint: disable=arguments-differ
def sign(cls, payload: bytes, key: jose.JWK, alg: jose.JWASignature, nonce: Optional[bytes],
url: Optional[str] = None, kid: Optional[str] = None) -> jose.JWS:
# pylint: disable=arguments-differ
def sign(cls, payload, key, alg, nonce, url=None, kid=None):
# Per the ACME spec, jwk and kid are mutually exclusive, so only include a
# jwk field if kid is not provided.
include_jwk = kid is None
return super().sign(payload, key=key, alg=alg,
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
nonce=nonce, url=url, kid=kid,
include_jwk=include_jwk)
return super(JWS, cls).sign(payload, key=key, alg=alg,
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
nonce=nonce, url=url, kid=kid,
include_jwk=include_jwk)

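A hedged sketch of signing an (empty) ACME payload with this JWS subclass; the nonce and URL are placeholders:

    from cryptography.hazmat.primitives.asymmetric import rsa

    import josepy as jose
    from acme import jws

    # Throwaway account key; josepy wraps raw cryptography keys for us.
    key = jose.JWKRSA(key=rsa.generate_private_key(public_exponent=65537, key_size=2048))

    # nonce and url (and kid, when given) land in the protected header; the
    # jwk field is included only because no kid was provided.
    signed = jws.JWS.sign(b'{}', key=key, alg=jose.RS256,
                          nonce=b'dummy-nonce', url='https://acme.example/new-order')
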
acme/acme/magic_typing.py (new file, 16 lines)
@@ -0,0 +1,16 @@
"""Shim class to avoid depending on the typing module in production."""
import sys


class TypingClass(object):
"""Ignore import errors by returning anything for any attribute."""
def __getattr__(self, name):
return None

try:
# mypy doesn't respect modifying sys.modules
from typing import *  # pylint: disable=wildcard-import, unused-wildcard-import
from typing import Collection, IO  # type: ignore
except ImportError:
# mypy complains because TypingClass is not a module
sys.modules[__name__] = TypingClass()  # type: ignore
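A consumer of the shim looks like this in practice: the import succeeds whether or not the real typing module is available (names resolve to None in the fallback, which is harmless in comment-style annotations):

    from acme.magic_typing import List  # pylint: disable=unused-import

    def names():
        # type: () -> List[str]
        return []
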
@@ -1,27 +1,24 @@
"""ACME protocol messages."""
import datetime
from collections.abc import Hashable
import json
from typing import Any
from typing import Dict
from typing import Iterator
from typing import List
from typing import Mapping
from typing import MutableMapping
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TypeVar

import josepy as jose
import six

from acme import challenges
from acme import errors
from acme import fields
from acme import jws
from acme import util
from acme.mixins import ResourceMixin

try:
from collections.abc import Hashable
except ImportError:  # pragma: no cover
from collections import Hashable


OLD_ERROR_PREFIX = "urn:acme:error:"
ERROR_PREFIX = "urn:ietf:params:acme:error:"

ERROR_CODES = {
@@ -57,101 +54,40 @@ ERROR_CODES = {
'externalAccountRequired': 'The server requires external account binding',
}

ERROR_TYPE_DESCRIPTIONS = {**{
ERROR_PREFIX + name: desc for name, desc in ERROR_CODES.items()
}}
ERROR_TYPE_DESCRIPTIONS = dict(
(ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items())

ERROR_TYPE_DESCRIPTIONS.update(dict(  # add errors with old prefix, deprecate me
(OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()))


def is_acme_error(err: BaseException) -> bool:
def is_acme_error(err):
"""Check if argument is an ACME error."""
if isinstance(err, Error) and (err.typ is not None):
return ERROR_PREFIX in err.typ
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
return False

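A quick sketch of the round trip between error codes and full error types ('badNonce' is one of the ERROR_CODES entries elided from this hunk):

    from acme import messages

    err = messages.Error.with_code('badNonce', detail='stale value')
    print(err.typ)                      # urn:ietf:params:acme:error:badNonce
    print(err.code)                     # badNonce
    print(messages.is_acme_error(err))  # True
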
class _Constant(jose.JSONDeSerializable, Hashable):
"""ACME constant."""
__slots__ = ('name',)
POSSIBLE_NAMES: Dict[str, '_Constant'] = NotImplemented

def __init__(self, name: str) -> None:
super().__init__()
self.POSSIBLE_NAMES[name] = self  # pylint: disable=unsupported-assignment-operation
self.name = name

def to_partial_json(self) -> str:
return self.name

@classmethod
def from_json(cls, jobj: str) -> '_Constant':
if jobj not in cls.POSSIBLE_NAMES:  # pylint: disable=unsupported-membership-test
raise jose.DeserializationError(f'{cls.__name__} not recognized')
return cls.POSSIBLE_NAMES[jobj]

def __repr__(self) -> str:
return f'{self.__class__.__name__}({self.name})'

def __eq__(self, other: Any) -> bool:
return isinstance(other, type(self)) and other.name == self.name

def __hash__(self) -> int:
return hash((self.__class__, self.name))


class IdentifierType(_Constant):
"""ACME identifier type."""
POSSIBLE_NAMES: Dict[str, _Constant] = {}


IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder
IDENTIFIER_IP = IdentifierType('ip')  # IdentifierIP in pebble - not in Boulder yet


class Identifier(jose.JSONObjectWithFields):
"""ACME identifier.

:ivar IdentifierType typ:
:ivar str value:

"""
typ: IdentifierType = jose.field('type', decoder=IdentifierType.from_json)
value: str = jose.field('value')


@six.python_2_unicode_compatible
class Error(jose.JSONObjectWithFields, errors.Error):
"""ACME error.

https://datatracker.ietf.org/doc/html/rfc7807
https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00

Note: Although Error inherits from JSONObjectWithFields, which is immutable,
we add mutability for Error to comply with the Python exception API.

:ivar str typ:
:ivar str title:
:ivar str detail:
:ivar Identifier identifier:
:ivar tuple subproblems: An array of ACME Errors which may be present when the CA
returns multiple errors related to the same request, `tuple` of `Error`.
:ivar unicode typ:
:ivar unicode title:
:ivar unicode detail:

"""
typ: str = jose.field('type', omitempty=True, default='about:blank')
title: str = jose.field('title', omitempty=True)
detail: str = jose.field('detail', omitempty=True)
identifier: Optional['Identifier'] = jose.field(
'identifier', decoder=Identifier.from_json, omitempty=True)
subproblems: Optional[Tuple['Error', ...]] = jose.field('subproblems', omitempty=True)

# Mypy does not understand the josepy magic happening here, and falsely claims
# that subproblems is redefined. Let's ignore the type check here.
@subproblems.decoder  # type: ignore
def subproblems(value: List[Dict[str, Any]]) -> Tuple['Error', ...]:  # pylint: disable=no-self-argument,missing-function-docstring
return tuple(Error.from_json(subproblem) for subproblem in value)
typ = jose.Field('type', omitempty=True, default='about:blank')
title = jose.Field('title', omitempty=True)
detail = jose.Field('detail', omitempty=True)

@classmethod
def with_code(cls, code: str, **kwargs: Any) -> 'Error':
def with_code(cls, code, **kwargs):
"""Create an Error instance with an ACME Error code.

:str code: An ACME error code, like 'dnssec'.
:unicode code: An ACME error code, like 'dnssec'.
:kwargs: kwargs to pass to Error.

"""
@@ -159,57 +95,76 @@ class Error(jose.JSONObjectWithFields, errors.Error):
raise ValueError("The supplied code: %s is not a known ACME error"
" code" % code)
typ = ERROR_PREFIX + code
# Mypy will not understand that the Error constructor accepts a named argument
# "typ" because of josepy magic. Let's ignore the type check here.
return cls(typ=typ, **kwargs)

@property
def description(self) -> Optional[str]:
def description(self):
"""Hardcoded error description based on its type.

:returns: Description if standard ACME error or ``None``.
:rtype: str
:rtype: unicode

"""
return ERROR_TYPE_DESCRIPTIONS.get(self.typ)

@property
def code(self) -> Optional[str]:
def code(self):
"""ACME error code.

Basically self.typ without the ERROR_PREFIX.

:returns: error code if standard ACME code or ``None``.
:rtype: str
:rtype: unicode

"""
code = str(self.typ).rsplit(':', maxsplit=1)[-1]
code = str(self.typ).split(':')[-1]
if code in ERROR_CODES:
return code
return None

# Hack to allow mutability on Errors (see GH #9539)
def __setattr__(self, name: str, value: Any) -> None:
return object.__setattr__(self, name, value)

def __str__(self) -> str:
result = b' :: '.join(
def __str__(self):
return b' :: '.join(
part.encode('ascii', 'backslashreplace') for part in
(self.typ, self.description, self.detail, self.title)
if part is not None).decode()
if self.identifier:
result = f'Problem for {self.identifier.value}: ' + result  # pylint: disable=no-member
if self.subproblems and len(self.subproblems) > 0:
for subproblem in self.subproblems:
result += f'\n{subproblem}'
return result

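On the side of this diff that carries the __setattr__ hack, Error instances stay mutable even though JSONObjectWithFields is immutable by design; a sketch:

    from acme import messages

    err = messages.Error.with_code('malformed', detail='bad CSR')
    err.detail = 'bad CSR, rejected'  # only allowed with the hack above
    # __str__ joins typ, description, and detail with ' :: ' separators.
    print(str(err))
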
class _Constant(jose.JSONDeSerializable, Hashable):  # type: ignore
"""ACME constant."""
__slots__ = ('name',)
POSSIBLE_NAMES = NotImplemented

def __init__(self, name):
super(_Constant, self).__init__()
self.POSSIBLE_NAMES[name] = self  # pylint: disable=unsupported-assignment-operation
self.name = name

def to_partial_json(self):
return self.name

@classmethod
def from_json(cls, jobj):
if jobj not in cls.POSSIBLE_NAMES:  # pylint: disable=unsupported-membership-test
raise jose.DeserializationError(
'{0} not recognized'.format(cls.__name__))
return cls.POSSIBLE_NAMES[jobj]

def __repr__(self):
return '{0}({1})'.format(self.__class__.__name__, self.name)

def __eq__(self, other):
return isinstance(other, type(self)) and other.name == self.name

def __hash__(self):
return hash((self.__class__, self.name))

def __ne__(self, other):
return not self == other


class Status(_Constant):
"""ACME "status" field."""
POSSIBLE_NAMES: Dict[str, _Constant] = {}


POSSIBLE_NAMES = {}  # type: dict
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
@@ -220,57 +175,90 @@ STATUS_READY = Status('ready')
STATUS_DEACTIVATED = Status('deactivated')


class Directory(jose.JSONDeSerializable):
"""Directory.
class IdentifierType(_Constant):
"""ACME identifier type."""
POSSIBLE_NAMES = {}  # type: dict
IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder


class Identifier(jose.JSONObjectWithFields):
"""ACME identifier.

:ivar IdentifierType typ:
:ivar unicode value:

Directory resources must be accessed by the exact field name in RFC8555 (section 9.7.5).
"""
typ = jose.Field('type', decoder=IdentifierType.from_json)
value = jose.Field('value')


class Directory(jose.JSONDeSerializable):
"""Directory."""

_REGISTERED_TYPES = {}  # type: dict

class Meta(jose.JSONObjectWithFields):
"""Directory Meta."""
_terms_of_service: str = jose.field('termsOfService', omitempty=True)
website: str = jose.field('website', omitempty=True)
caa_identities: List[str] = jose.field('caaIdentities', omitempty=True)
external_account_required: bool = jose.field('externalAccountRequired', omitempty=True)
_terms_of_service = jose.Field('terms-of-service', omitempty=True)
_terms_of_service_v2 = jose.Field('termsOfService', omitempty=True)
website = jose.Field('website', omitempty=True)
caa_identities = jose.Field('caaIdentities', omitempty=True)
external_account_required = jose.Field('externalAccountRequired', omitempty=True)

def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super().__init__(**kwargs)
super(Directory.Meta, self).__init__(**kwargs)

@property
def terms_of_service(self) -> str:
def terms_of_service(self):
"""URL for the CA TOS"""
return self._terms_of_service
return self._terms_of_service or self._terms_of_service_v2

def __iter__(self) -> Iterator[str]:
def __iter__(self):
# When iterating over fields, use the external name 'terms_of_service' instead of
# the internal '_terms_of_service'.
for name in super().__iter__():
for name in super(Directory.Meta, self).__iter__():
yield name[1:] if name == '_terms_of_service' else name

def _internal_name(self, name: str) -> str:
def _internal_name(self, name):
return '_' + name if name == 'terms_of_service' else name

def __init__(self, jobj: Mapping[str, Any]) -> None:
self._jobj = jobj

def __getattr__(self, name: str) -> Any:
@classmethod
def _canon_key(cls, key):
return getattr(key, 'resource_type', key)

@classmethod
def register(cls, resource_body_cls):
"""Register resource."""
resource_type = resource_body_cls.resource_type
assert resource_type not in cls._REGISTERED_TYPES
cls._REGISTERED_TYPES[resource_type] = resource_body_cls
return resource_body_cls

def __init__(self, jobj):
canon_jobj = util.map_keys(jobj, self._canon_key)
# TODO: check that everything is an absolute URL; acme-spec is
# not clear on that
self._jobj = canon_jobj

def __getattr__(self, name):
try:
return self[name]
return self[name.replace('_', '-')]
except KeyError as error:
raise AttributeError(str(error))

def __getitem__(self, name: str) -> Any:
def __getitem__(self, name):
try:
return self._jobj[name]
return self._jobj[self._canon_key(name)]
except KeyError:
raise KeyError(f'Directory field "{name}" not found')
raise KeyError('Directory field "' + self._canon_key(name) + '" not found')

def to_partial_json(self) -> Dict[str, Any]:
return util.map_keys(self._jobj, lambda k: k)
def to_partial_json(self):
return self._jobj

@classmethod
def from_json(cls, jobj: MutableMapping[str, Any]) -> 'Directory':
def from_json(cls, jobj):
jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {}))
return cls(jobj)

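The two Directory implementations differ mainly in key canonicalization; under the exact-name RFC 8555 behavior documented above, access looks like this (the URLs are placeholders):

    from acme import messages

    directory = messages.Directory.from_json({
        'newOrder': 'https://acme.example/new-order',
        'meta': {'termsOfService': 'https://acme.example/tos'},
    })
    print(directory.newOrder)               # attribute access by exact field name
    print(directory.meta.terms_of_service)  # note the renamed external property
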
@@ -281,28 +269,27 @@ class Resource(jose.JSONObjectWithFields):
:ivar acme.messages.ResourceBody body: Resource body.

"""
body: "ResourceBody" = jose.field('body')
body = jose.Field('body')


class ResourceWithURI(Resource):
"""ACME Resource with URI.

:ivar str uri: Location of the resource.
:ivar unicode uri: Location of the resource.

"""
uri: str = jose.field('uri')  # no ChallengeResource.uri
uri = jose.Field('uri')  # no ChallengeResource.uri


class ResourceBody(jose.JSONObjectWithFields):
"""ACME Resource Body."""


class ExternalAccountBinding:
class ExternalAccountBinding(object):
"""ACME External Account Binding"""

@classmethod
def from_data(cls, account_public_key: jose.JWK, kid: str, hmac_key: str,
directory: Directory) -> Dict[str, Any]:
def from_data(cls, account_public_key, kid, hmac_key, directory):
"""Create External Account Binding Resource from contact details, kid and hmac."""

key_json = json.dumps(account_public_key.to_partial_json()).encode()
@@ -316,40 +303,33 @@ class ExternalAccountBinding:
return eab.to_partial_json()


GenericRegistration = TypeVar('GenericRegistration', bound='Registration')


class Registration(ResourceBody):
"""Registration Resource Body.

:ivar jose.JWK key: Public key.
:ivar josepy.jwk.JWK key: Public key.
:ivar tuple contact: Contact information following ACME spec,
`tuple` of `str`.
:ivar str agreement:
`tuple` of `unicode`.
:ivar unicode agreement:

"""
# on new-reg key server ignores 'key' and populates it based on
# JWS.signature.combined.jwk
key: jose.JWK = jose.field('key', omitempty=True, decoder=jose.JWK.from_json)
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
# Contact field implements special behavior to allow messages that clear existing
# contacts while not expecting the `contact` field when loading from json.
# This is implemented in the constructor and *_json methods.
contact: Tuple[str, ...] = jose.field('contact', omitempty=True, default=())
agreement: str = jose.field('agreement', omitempty=True)
status: Status = jose.field('status', omitempty=True)
terms_of_service_agreed: bool = jose.field('termsOfServiceAgreed', omitempty=True)
only_return_existing: bool = jose.field('onlyReturnExisting', omitempty=True)
external_account_binding: Dict[str, Any] = jose.field('externalAccountBinding',
omitempty=True)
contact = jose.Field('contact', omitempty=True, default=())
agreement = jose.Field('agreement', omitempty=True)
status = jose.Field('status', omitempty=True)
terms_of_service_agreed = jose.Field('termsOfServiceAgreed', omitempty=True)
only_return_existing = jose.Field('onlyReturnExisting', omitempty=True)
external_account_binding = jose.Field('externalAccountBinding', omitempty=True)

phone_prefix = 'tel:'
email_prefix = 'mailto:'

@classmethod
def from_data(cls: Type[GenericRegistration], phone: Optional[str] = None,
email: Optional[str] = None,
external_account_binding: Optional[Dict[str, Any]] = None,
**kwargs: Any) -> GenericRegistration:
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
"""
Create registration resource from contact details.

@@ -378,19 +358,19 @@ class Registration(ResourceBody):

return cls(**kwargs)

def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""Note if the user provides a value for the `contact` member."""
if 'contact' in kwargs and kwargs['contact'] is not None:
if 'contact' in kwargs:
# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
object.__setattr__(self, '_add_contact', True)
super().__init__(**kwargs)
super(Registration, self).__init__(**kwargs)

def _filter_contact(self, prefix: str) -> Tuple[str, ...]:
def _filter_contact(self, prefix):
return tuple(
detail[len(prefix):] for detail in self.contact  # pylint: disable=not-an-iterable
if detail.startswith(prefix))

def _add_contact_if_appropriate(self, jobj: Dict[str, Any]) -> Dict[str, Any]:
def _add_contact_if_appropriate(self, jobj):
"""
The `contact` member of Registration objects should not be required when
de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
@@ -407,46 +387,51 @@ class Registration(ResourceBody):

return jobj

def to_partial_json(self) -> Dict[str, Any]:
def to_partial_json(self):
"""Modify josepy.JSONDeserializable.to_partial_json()"""
jobj = super().to_partial_json()
jobj = super(Registration, self).to_partial_json()
return self._add_contact_if_appropriate(jobj)

def fields_to_partial_json(self) -> Dict[str, Any]:
def fields_to_partial_json(self):
"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
jobj = super().fields_to_partial_json()
jobj = super(Registration, self).fields_to_partial_json()
return self._add_contact_if_appropriate(jobj)

@property
def phones(self) -> Tuple[str, ...]:
def phones(self):
"""All phones found in the ``contact`` field."""
return self._filter_contact(self.phone_prefix)

@property
def emails(self) -> Tuple[str, ...]:
def emails(self):
"""All emails found in the ``contact`` field."""
return self._filter_contact(self.email_prefix)

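A sketch of building a registration body from contact details (the address is a placeholder); from_data adds the mailto: prefix and the emails property strips it back off:

    from acme import messages

    reg = messages.NewRegistration.from_data(
        email='admin@example.com', terms_of_service_agreed=True)
    print(reg.emails)  # ('admin@example.com',)
    # the serialized 'contact' entry carries the mailto: prefix:
    print(reg.to_partial_json()['contact'])
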
class NewRegistration(Registration):
@Directory.register
class NewRegistration(ResourceMixin, Registration):
"""New registration."""
resource_type = 'new-reg'
resource = fields.Resource(resource_type)


class UpdateRegistration(Registration):
class UpdateRegistration(ResourceMixin, Registration):
"""Update registration."""
resource_type = 'reg'
resource = fields.Resource(resource_type)


class RegistrationResource(ResourceWithURI):
"""Registration Resource.

:ivar acme.messages.Registration body:
:ivar str new_authzr_uri: Deprecated. Do not use.
:ivar str terms_of_service: URL for the CA TOS.
:ivar unicode new_authzr_uri: Deprecated. Do not use.
:ivar unicode terms_of_service: URL for the CA TOS.

"""
body: Registration = jose.field('body', decoder=Registration.from_json)
new_authzr_uri: str = jose.field('new_authzr_uri', omitempty=True)
terms_of_service: str = jose.field('terms_of_service', omitempty=True)
body = jose.Field('body', decoder=Registration.from_json)
new_authzr_uri = jose.Field('new_authzr_uri', omitempty=True)
terms_of_service = jose.Field('terms_of_service', omitempty=True)


class ChallengeBody(ResourceBody):
@@ -471,63 +456,64 @@ class ChallengeBody(ResourceBody):
# challenge object supports either one, but should be accessed through the
# name "uri". In Client.answer_challenge, whichever one is set will be
# used.
_url: str = jose.field('url', omitempty=True, default=None)
status: Status = jose.field('status', decoder=Status.from_json,
_uri = jose.Field('uri', omitempty=True, default=None)
_url = jose.Field('url', omitempty=True, default=None)
status = jose.Field('status', decoder=Status.from_json,
omitempty=True, default=STATUS_PENDING)
validated: datetime.datetime = fields.rfc3339('validated', omitempty=True)
error: Error = jose.field('error', decoder=Error.from_json,
validated = fields.RFC3339Field('validated', omitempty=True)
error = jose.Field('error', decoder=Error.from_json,
omitempty=True, default=None)

def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super().__init__(**kwargs)
super(ChallengeBody, self).__init__(**kwargs)

def encode(self, name: str) -> Any:
return super().encode(self._internal_name(name))
def encode(self, name):
return super(ChallengeBody, self).encode(self._internal_name(name))

def to_partial_json(self) -> Dict[str, Any]:
jobj = super().to_partial_json()
def to_partial_json(self):
jobj = super(ChallengeBody, self).to_partial_json()
jobj.update(self.chall.to_partial_json())
return jobj

@classmethod
def fields_from_json(cls, jobj: Mapping[str, Any]) -> Dict[str, Any]:
jobj_fields = super().fields_from_json(jobj)
def fields_from_json(cls, jobj):
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
return jobj_fields

@property
def uri(self) -> str:
def uri(self):
"""The URL of this challenge."""
return self._url
return self._url or self._uri

def __getattr__(self, name: str) -> Any:
def __getattr__(self, name):
return getattr(self.chall, name)

def __iter__(self) -> Iterator[str]:
def __iter__(self):
# When iterating over fields, use the external name 'uri' instead of
# the internal '_uri'.
for name in super().__iter__():
yield 'uri' if name == '_url' else name
for name in super(ChallengeBody, self).__iter__():
yield name[1:] if name == '_uri' else name

def _internal_name(self, name: str) -> str:
return '_url' if name == 'uri' else name
def _internal_name(self, name):
return '_' + name if name == 'uri' else name


class ChallengeResource(Resource):
"""Challenge Resource.

:ivar acme.messages.ChallengeBody body:
:ivar str authzr_uri: URI found in the 'up' ``Link`` header.
:ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.

"""
body: ChallengeBody = jose.field('body', decoder=ChallengeBody.from_json)
authzr_uri: str = jose.field('authzr_uri')
body = jose.Field('body', decoder=ChallengeBody.from_json)
authzr_uri = jose.Field('authzr_uri')

@property
def uri(self) -> str:
def uri(self):
"""The URL of the challenge body."""
return self.body.uri  # pylint: disable=no-member
return self.body.uri


class Authorization(ResourceBody):
@@ -535,55 +521,70 @@ class Authorization(ResourceBody):

:ivar acme.messages.Identifier identifier:
:ivar list challenges: `list` of `.ChallengeBody`
:ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
of `int`, as opposed to `list` of `list` from the spec).
:ivar acme.messages.Status status:
:ivar datetime.datetime expires:

"""
identifier: Identifier = jose.field('identifier', decoder=Identifier.from_json, omitempty=True)
challenges: List[ChallengeBody] = jose.field('challenges', omitempty=True)
identifier = jose.Field('identifier', decoder=Identifier.from_json, omitempty=True)
challenges = jose.Field('challenges', omitempty=True)
combinations = jose.Field('combinations', omitempty=True)

status: Status = jose.field('status', omitempty=True, decoder=Status.from_json)
status = jose.Field('status', omitempty=True, decoder=Status.from_json)
# TODO: 'expires' is allowed for Authorization Resources in
# general, but for Key Authorization '[t]he "expires" field MUST
# be absent'... then acme-spec gives example with 'expires'
# present... That's confusing!
expires: datetime.datetime = fields.rfc3339('expires', omitempty=True)
wildcard: bool = jose.field('wildcard', omitempty=True)
expires = fields.RFC3339Field('expires', omitempty=True)
wildcard = jose.Field('wildcard', omitempty=True)

# Mypy does not understand the josepy magic happening here, and falsely claims
# that challenges is redefined. Let's ignore the type check here.
@challenges.decoder  # type: ignore
def challenges(value: List[Dict[str, Any]]) -> Tuple[ChallengeBody, ...]:  # pylint: disable=no-self-argument,missing-function-docstring
@challenges.decoder
def challenges(value):  # pylint: disable=no-self-argument,missing-function-docstring
return tuple(ChallengeBody.from_json(chall) for chall in value)

@property
def resolved_combinations(self):
"""Combinations with challenges instead of indices."""
return tuple(tuple(self.challenges[idx] for idx in combo)
for combo in self.combinations)  # pylint: disable=not-an-iterable

class NewAuthorization(Authorization):

@Directory.register
class NewAuthorization(ResourceMixin, Authorization):
"""New authorization."""
resource_type = 'new-authz'
resource = fields.Resource(resource_type)


class UpdateAuthorization(Authorization):
class UpdateAuthorization(ResourceMixin, Authorization):
"""Update authorization."""
resource_type = 'authz'
resource = fields.Resource(resource_type)


class AuthorizationResource(ResourceWithURI):
"""Authorization Resource.

:ivar acme.messages.Authorization body:
:ivar str new_cert_uri: Deprecated. Do not use.
:ivar unicode new_cert_uri: Deprecated. Do not use.

"""
body: Authorization = jose.field('body', decoder=Authorization.from_json)
new_cert_uri: str = jose.field('new_cert_uri', omitempty=True)
body = jose.Field('body', decoder=Authorization.from_json)
new_cert_uri = jose.Field('new_cert_uri', omitempty=True)


class CertificateRequest(jose.JSONObjectWithFields):
"""ACME newOrder request.
@Directory.register
class CertificateRequest(ResourceMixin, jose.JSONObjectWithFields):
"""ACME new-cert request.

:ivar jose.ComparableX509 csr:
:ivar josepy.util.ComparableX509 csr:
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`

"""
csr: jose.ComparableX509 = jose.field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
resource_type = 'new-cert'
resource = fields.Resource(resource_type)
csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)


class CertificateResource(ResourceWithURI):
@@ -591,24 +592,27 @@ class CertificateResource(ResourceWithURI):

:ivar josepy.util.ComparableX509 body:
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
:ivar str cert_chain_uri: URI found in the 'up' ``Link`` header
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.

"""
cert_chain_uri: str = jose.field('cert_chain_uri')
authzrs: Tuple[AuthorizationResource, ...] = jose.field('authzrs')
cert_chain_uri = jose.Field('cert_chain_uri')
authzrs = jose.Field('authzrs')


class Revocation(jose.JSONObjectWithFields):
@Directory.register
class Revocation(ResourceMixin, jose.JSONObjectWithFields):
"""Revocation message.

:ivar jose.ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`jose.ComparableX509`
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`

"""
certificate: jose.ComparableX509 = jose.field(
resource_type = 'revoke-cert'
resource = fields.Resource(resource_type)
certificate = jose.Field(
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
reason: int = jose.field('reason')
reason = jose.Field('reason')


class Order(ResourceBody):
@@ -623,28 +627,26 @@ class Order(ResourceBody):
:ivar str finalize: URL to POST to to request issuance once all
authorizations have "valid" status.
:ivar datetime.datetime expires: When the order expires.
:ivar ~.Error error: Any error that occurred during finalization, if applicable.
:ivar .Error error: Any error that occurred during finalization, if applicable.
"""
identifiers: List[Identifier] = jose.field('identifiers', omitempty=True)
status: Status = jose.field('status', decoder=Status.from_json, omitempty=True)
authorizations: List[str] = jose.field('authorizations', omitempty=True)
certificate: str = jose.field('certificate', omitempty=True)
finalize: str = jose.field('finalize', omitempty=True)
expires: datetime.datetime = fields.rfc3339('expires', omitempty=True)
error: Error = jose.field('error', omitempty=True, decoder=Error.from_json)
identifiers = jose.Field('identifiers', omitempty=True)
status = jose.Field('status', decoder=Status.from_json,
omitempty=True)
authorizations = jose.Field('authorizations', omitempty=True)
certificate = jose.Field('certificate', omitempty=True)
finalize = jose.Field('finalize', omitempty=True)
expires = fields.RFC3339Field('expires', omitempty=True)
error = jose.Field('error', omitempty=True, decoder=Error.from_json)

# Mypy does not understand the josepy magic happening here, and falsely claims
# that identifiers is redefined. Let's ignore the type check here.
@identifiers.decoder  # type: ignore
def identifiers(value: List[Dict[str, Any]]) -> Tuple[Identifier, ...]:  # pylint: disable=no-self-argument,missing-function-docstring
@identifiers.decoder
def identifiers(value):  # pylint: disable=no-self-argument,missing-function-docstring
return tuple(Identifier.from_json(identifier) for identifier in value)


class OrderResource(ResourceWithURI):
"""Order Resource.

:ivar acme.messages.Order body:
:ivar bytes csr_pem: The CSR this Order will be finalized with.
:ivar str csr_pem: The CSR this Order will be finalized with.
:ivar authorizations: Fully-fetched AuthorizationResource objects.
:vartype authorizations: `list` of `acme.messages.AuthorizationResource`
:ivar str fullchain_pem: The fetched contents of the certificate URL
@@ -654,13 +656,13 @@ class OrderResource(ResourceWithURI):
finalization.
:vartype alternative_fullchains_pem: `list` of `str`
"""
body: Order = jose.field('body', decoder=Order.from_json)
csr_pem: bytes = jose.field('csr_pem', omitempty=True)
authorizations: List[AuthorizationResource] = jose.field('authorizations')
fullchain_pem: str = jose.field('fullchain_pem', omitempty=True)
alternative_fullchains_pem: List[str] = jose.field('alternative_fullchains_pem',
omitempty=True)

body = jose.Field('body', decoder=Order.from_json)
csr_pem = jose.Field('csr_pem', omitempty=True)
authorizations = jose.Field('authorizations')
fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
alternative_fullchains_pem = jose.Field('alternative_fullchains_pem', omitempty=True)

@Directory.register
class NewOrder(Order):
"""New order."""
resource_type = 'new-order'

acme/acme/mixins.py (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
"""Useful mixins for Challenge and Resource objects"""
|
||||
|
||||
|
||||
class VersionedLEACMEMixin(object):
|
||||
"""This mixin stores the version of Let's Encrypt's endpoint being used."""
|
||||
@property
|
||||
def le_acme_version(self):
|
||||
"""Define the version of ACME protocol to use"""
|
||||
return getattr(self, '_le_acme_version', 1)
|
||||
|
||||
@le_acme_version.setter
|
||||
def le_acme_version(self, version):
|
||||
# We need to use object.__setattr__ to not depend on the specific implementation of
|
||||
# __setattr__ in current class (eg. jose.TypedJSONObjectWithFields raises AttributeError
|
||||
# for any attempt to set an attribute to make objects immutable).
|
||||
object.__setattr__(self, '_le_acme_version', version)
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
if key == 'le_acme_version':
|
||||
# Required for @property to operate properly. See comment above.
|
||||
object.__setattr__(self, key, value)
|
||||
else:
|
||||
super(VersionedLEACMEMixin, self).__setattr__(key, value) # pragma: no cover
|
||||
|
||||
|
||||
class ResourceMixin(VersionedLEACMEMixin):
|
||||
"""
|
||||
This mixin generates a RFC8555 compliant JWS payload
|
||||
by removing the `resource` field if needed (eg. ACME v2 protocol).
|
||||
"""
|
||||
def to_partial_json(self):
|
||||
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(ResourceMixin, self),
|
||||
'to_partial_json', 'resource')
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(ResourceMixin, self),
|
||||
'fields_to_partial_json', 'resource')
|
||||
|
||||
|
||||
class TypeMixin(VersionedLEACMEMixin):
|
||||
"""
|
||||
This mixin allows generation of a RFC8555 compliant JWS payload
|
||||
by removing the `type` field if needed (eg. ACME v2 protocol).
|
||||
"""
|
||||
def to_partial_json(self):
|
||||
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(TypeMixin, self),
|
||||
'to_partial_json', 'type')
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(TypeMixin, self),
|
||||
'fields_to_partial_json', 'type')
|
||||
|
||||
|
||||
def _safe_jobj_compliance(instance, jobj_method, uncompliant_field):
|
||||
if hasattr(instance, jobj_method):
|
||||
jobj = getattr(instance, jobj_method)()
|
||||
if instance.le_acme_version == 2:
|
||||
jobj.pop(uncompliant_field, None)
|
||||
return jobj
|
||||
|
||||
raise AttributeError('Method {0}() is not implemented.'.format(jobj_method)) # pragma: no cover
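
A quick note on how these mixins behave in practice (a condensed sketch mirroring the JWSPayloadRFC8555Compliant tests later in this diff, not part of the change itself): once le_acme_version is set to 2, the legacy fields are stripped and a challenge response serializes to the empty payload RFC 8555 requires.

    from acme.challenges import HTTP01Response

    challenge_body = HTTP01Response()
    challenge_body.le_acme_version = 2
    # ResourceMixin drops 'resource' and TypeMixin drops 'type', so only an
    # empty JSON object remains for the ACME v2 JWS payload.
    assert challenge_body.json_dumps() == '{}'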
@@ -1,26 +1,17 @@
"""Support for standalone client challenge solvers. """
import collections
import functools
import http.client as http_client
import http.server as BaseHTTPServer
import logging
import socket
import socketserver
import threading
from typing import Any
from typing import cast
from typing import List
from typing import Mapping
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Type

from OpenSSL import crypto
from OpenSSL import SSL
from six.moves import BaseHTTPServer  # type: ignore
from six.moves import http_client
from six.moves import socketserver  # type: ignore

from acme import challenges
from acme import crypto_util
from acme.magic_typing import List

logger = logging.getLogger(__name__)

@@ -28,32 +19,30 @@ logger = logging.getLogger(__name__)
class TLSServer(socketserver.TCPServer):
"""Generic TLS Server."""

def __init__(self, *args: Any, **kwargs: Any) -> None:
def __init__(self, *args, **kwargs):
self.ipv6 = kwargs.pop("ipv6", False)
if self.ipv6:
self.address_family = socket.AF_INET6
else:
self.address_family = socket.AF_INET
self.certs = kwargs.pop("certs", {})
self.method = kwargs.pop("method", crypto_util._DEFAULT_SSL_METHOD)
self.method = kwargs.pop(
"method", crypto_util._DEFAULT_SSL_METHOD)
self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
super().__init__(*args, **kwargs)
socketserver.TCPServer.__init__(self, *args, **kwargs)

def _wrap_sock(self) -> None:
self.socket = cast(socket.socket, crypto_util.SSLSocket(
def _wrap_sock(self):
self.socket = crypto_util.SSLSocket(
self.socket, cert_selection=self._cert_selection,
alpn_selection=getattr(self, '_alpn_selection', None),
method=self.method))
method=self.method)

def _cert_selection(self, connection: SSL.Connection
) -> Optional[Tuple[crypto.PKey, crypto.X509]]:  # pragma: no cover
def _cert_selection(self, connection):  # pragma: no cover
"""Callback selecting certificate for connection."""
server_name = connection.get_servername()
if server_name:
return self.certs.get(server_name, None)
return None
return self.certs.get(server_name, None)

def server_bind(self) -> None:
def server_bind(self):
self._wrap_sock()
return socketserver.TCPServer.server_bind(self)

@@ -65,7 +54,7 @@ class ACMEServerMixin:
allow_reuse_address = True


class BaseDualNetworkedServers:
class BaseDualNetworkedServers(object):
"""Base class for a pair of IPv6 and IPv4 servers that tries to do everything
it's asked for both servers, but where failures in one server don't
affect the other.
@@ -73,14 +62,10 @@ class BaseDualNetworkedServers:
If two servers are instantiated, they will serve on the same port.
"""

def __init__(self, ServerClass: Type[socketserver.TCPServer], server_address: Tuple[str, int],
*remaining_args: Any, **kwargs: Any) -> None:
def __init__(self, ServerClass, server_address, *remaining_args, **kwargs):
port = server_address[1]
self.threads: List[threading.Thread] = []
self.servers: List[socketserver.BaseServer] = []

# Preserve socket error for re-raising, if no servers can be started
last_socket_err: Optional[socket.error] = None
self.threads = []  # type: List[threading.Thread]
self.servers = []  # type: List[ACMEServerMixin]

# Must try True first.
# Ubuntu, for example, will fail to bind to IPv4 if we've already bound
@@ -98,8 +83,7 @@ class BaseDualNetworkedServers:
logger.debug(
"Successfully bound to %s:%s using %s", new_address[0],
new_address[1], "IPv6" if ip_version else "IPv4")
except socket.error as e:
last_socket_err = e
except socket.error:
if self.servers:
# Already bound using IPv6.
logger.debug(
@@ -118,12 +102,9 @@ class BaseDualNetworkedServers:
# bind to the same port for both servers.
port = server.socket.getsockname()[1]
if not self.servers:
if last_socket_err:
raise last_socket_err
else:  # pragma: no cover
raise socket.error("Could not bind to IPv4 or IPv6.")
raise socket.error("Could not bind to IPv4 or IPv6.")

def serve_forever(self) -> None:
def serve_forever(self):
"""Wraps socketserver.TCPServer.serve_forever"""
for server in self.servers:
thread = threading.Thread(
@@ -131,11 +112,11 @@ class BaseDualNetworkedServers:
thread.start()
self.threads.append(thread)

def getsocknames(self) -> List[Tuple[str, int]]:
def getsocknames(self):
"""Wraps socketserver.TCPServer.socket.getsockname"""
return [server.socket.getsockname() for server in self.servers]

def shutdown_and_server_close(self) -> None:
def shutdown_and_server_close(self):
"""Wraps socketserver.TCPServer.shutdown, socketserver.TCPServer.server_close, and
threading.Thread.join"""
for server in self.servers:
@@ -151,20 +132,13 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):

ACME_TLS_1_PROTOCOL = b"acme-tls/1"

def __init__(self, server_address: Tuple[str, int],
certs: List[Tuple[crypto.PKey, crypto.X509]],
challenge_certs: Mapping[bytes, Tuple[crypto.PKey, crypto.X509]],
ipv6: bool = False) -> None:
# We don't need to implement a request handler here because the work
# (including logging) is being done by wrapped socket set up in the
# parent TLSServer class.
def __init__(self, server_address, certs, challenge_certs, ipv6=False):
TLSServer.__init__(
self, server_address, socketserver.BaseRequestHandler, certs=certs,
self, server_address, _BaseRequestHandlerWithLogging, certs=certs,
ipv6=ipv6)
self.challenge_certs = challenge_certs

def _cert_selection(self, connection: SSL.Connection) -> Optional[Tuple[crypto.PKey,
crypto.X509]]:
def _cert_selection(self, connection):
# TODO: We would like to serve challenge cert only if asked for it via
# ALPN. To do this, we need to retrieve the list of protos from client
# hello, but this is currently impossible with openssl [0], and ALPN
@@ -173,12 +147,10 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
# handshake in alpn_selection() if ALPN protos are not what we expect.
# [0] https://github.com/openssl/openssl/issues/4952
server_name = connection.get_servername()
if server_name:
logger.debug("Serving challenge cert for server name %s", server_name)
return self.challenge_certs[server_name]
return None  # pragma: no cover
logger.debug("Serving challenge cert for server name %s", server_name)
return self.challenge_certs.get(server_name, None)

def _alpn_selection(self, _connection: SSL.Connection, alpn_protos: List[bytes]) -> bytes:
def _alpn_selection(self, _connection, alpn_protos):
"""Callback to select alpn protocol."""
if len(alpn_protos) == 1 and alpn_protos[0] == self.ACME_TLS_1_PROTOCOL:
logger.debug("Agreed on %s ALPN", self.ACME_TLS_1_PROTOCOL)
@@ -192,22 +164,21 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
class HTTPServer(BaseHTTPServer.HTTPServer):
"""Generic HTTP Server."""

def __init__(self, *args: Any, **kwargs: Any) -> None:
def __init__(self, *args, **kwargs):
self.ipv6 = kwargs.pop("ipv6", False)
if self.ipv6:
self.address_family = socket.AF_INET6
else:
self.address_family = socket.AF_INET
super().__init__(*args, **kwargs)
BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)


class HTTP01Server(HTTPServer, ACMEServerMixin):
"""HTTP01 Server."""

def __init__(self, server_address: Tuple[str, int], resources: Set[challenges.HTTP01],
ipv6: bool = False, timeout: int = 30) -> None:
super().__init__(
server_address, HTTP01RequestHandler.partial_init(
def __init__(self, server_address, resources, ipv6=False, timeout=30):
HTTPServer.__init__(
self, server_address, HTTP01RequestHandler.partial_init(
simple_http_resources=resources, timeout=timeout), ipv6=ipv6)


@@ -215,8 +186,8 @@ class HTTP01DualNetworkedServers(BaseDualNetworkedServers):
"""HTTP01Server Wrapper. Tries everything for both. Failures for one don't
affect the other."""

def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(HTTP01Server, *args, **kwargs)
def __init__(self, *args, **kwargs):
BaseDualNetworkedServers.__init__(self, HTTP01Server, *args, **kwargs)


class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
@@ -231,37 +202,21 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
HTTP01Resource = collections.namedtuple(
"HTTP01Resource", "chall response validation")

def __init__(self, *args: Any, **kwargs: Any) -> None:
def __init__(self, *args, **kwargs):
self.simple_http_resources = kwargs.pop("simple_http_resources", set())
self._timeout = kwargs.pop('timeout', 30)
super().__init__(*args, **kwargs)
self.server: HTTP01Server
self.timeout = kwargs.pop('timeout', 30)
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)

# In parent class BaseHTTPRequestHandler, 'timeout' is a class-level property but we
# need to define its value during the initialization phase in HTTP01RequestHandler.
# However MyPy does not appreciate that we dynamically shadow a class-level property
# with an instance-level property (eg. self.timeout = ... in __init__()). So to make
# everyone happy, we statically redefine 'timeout' as a method property, and set the
# timeout value in a new internal instance-level property _timeout.
@property
def timeout(self) -> int:  # type: ignore[override]
"""
The default timeout this server should apply to requests.
:return: timeout to apply
:rtype: int
"""
return self._timeout

def log_message(self, format: str, *args: Any) -> None:  # pylint: disable=redefined-builtin
def log_message(self, format, *args):  # pylint: disable=redefined-builtin
"""Log arbitrary message."""
logger.debug("%s - - %s", self.client_address[0], format % args)

def handle(self) -> None:
def handle(self):
"""Handle request."""
self.log_message("Incoming request")
BaseHTTPServer.BaseHTTPRequestHandler.handle(self)

def do_GET(self) -> None:  # pylint: disable=invalid-name,missing-function-docstring
def do_GET(self):  # pylint: disable=invalid-name,missing-function-docstring
if self.path == "/":
self.handle_index()
elif self.path.startswith("/" + challenges.HTTP01.URI_ROOT_PATH):
@@ -269,21 +224,21 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
else:
self.handle_404()

def handle_index(self) -> None:
def handle_index(self):
"""Handle index page."""
self.send_response(200)
self.send_header("Content-Type", "text/html")
self.end_headers()
self.wfile.write(self.server.server_version.encode())

def handle_404(self) -> None:
def handle_404(self):
"""Handler 404 Not Found errors."""
self.send_response(http_client.NOT_FOUND, message="Not Found")
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(b"404")

def handle_simple_http_resource(self) -> None:
def handle_simple_http_resource(self):
"""Handle HTTP01 provisioned resources."""
for resource in self.simple_http_resources:
if resource.chall.path == self.path:
@@ -299,8 +254,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
self.path)

@classmethod
def partial_init(cls, simple_http_resources: Set[challenges.HTTP01],
timeout: int) -> 'functools.partial[HTTP01RequestHandler]':
def partial_init(cls, simple_http_resources, timeout):
"""Partially initialize this handler.

This is useful because `socketserver.BaseServer` takes
@@ -311,3 +265,16 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
return functools.partial(
cls, simple_http_resources=simple_http_resources,
timeout=timeout)


class _BaseRequestHandlerWithLogging(socketserver.BaseRequestHandler):
"""BaseRequestHandler with logging."""

def log_message(self, format, *args):  # pylint: disable=redefined-builtin
"""Log arbitrary message."""
logger.debug("%s - - %s", self.client_address[0], format % args)

def handle(self):
"""Handle request."""
self.log_message("Incoming request")
socketserver.BaseRequestHandler.handle(self)

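For orientation (a sketch, not part of the diff; it matches the usage in the standalone tests later in this comparison): the dual-networked wrappers above are typically driven like this, with port 0 letting the OS pick a free port that the IPv4 and IPv6 servers then share.

    from acme.standalone import HTTP01DualNetworkedServers

    servers = HTTP01DualNetworkedServers(('', 0), resources=set())
    servers.serve_forever()               # one background thread per bound server
    print(servers.getsocknames())         # both entries report the same port
    servers.shutdown_and_server_close()   # joins threads and closes sockets
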
@@ -1,10 +1,7 @@
"""ACME utilities."""
from typing import Any
from typing import Callable
from typing import Dict
from typing import Mapping
import six


def map_keys(dikt: Mapping[Any, Any], func: Callable[[Any], Any]) -> Dict[Any, Any]:
def map_keys(dikt, func):
"""Map dictionary keys."""
return {func(key): value for key, value in dikt.items()}
return {func(key): value for key, value in six.iteritems(dikt)}

@@ -58,7 +58,7 @@ master_doc = 'index'

# General information about the project.
project = u'acme-python'
copyright = u'2015, Let\'s Encrypt Project'
copyright = u'2015-2015, Let\'s Encrypt Project'
author = u'Let\'s Encrypt Project'

# The version info for the project you're documenting, acts as replacement for
@@ -85,9 +85,7 @@ language = 'en'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
'_build',
]
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.

@@ -1,3 +1 @@
:orphan:

.. literalinclude:: ../jws-help.txt

@@ -163,7 +163,7 @@ def example_http():
# Register account and accept TOS

net = client.ClientNetwork(acc_key, user_agent=USER_AGENT)
directory = client.ClientV2.get_directory(DIRECTORY_URL, net)
directory = messages.Directory.from_json(net.get(DIRECTORY_URL).json())
client_acme = client.ClientV2(directory, net=net)

# Terms of Service URL is in client_acme.directory.meta.terms_of_service
@@ -215,7 +215,8 @@ def example_http():
try:
regr = client_acme.query_registration(regr)
except errors.Error as err:
if err.typ == messages.ERROR_PREFIX + 'unauthorized':
if err.typ == messages.OLD_ERROR_PREFIX + 'unauthorized' \
or err.typ == messages.ERROR_PREFIX + 'unauthorized':
# Status is deactivated.
pass
raise

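Condensed from the example above (acc_key, USER_AGENT and DIRECTORY_URL are defined earlier in that file), the two bootstrap styles this hunk switches between are:

    from acme import client, messages

    net = client.ClientNetwork(acc_key, user_agent=USER_AGENT)
    # One side of the hunk: the helper that fetches and parses in one step.
    directory = client.ClientV2.get_directory(DIRECTORY_URL, net)
    # The other side: fetch the JSON yourself, then parse it.
    # directory = messages.Directory.from_json(net.get(DIRECTORY_URL).json())
    client_acme = client.ClientV2(directory, net=net)
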
2  acme/examples/standalone/README  Normal file
@@ -0,0 +1,2 @@
python -m acme.standalone -p 1234
curl -k https://localhost:1234

1  acme/examples/standalone/localhost/cert.pem  Symbolic link
@@ -0,0 +1 @@
../../../acme/testdata/rsa2048_cert.pem

1  acme/examples/standalone/localhost/key.pem  Symbolic link
@@ -0,0 +1 @@
../../../acme/testdata/rsa2048_key.pem

@@ -7,7 +7,4 @@
# in --editable mode (-e), just "pip install acme[docs]" does not work as
# expected and "pip install -e acme[docs]" must be used instead

# We also pin our dependencies for increased stability.

-c ../tools/requirements.txt
-e acme[docs]

2  acme/setup.cfg  Normal file
@@ -0,0 +1,2 @@
[bdist_wheel]
universal = 1
@@ -1,18 +1,44 @@
from distutils.version import LooseVersion
import sys

from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup

version = '2.3.0.dev0'
version = '1.11.0.dev0'

# Please update tox.ini when modifying dependency version requirements
install_requires = [
'cryptography>=2.5.0',
'josepy>=1.13.0',
'PyOpenSSL>=17.5.0',
# load_pem_private/public_key (>=0.6)
# rsa_recover_prime_factors (>=0.8)
'cryptography>=1.2.3',
# formerly known as acme.jose:
# 1.1.0+ is required to avoid the warnings described at
# https://github.com/certbot/josepy/issues/13.
'josepy>=1.1.0',
# Connection.set_tlsext_host_name (>=0.13) + matching Xenial requirements (>=0.15.1)
'PyOpenSSL>=0.15.1',
'pyrfc3339',
'pytz>=2019.3',
'requests>=2.20.0',
'setuptools>=41.6.0',
'pytz',
'requests[security]>=2.6.0',  # security extras added in 2.4.1
'requests-toolbelt>=0.3.0',
'setuptools',
'six>=1.9.0',  # needed for python_2_unicode_compatible
]

setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
'of setuptools. Version 36.2+ of setuptools is required.')
elif sys.version_info < (3,3):
install_requires.append('mock')

dev_extras = [
'pytest',
'pytest-xdist',
'tox',
]

docs_extras = [
@@ -20,32 +46,27 @@ docs_extras = [
'sphinx_rtd_theme',
]

test_extras = [
'pytest',
'pytest-xdist',
'typing-extensions',
]

setup(
name='acme',
version=version,
description='ACME protocol implementation in Python',
url='https://github.com/letsencrypt/letsencrypt',
author="Certbot Project",
author_email='certbot-dev@eff.org',
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=3.7',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],
@@ -54,7 +75,7 @@ setup(
include_package_data=True,
install_requires=install_requires,
extras_require={
'dev': dev_extras,
'docs': docs_extras,
'test': test_extras,
},
)

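The 'mock ; python_version < "3.3"' line above is a PEP 508 environment marker: with setuptools 36.2+ it is recorded in the package metadata and evaluated at install time, so the extra branches only exist as fallbacks for older setuptools. A quick way to see how such a marker evaluates on the current interpreter (illustrative; uses the third-party packaging library, which this diff does not itself depend on):

    from packaging.markers import Marker

    marker = Marker('python_version < "3.3"')
    print(marker.evaluate())  # False on any modern Python, so 'mock' is skipped
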
@@ -1,12 +1,14 @@
"""Tests for acme.challenges."""
import urllib.parse as urllib_parse
import unittest
from unittest import mock

import josepy as jose
import OpenSSL
try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore
import requests
from josepy.jwk import JWKEC
from six.moves.urllib import parse as urllib_parse

from acme import errors

@@ -92,7 +94,8 @@ class DNS01ResponseTest(unittest.TestCase):
self.response = self.chall.response(KEY)

def test_to_partial_json(self):
self.assertEqual({}, self.msg.to_partial_json())
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.msg.to_partial_json())

def test_from_json(self):
from acme.challenges import DNS01Response
@@ -162,7 +165,8 @@ class HTTP01ResponseTest(unittest.TestCase):
self.response = self.chall.response(KEY)

def test_to_partial_json(self):
self.assertEqual({}, self.msg.to_partial_json())
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.msg.to_partial_json())

def test_from_json(self):
from acme.challenges import HTTP01Response
@@ -183,8 +187,7 @@ class HTTP01ResponseTest(unittest.TestCase):
mock_get.return_value = mock.MagicMock(text=validation)
self.assertTrue(self.response.simple_verify(
self.chall, "local", KEY.public_key()))
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False,
timeout=mock.ANY)
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)

@mock.patch("acme.challenges.requests.get")
def test_simple_verify_bad_validation(self, mock_get):
@@ -200,8 +203,7 @@ class HTTP01ResponseTest(unittest.TestCase):
HTTP01Response.WHITESPACE_CUTSET))
self.assertTrue(self.response.simple_verify(
self.chall, "local", KEY.public_key()))
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False,
timeout=mock.ANY)
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)

@mock.patch("acme.challenges.requests.get")
def test_simple_verify_connection_error(self, mock_get):
@@ -217,16 +219,6 @@ class HTTP01ResponseTest(unittest.TestCase):
self.assertEqual("local:8080", urllib_parse.urlparse(
mock_get.mock_calls[0][1][0]).netloc)

@mock.patch("acme.challenges.requests.get")
def test_simple_verify_timeout(self, mock_get):
self.response.simple_verify(self.chall, "local", KEY.public_key())
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False,
timeout=30)
mock_get.reset_mock()
self.response.simple_verify(self.chall, "local", KEY.public_key(), timeout=1234)
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False,
timeout=1234)


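The deleted test above documents the behavior this branch drops: on the other side of the diff, simple_verify() forwards a timeout keyword (default 30 seconds) to requests.get. A hedged usage sketch of that side, with a hypothetical token value and key file name:

    import josepy as jose
    from acme import challenges

    account_key = jose.JWKRSA.load(open('rsa2048_key.pem', 'rb').read())
    chall = challenges.HTTP01(token=b'x' * 16)
    response, _ = chall.response_and_validation(account_key)
    # Performs a real GET against the domain; returns False on mismatch/error.
    response.simple_verify(chall, "example.com", account_key.public_key(), timeout=5)
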
class HTTP01Test(unittest.TestCase):

@@ -284,7 +276,8 @@ class TLSALPN01ResponseTest(unittest.TestCase):
}

def test_to_partial_json(self):
self.assertEqual({}, self.response.to_partial_json())
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.response.to_partial_json())

def test_from_json(self):
from acme.challenges import TLSALPN01Response
@@ -302,7 +295,7 @@ class TLSALPN01ResponseTest(unittest.TestCase):

def test_gen_verify_cert_gen_key(self):
cert, key = self.response.gen_cert(self.domain)
self.assertIsInstance(key, OpenSSL.crypto.PKey)
self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
self.assertTrue(self.response.verify_cert(self.domain, cert))

def test_verify_bad_cert(self):
@@ -336,13 +329,13 @@ class TLSALPN01ResponseTest(unittest.TestCase):
self.response.probe_cert('foo.com')
mock_gethostbyname.assert_called_once_with('foo.com')
mock_probe_sni.assert_called_once_with(
host=b'127.0.0.1', port=self.response.PORT, name=b'foo.com',
alpn_protocols=[b'acme-tls/1'])
host='127.0.0.1', port=self.response.PORT, name='foo.com',
alpn_protocols=['acme-tls/1'])

self.response.probe_cert('foo.com', host='8.8.8.8')
mock_probe_sni.assert_called_with(
host=b'8.8.8.8', port=mock.ANY, name=b'foo.com',
alpn_protocols=[b'acme-tls/1'])
host='8.8.8.8', port=mock.ANY, name='foo.com',
alpn_protocols=['acme-tls/1'])

@mock.patch('acme.challenges.TLSALPN01Response.probe_cert')
def test_simple_verify_false_on_probe_error(self, mock_probe_cert):
@@ -411,11 +404,8 @@ class DNSTest(unittest.TestCase):
hash(DNS.from_json(self.jmsg))

def test_gen_check_validation(self):
ec_key_secp384r1 = JWKEC(key=test_util.load_ecdsa_private_key('ec_secp384r1_key.pem'))
for key, alg in [(KEY, jose.RS256), (ec_key_secp384r1, jose.ES384)]:
with self.subTest(key=key, alg=alg):
self.assertTrue(self.msg.check_validation(
self.msg.gen_validation(key, alg=alg), key.public_key()))
self.assertTrue(self.msg.check_validation(
self.msg.gen_validation(KEY), KEY.public_key()))

def test_gen_check_validation_wrong_key(self):
key2 = jose.JWKRSA.load(test_util.load_vector('rsa1024_key.pem'))
@@ -436,25 +426,20 @@ class DNSTest(unittest.TestCase):
payload=self.msg.update(
token=b'x' * 20).json_dumps().encode('utf-8'),
alg=jose.RS256, key=KEY)
self.assertFalse(self.msg.check_validation(bad_validation, KEY.public_key()))
self.assertFalse(self.msg.check_validation(
bad_validation, KEY.public_key()))

def test_gen_response(self):
with mock.patch('acme.challenges.DNS.gen_validation') as mock_gen:
mock_gen.return_value = mock.sentinel.validation
response = self.msg.gen_response(KEY)
from acme.challenges import DNSResponse
self.assertIsInstance(response, DNSResponse)
self.assertTrue(isinstance(response, DNSResponse))
self.assertEqual(response.validation, mock.sentinel.validation)

def test_validation_domain_name(self):
self.assertEqual('_acme-challenge.le.wtf', self.msg.validation_domain_name('le.wtf'))

def test_validation_domain_name_ecdsa(self):
ec_key_secp384r1 = JWKEC(key=test_util.load_ecdsa_private_key('ec_secp384r1_key.pem'))
self.assertIs(self.msg.check_validation(
self.msg.gen_validation(ec_key_secp384r1, alg=jose.ES384),
ec_key_secp384r1.public_key()), True
)
self.assertEqual(
'_acme-challenge.le.wtf', self.msg.validation_domain_name('le.wtf'))


class DNSResponseTest(unittest.TestCase):
@@ -470,6 +455,8 @@ class DNSResponseTest(unittest.TestCase):
from acme.challenges import DNSResponse
self.msg = DNSResponse(validation=self.validation)
self.jmsg_to = {
'resource': 'challenge',
'type': 'dns',
'validation': self.validation,
}
self.jmsg_from = {
@@ -490,7 +477,8 @@ class DNSResponseTest(unittest.TestCase):
hash(DNSResponse.from_json(self.jmsg_from))

def test_check_validation(self):
self.assertTrue(self.msg.check_validation(self.chall, KEY.public_key()))
self.assertTrue(
self.msg.check_validation(self.chall, KEY.public_key()))


class JWSPayloadRFC8555Compliant(unittest.TestCase):
@@ -499,6 +487,7 @@ class JWSPayloadRFC8555Compliant(unittest.TestCase):
from acme.challenges import HTTP01Response

challenge_body = HTTP01Response()
challenge_body.le_acme_version = 2

jobj = challenge_body.json_dumps(indent=2).encode()
# RFC8555 states that challenge responses must have an empty payload.

File diff suppressed because it is too large
@@ -1,15 +1,14 @@
"""Tests for acme.crypto_util."""
import itertools
import ipaddress
import socket
import socketserver
import threading
import time
import unittest
from typing import List

import josepy as jose
import OpenSSL
import six
from six.moves import socketserver  # type: ignore # pylint: disable=import-error

from acme import errors
import test_util
@@ -28,6 +27,8 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):

class _TestServer(socketserver.TCPServer):

# six.moves.* | pylint: disable=attribute-defined-outside-init,no-init

def server_bind(self):  # pylint: disable=missing-docstring
self.socket = SSLSocket(socket.socket(),
certs)
@@ -61,6 +62,7 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
self.assertRaises(errors.Error, self._probe, b'bar')

def test_probe_connection_error(self):
# pylint has a hard time with six
self.server.server_close()
original_timeout = socket.getdefaulttimeout()
try:
@@ -109,6 +111,7 @@ class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san."""


@classmethod
def _call(cls, loader, name):
# pylint: disable=protected-access
@@ -118,9 +121,9 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
@classmethod
def _get_idn_names(cls):
"""Returns expected names from '{cert,csr}-idnsans.pem'."""
chars = [chr(i) for i in itertools.chain(range(0x3c3, 0x400),
range(0x641, 0x6fc),
range(0x1820, 0x1877))]
chars = [six.unichr(i) for i in itertools.chain(range(0x3c3, 0x400),
range(0x641, 0x6fc),
range(0x1820, 0x1877))]
return [''.join(chars[i: i + 45]) + '.invalid'
for i in range(0, len(chars), 45)]

@@ -174,73 +177,24 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
['chicago-cubs.venafi.example', 'cubs.venafi.example'])


class PyOpenSSLCertOrReqSANIPTest(unittest.TestCase):
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san_ip."""

@classmethod
def _call(cls, loader, name):
# pylint: disable=protected-access
from acme.crypto_util import _pyopenssl_cert_or_req_san_ip
return _pyopenssl_cert_or_req_san_ip(loader(name))

def _call_cert(self, name):
return self._call(test_util.load_cert, name)

def _call_csr(self, name):
return self._call(test_util.load_csr, name)

def test_cert_no_sans(self):
self.assertEqual(self._call_cert('cert.pem'), [])

def test_csr_no_sans(self):
self.assertEqual(self._call_csr('csr-nosans.pem'), [])

def test_cert_domain_sans(self):
self.assertEqual(self._call_cert('cert-san.pem'), [])

def test_csr_domain_sans(self):
self.assertEqual(self._call_csr('csr-san.pem'), [])

def test_cert_ip_two_sans(self):
self.assertEqual(self._call_cert('cert-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])

def test_csr_ip_two_sans(self):
self.assertEqual(self._call_csr('csr-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])

def test_csr_ipv6_sans(self):
self.assertEqual(self._call_csr('csr-ipv6sans.pem'),
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])

def test_cert_ipv6_sans(self):
self.assertEqual(self._call_cert('cert-ipv6sans.pem'),
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])


class GenSsCertTest(unittest.TestCase):
"""Test for gen_ss_cert (generation of self-signed cert)."""
class RandomSnTest(unittest.TestCase):
"""Test for random certificate serial numbers."""

def setUp(self):
self.cert_count = 5
self.serial_num: List[int] = []
self.serial_num = []  # type: List[int]
self.key = OpenSSL.crypto.PKey()
self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)

def test_sn_collisions(self):
from acme.crypto_util import gen_ss_cert

for _ in range(self.cert_count):
cert = gen_ss_cert(self.key, ['dummy'], force_san=True,
ips=[ipaddress.ip_address("10.10.10.10")])
cert = gen_ss_cert(self.key, ['dummy'], force_san=True)
self.serial_num.append(cert.get_serial_number())
self.assertGreaterEqual(len(set(self.serial_num)), self.cert_count)


def test_no_name(self):
from acme.crypto_util import gen_ss_cert
with self.assertRaises(AssertionError):
gen_ss_cert(self.key, ips=[ipaddress.ip_address("1.1.1.1")])
gen_ss_cert(self.key)

self.assertTrue(len(set(self.serial_num)) > 1)

class MakeCSRTest(unittest.TestCase):
"""Test for standalone functions."""
@@ -255,8 +209,8 @@ class MakeCSRTest(unittest.TestCase):

def test_make_csr(self):
csr_pem = self._call_with_key(["a.example", "b.example"])
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--', csr_pem)
self.assertIn(b'--END CERTIFICATE REQUEST--', csr_pem)
self.assertTrue(b'--BEGIN CERTIFICATE REQUEST--' in csr_pem)
self.assertTrue(b'--END CERTIFICATE REQUEST--' in csr_pem)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
@@ -272,27 +226,6 @@ class MakeCSRTest(unittest.TestCase):
).get_data(),
)

def test_make_csr_ip(self):
csr_pem = self._call_with_key(["a.example"], False, [ipaddress.ip_address('127.0.0.1'), ipaddress.ip_address('::1')])
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--', csr_pem)
self.assertIn(b'--END CERTIFICATE REQUEST--', csr_pem)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
# have a get_extensions() method, so we skip this test if the method
# isn't available.
if hasattr(csr, 'get_extensions'):
self.assertEqual(len(csr.get_extensions()), 1)
self.assertEqual(csr.get_extensions()[0].get_data(),
OpenSSL.crypto.X509Extension(
b'subjectAltName',
critical=False,
value=b'DNS:a.example, IP:127.0.0.1, IP:::1',
).get_data(),
)
# for IP san it's actually need to be octet-string,
# but somewhere downstream thankfully handle it for us

def test_make_csr_must_staple(self):
csr_pem = self._call_with_key(["a.example"], must_staple=True)
csr = OpenSSL.crypto.load_certificate_request(
@@ -311,17 +244,6 @@ class MakeCSRTest(unittest.TestCase):
self.assertEqual(len(must_staple_exts), 1,
"Expected exactly one Must Staple extension")

def test_make_csr_without_hostname(self):
self.assertRaises(ValueError, self._call_with_key)

def test_make_csr_correct_version(self):
csr_pem = self._call_with_key(["a.example"])
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr_pem)

self.assertEqual(csr.get_version(), 0,
"Expected CSR version to be v1 (encoded as 0), per RFC 2986, section 4")


class DumpPyopensslChainTest(unittest.TestCase):
"""Test for dump_pyopenssl_chain."""

@@ -1,6 +1,10 @@
"""Tests for acme.errors."""
import unittest
from unittest import mock

try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore


class BadNonceTest(unittest.TestCase):
@@ -24,8 +28,8 @@ class MissingNonceTest(unittest.TestCase):
self.error = MissingNonce(self.response)

def test_str(self):
self.assertIn("FOO", str(self.error))
self.assertIn("{}", str(self.error))
self.assertTrue("FOO" in str(self.error))
self.assertTrue("{}" in str(self.error))


class PollErrorTest(unittest.TestCase):

@@ -1,7 +1,6 @@
"""Tests for acme.fields."""
import datetime
import unittest
import warnings

import josepy as jose
import pytz
@@ -11,8 +10,8 @@ class FixedTest(unittest.TestCase):
"""Tests for acme.fields.Fixed."""

def setUp(self):
from acme.fields import fixed
self.field = fixed('name', 'x')
from acme.fields import Fixed
self.field = Fixed('name', 'x')

def test_decode(self):
self.assertEqual('x', self.field.decode('x'))
@@ -55,5 +54,19 @@ class RFC3339FieldTest(unittest.TestCase):
jose.DeserializationError, RFC3339Field.default_decoder, '')


class ResourceTest(unittest.TestCase):
"""Tests for acme.fields.Resource."""

def setUp(self):
from acme.fields import Resource
self.field = Resource('x')

def test_decode_good(self):
self.assertEqual('x', self.field.decode('x'))

def test_decode_wrong(self):
self.assertRaises(jose.DeserializationError, self.field.decode, 'y')


if __name__ == '__main__':
unittest.main()  # pragma: no cover

@@ -48,7 +48,7 @@ class JWSTest(unittest.TestCase):
self.assertEqual(jws.signature.combined.nonce, self.nonce)
self.assertEqual(jws.signature.combined.url, self.url)
self.assertEqual(jws.signature.combined.kid, self.kid)
self.assertIsNone(jws.signature.combined.jwk)
self.assertEqual(jws.signature.combined.jwk, None)
# TODO: check that nonce is in protected header

self.assertEqual(jws, JWS.from_json(jws.to_json()))
@@ -58,7 +58,7 @@ class JWSTest(unittest.TestCase):
jws = JWS.sign(payload=b'foo', key=self.privkey,
alg=jose.RS256, nonce=self.nonce,
url=self.url)
self.assertIsNone(jws.signature.combined.kid)
self.assertEqual(jws.signature.combined.kid, None)
self.assertEqual(jws.signature.combined.jwk, self.pubkey)

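For context (a sketch mirroring the JWSTest cases above; the key file name is hypothetical): acme.jws.JWS.sign() places nonce, url and optionally kid in the protected header, and falls back to embedding the full jwk when no kid is given.

    import josepy as jose
    from acme.jws import JWS

    key = jose.JWKRSA.load(open('rsa2048_key.pem', 'rb').read())
    jws = JWS.sign(payload=b'foo', key=key, alg=jose.RS256,
                   nonce=b'nonce', url='https://example.com/acme/new-order')
    assert jws.signature.combined.kid is None      # no kid was supplied...
    assert jws.signature.combined.jwk == key.public_key()  # ...so jwk is embedded
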
44  acme/tests/magic_typing_test.py  Normal file
@@ -0,0 +1,44 @@
"""Tests for acme.magic_typing."""
import sys
import unittest

try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore


class MagicTypingTest(unittest.TestCase):
"""Tests for acme.magic_typing."""
def test_import_success(self):
try:
import typing as temp_typing
except ImportError:  # pragma: no cover
temp_typing = None  # pragma: no cover
typing_class_mock = mock.MagicMock()
text_mock = mock.MagicMock()
typing_class_mock.Text = text_mock
sys.modules['typing'] = typing_class_mock
if 'acme.magic_typing' in sys.modules:
del sys.modules['acme.magic_typing']  # pragma: no cover
from acme.magic_typing import Text
self.assertEqual(Text, text_mock)
del sys.modules['acme.magic_typing']
sys.modules['typing'] = temp_typing

def test_import_failure(self):
try:
import typing as temp_typing
except ImportError:  # pragma: no cover
temp_typing = None  # pragma: no cover
sys.modules['typing'] = None
if 'acme.magic_typing' in sys.modules:
del sys.modules['acme.magic_typing']  # pragma: no cover
from acme.magic_typing import Text
self.assertTrue(Text is None)
del sys.modules['acme.magic_typing']
sys.modules['typing'] = temp_typing


if __name__ == '__main__':
unittest.main()  # pragma: no cover
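
How the module under test is used elsewhere in the package (a sketch; the function is illustrative): names imported from acme.magic_typing resolve to the real typing module when it is available and to None otherwise, so annotations are written as comments rather than syntax.

    from acme.magic_typing import List  # pylint: disable=unused-import

    def lowercase_names(servers):
        # type: (List[str]) -> List[str]
        return [server.lower() for server in servers]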
@@ -1,11 +1,11 @@
"""Tests for acme.messages."""
from typing import Dict
import unittest
import contextlib
from unittest import mock
import warnings

import josepy as jose
try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore

from acme import challenges
import test_util
@@ -19,7 +19,7 @@ class ErrorTest(unittest.TestCase):
"""Tests for acme.messages.Error."""

def setUp(self):
from acme.messages import Error, ERROR_PREFIX, Identifier, IDENTIFIER_FQDN
from acme.messages import Error, ERROR_PREFIX
self.error = Error.with_code('malformed', detail='foo', title='title')
self.jobj = {
'detail': 'foo',
@@ -27,9 +27,6 @@ class ErrorTest(unittest.TestCase):
'type': ERROR_PREFIX + 'malformed',
}
self.error_custom = Error(typ='custom', detail='bar')
self.identifier = Identifier(typ=IDENTIFIER_FQDN, value='example.com')
self.subproblem = Error.with_code('caa', detail='bar', title='title', identifier=self.identifier)
self.error_with_subproblems = Error.with_code('malformed', detail='foo', title='title', subproblems=[self.subproblem])
self.empty_error = Error()

def test_default_typ(self):
@@ -44,23 +41,15 @@ class ErrorTest(unittest.TestCase):
from acme.messages import Error
hash(Error.from_json(self.error.to_json()))

def test_from_json_with_subproblems(self):
from acme.messages import Error

parsed_error = Error.from_json(self.error_with_subproblems.to_json())

self.assertEqual(1, len(parsed_error.subproblems))
self.assertEqual(self.subproblem, parsed_error.subproblems[0])

def test_description(self):
self.assertEqual('The request message was malformed', self.error.description)
self.assertIsNone(self.error_custom.description)
self.assertTrue(self.error_custom.description is None)

def test_code(self):
from acme.messages import Error
self.assertEqual('malformed', self.error.code)
self.assertIsNone(self.error_custom.code)
self.assertIsNone(Error().code)
self.assertEqual(None, self.error_custom.code)
self.assertEqual(None, Error().code)

def test_is_acme_error(self):
from acme.messages import is_acme_error, Error
@@ -86,23 +75,6 @@ class ErrorTest(unittest.TestCase):
str(self.error),
u"{0.typ} :: {0.description} :: {0.detail} :: {0.title}"
.format(self.error))
self.assertEqual(
str(self.error_with_subproblems),
(u"{0.typ} :: {0.description} :: {0.detail} :: {0.title}\n"+
u"Problem for {1.identifier.value}: {1.typ} :: {1.description} :: {1.detail} :: {1.title}").format(
self.error_with_subproblems, self.subproblem))

# this test is based on a minimal reproduction of a contextmanager/immutable
# exception related error: https://github.com/python/cpython/issues/99856
def test_setting_traceback(self):
self.assertIsNone(self.error_custom.__traceback__)

try:
1/0
except ZeroDivisionError as e:
self.error_custom.__traceback__ = e.__traceback__

self.assertIsNotNone(self.error_custom.__traceback__)


class ConstantTest(unittest.TestCase):
@@ -112,7 +84,7 @@ class ConstantTest(unittest.TestCase):
from acme.messages import _Constant

class MockConstant(_Constant):  # pylint: disable=missing-docstring
POSSIBLE_NAMES: Dict = {}
POSSIBLE_NAMES = {}  # type: Dict

self.MockConstant = MockConstant  # pylint: disable=invalid-name
self.const_a = MockConstant('a')
@@ -149,8 +121,8 @@ class DirectoryTest(unittest.TestCase):
def setUp(self):
from acme.messages import Directory
self.dir = Directory({
'newReg': 'reg',
'newCert': 'cert',
'new-reg': 'reg',
mock.MagicMock(resource_type='new-cert'): 'cert',
'meta': Directory.Meta(
terms_of_service='https://example.com/acme/terms',
website='https://www.example.com/',
@@ -163,23 +135,26 @@ class DirectoryTest(unittest.TestCase):
Directory({'foo': 'bar'})

def test_getitem(self):
self.assertEqual('reg', self.dir['newReg'])
self.assertEqual('reg', self.dir['new-reg'])
from acme.messages import NewRegistration
self.assertEqual('reg', self.dir[NewRegistration])
self.assertEqual('reg', self.dir[NewRegistration()])

def test_getitem_fails_with_key_error(self):
self.assertRaises(KeyError, self.dir.__getitem__, 'foo')

def test_getattr(self):
self.assertEqual('reg', self.dir.newReg)
self.assertEqual('reg', self.dir.new_reg)

def test_getattr_fails_with_attribute_error(self):
self.assertRaises(AttributeError, self.dir.__getattr__, 'foo')

def test_to_json(self):
self.assertEqual(self.dir.to_json(), {
'newReg': 'reg',
'newCert': 'cert',
'new-reg': 'reg',
'new-cert': 'cert',
'meta': {
'termsOfService': 'https://example.com/acme/terms',
'terms-of-service': 'https://example.com/acme/terms',
'website': 'https://www.example.com/',
'caaIdentities': ['example.com'],
},
@@ -287,10 +262,10 @@ class RegistrationTest(unittest.TestCase):
self.assertEqual(empty_new_reg.contact, ())
self.assertEqual(new_reg_with_contact.contact, ())

self.assertNotIn('contact', empty_new_reg.to_partial_json())
self.assertNotIn('contact', empty_new_reg.fields_to_partial_json())
self.assertIn('contact', new_reg_with_contact.to_partial_json())
self.assertIn('contact', new_reg_with_contact.fields_to_partial_json())
self.assertTrue('contact' not in empty_new_reg.to_partial_json())
self.assertTrue('contact' not in empty_new_reg.fields_to_partial_json())
self.assertTrue('contact' in new_reg_with_contact.to_partial_json())
self.assertTrue('contact' in new_reg_with_contact.fields_to_partial_json())


class UpdateRegistrationTest(unittest.TestCase):
@@ -299,7 +274,7 @@ class UpdateRegistrationTest(unittest.TestCase):
def test_empty(self):
from acme.messages import UpdateRegistration
jstring = '{"resource": "reg"}'
self.assertEqual('{}', UpdateRegistration().json_dumps())
self.assertEqual(jstring, UpdateRegistration().json_dumps())
self.assertEqual(
UpdateRegistration(), UpdateRegistration.json_loads(jstring))

@@ -347,7 +322,7 @@ class ChallengeBodyTest(unittest.TestCase):
error=error)

self.jobj_to = {
'url': 'http://challb',
'uri': 'http://challb',
'status': self.status,
'type': 'dns',
'token': 'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA',
@@ -394,17 +369,20 @@ class AuthorizationTest(unittest.TestCase):
chall=challenges.DNS(
token=b'DGyRejmCefe7v4NfDGDKfA')),
)
combinations = ((0,), (1,))

from acme.messages import Authorization
from acme.messages import Identifier
from acme.messages import IDENTIFIER_FQDN
identifier = Identifier(typ=IDENTIFIER_FQDN, value='example.com')
self.authz = Authorization(
identifier=identifier, challenges=self.challbs)
identifier=identifier, combinations=combinations,
challenges=self.challbs)

self.jobj_from = {
'identifier': identifier.to_json(),
'challenges': [challb.to_json() for challb in self.challbs],
'combinations': combinations,
}

def test_from_json(self):
@@ -415,6 +393,12 @@ class AuthorizationTest(unittest.TestCase):
from acme.messages import Authorization
hash(Authorization.from_json(self.jobj_from))

def test_resolved_combinations(self):
self.assertEqual(self.authz.resolved_combinations, (
(self.challbs[0],),
(self.challbs[1],),
))


class AuthorizationResourceTest(unittest.TestCase):
"""Tests for acme.messages.AuthorizationResource."""
@@ -424,7 +408,7 @@ class AuthorizationResourceTest(unittest.TestCase):
authzr = AuthorizationResource(
uri=mock.sentinel.uri,
body=mock.sentinel.body)
self.assertIsInstance(authzr, jose.JSONDeSerializable)
self.assertTrue(isinstance(authzr, jose.JSONDeSerializable))


class CertificateRequestTest(unittest.TestCase):
@@ -435,7 +419,7 @@ class CertificateRequestTest(unittest.TestCase):
self.req = CertificateRequest(csr=CSR)

def test_json_de_serializable(self):
self.assertIsInstance(self.req, jose.JSONDeSerializable)
self.assertTrue(isinstance(self.req, jose.JSONDeSerializable))
from acme.messages import CertificateRequest
self.assertEqual(
self.req, CertificateRequest.from_json(self.req.to_json()))
@@ -451,7 +435,7 @@ class CertificateResourceTest(unittest.TestCase):
cert_chain_uri=mock.sentinel.cert_chain_uri)

def test_json_de_serializable(self):
self.assertIsInstance(self.certr, jose.JSONDeSerializable)
self.assertTrue(isinstance(self.certr, jose.JSONDeSerializable))
from acme.messages import CertificateResource
self.assertEqual(
self.certr, CertificateResource.from_json(self.certr.to_json()))
@@ -505,6 +489,7 @@ class JWSPayloadRFC8555Compliant(unittest.TestCase):
from acme.messages import NewAuthorization

new_order = NewAuthorization()
new_order.le_acme_version = 2

jobj = new_order.json_dumps(indent=2).encode()
# RFC8555 states that JWS bodies must not have a resource field.

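For reference (a sketch drawn from the DirectoryTest cases above): on this branch the directory keys use the ACME v1 dashed style, and entries can be looked up by key, by message class, or as attributes.

    from acme.messages import Directory, NewRegistration

    directory = Directory({'new-reg': 'reg'})
    assert directory['new-reg'] == 'reg'
    assert directory[NewRegistration] == 'reg'
    assert directory.new_reg == 'reg'
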
@@ -1,14 +1,16 @@
"""Tests for acme.standalone."""
import http.client as http_client
import socket
import socketserver
import threading
import unittest
from typing import Set
from unittest import mock

import josepy as jose
try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore
import requests
from six.moves import http_client  # pylint: disable=import-error
from six.moves import socketserver  # type: ignore # pylint: disable=import-error

from acme import challenges
from acme import crypto_util
@@ -42,7 +44,7 @@ class HTTP01ServerTest(unittest.TestCase):
def setUp(self):
self.account_key = jose.JWK.load(
test_util.load_vector('rsa1024_key.pem'))
self.resources: Set = set()
self.resources = set()  # type: Set

from acme.standalone import HTTP01Server
self.server = HTTP01Server(('', 0), resources=self.resources)
@@ -165,6 +167,7 @@ class TLSALPN01ServerTest(unittest.TestCase):
class BaseDualNetworkedServersTest(unittest.TestCase):
"""Test for acme.standalone.BaseDualNetworkedServers."""

class SingleProtocolServer(socketserver.TCPServer):
"""Server that only serves on a single protocol. FreeBSD has this behavior for AF_INET6."""
def __init__(self, *args, **kwargs):
@@ -174,7 +177,7 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
kwargs["bind_and_activate"] = False
else:
self.address_family = socket.AF_INET
super().__init__(*args, **kwargs)
socketserver.TCPServer.__init__(self, *args, **kwargs)
if ipv6:
# NB: On Windows, socket.IPPROTO_IPV6 constant may be missing.
# We use the corresponding value (41) instead.
@@ -189,17 +192,12 @@ class BaseDualNetworkedServersTest(unittest.TestCase):

@mock.patch("socket.socket.bind")
def test_fail_to_bind(self, mock_bind):
from errno import EADDRINUSE
mock_bind.side_effect = socket.error
from acme.standalone import BaseDualNetworkedServers

mock_bind.side_effect = socket.error(EADDRINUSE, "Fake addr in use error")

with self.assertRaises(socket.error) as em:
BaseDualNetworkedServers(
BaseDualNetworkedServersTest.SingleProtocolServer,
('', 0), socketserver.BaseRequestHandler)

self.assertEqual(em.exception.errno, EADDRINUSE)
self.assertRaises(socket.error, BaseDualNetworkedServers,
BaseDualNetworkedServersTest.SingleProtocolServer,
('', 0),
socketserver.BaseRequestHandler)

def test_ports_equal(self):
from acme.standalone import BaseDualNetworkedServers
@@ -223,7 +221,7 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
def setUp(self):
self.account_key = jose.JWK.load(
test_util.load_vector('rsa1024_key.pem'))
self.resources: Set = set()
self.resources = set()  # type: Set

from acme.standalone import HTTP01DualNetworkedServers
self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources)

@@ -10,7 +10,6 @@ from cryptography.hazmat.primitives import serialization
|
||||
import josepy as jose
|
||||
from OpenSSL import crypto
|
||||
import pkg_resources
|
||||
from josepy.util import ComparableECKey
|
||||
|
||||
|
||||
def load_vector(*names):
|
||||
@@ -61,14 +60,6 @@ def load_rsa_private_key(*names):
|
||||
load_vector(*names), password=None, backend=default_backend()))
|
||||
|
||||
|
||||
def load_ecdsa_private_key(*names):
|
||||
"""Load ECDSA private key."""
|
||||
loader = _guess_loader(names[-1], serialization.load_pem_private_key,
|
||||
serialization.load_der_private_key)
|
||||
return ComparableECKey(loader(
|
||||
load_vector(*names), password=None, backend=default_backend()))
|
||||
|
||||
|
||||
def load_pyopenssl_private_key(*names):
|
||||
"""Load pyOpenSSL private key."""
|
||||
loader = _guess_loader(
|
||||
|
||||
4 acme/tests/testdata/README vendored
@@ -15,7 +15,3 @@ and for the certificates:
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 -outform DER > cert.der
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 > rsa2048_cert.pem
openssl req -key rsa1024_key.pem -new -subj '/CN=example.com' -x509 > rsa1024_cert.pem

and for the elliptic key curves:

openssl genpkey -algorithm EC -out ec_secp384r1.pem -pkeyopt ec_paramgen_curve:P-384 -pkeyopt ec_param_enc:named_curve

21 acme/tests/testdata/cert-ipsans.pem vendored
@@ -1,21 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDizCCAnOgAwIBAgIIPNBLQXwhoUkwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxNzNiMjYwHhcNMjAwNTI5MTkxODA5
WhcNMjUwNTI5MTkxODA5WjAWMRQwEgYDVQQDEwsxOTIuMC4yLjE0NTCCASIwDQYJ
KoZIhvcNAQEBBQADggEPADCCAQoCggEBALyChb+NDA26GF1AfC0nzEdfOTchKw0h
q41xEjonvg5UXgZf/aH/ntvugIkYP0MaFifNAjebOVVsemEVEtyWcUKTfBHKZGbZ
ukTDwFIjfTccCfo6U/B2H7ZLzJIywl8DcUw9DypadeQBm8PS0VVR2ncy73dvaqym
crhAwlASyXU0mhLqRDMMxfg5Bn/FWpcsIcDpLmPn8Q/FvdRc2t5ryBNw/aWOlwqT
Oy16nbfLj2T0zG1A3aPuD+eT/JFUe/o3K7R+FAx7wt+RziQO46wLVVF1SueZUrIU
zqN04Gl8Kt1WM2SniZ0gq/rORUNcPtT0NAEsEslTQfA+Trq6j2peqyMCAwEAAaOB
yjCBxzAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUF
BwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHj1mwZzP//nMIH2i58NRUl/arHn
MB8GA1UdIwQYMBaAFF5DVAKabvIUvKFHGouscA2Qdpe6MDEGCCsGAQUFBwEBBCUw
IzAhBggrBgEFBQcwAYYVaHR0cDovLzEyNy4wLjAuMTo0MDAyMBUGA1UdEQQOMAyH
BMAAApGHBMsAcQEwDQYJKoZIhvcNAQELBQADggEBAHjSgDg76/UCIMSYddyhj18r
LdNKjA7p8ovnErSkebFT4lIZ9f3Sma9moNr0w64M33NamuFyHe/KTdk90mvoW8Uu
26aDekiRIeeMakzbAtDKn67tt2tbedKIYRATcSYVwsV46uZKbM621dZKIjjxOWpo
IY6rZYrku8LYhoXJXOqRduV3cTRVuTm5bBa9FfVNtt6N1T5JOtKKDEhuSaF4RSug
PDy3hQIiHrVvhPfVrXU3j6owz/8UCS5549inES9ONTFrvM9o0H1R/MsmGNXR5hF5
iJqHKC7n8LZujhVnoFIpHu2Dsiefbfr+yRYJS4I+ezy6Nq/Ok8rc8zp0eoX+uyY=
-----END CERTIFICATE-----

22 acme/tests/testdata/cert-ipv6sans.pem vendored
@@ -1,22 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDmzCCAoOgAwIBAgIIFdxeZP+v2rgwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSA0M2M5NTcwHhcNMjAwNTMwMDQwNzMw
WhcNMjUwNTMwMDQwNzMwWjAOMQwwCgYDVQQDEwM6OjEwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQC7VidVduJvqKtrSH0fw6PjE0cqL4Kfzo7klexWUkHG
KVAa0fRVZFZ462jxKOt417V2U4WJQ6WHHO9PJ+3gW62d/MhCw8FRtUQS4nYFjqB6
32+RFU21VRN7cWoQEqSwnEPbh/v/zv/KS5JhQ+swWUo79AOLm1kjnZWCKtcqh1Lc
Ug5Tkpot6luoxTKp52MkchvXDpj0q2B/XpLJ8/pw5cqjv7mH12EDOK2HXllA+WwX
ZpstcEhaA4FqtaHOW/OHnwTX5MUbINXE5YYHVEDR6moVM31/W/3pe9NDUMTDE7Si
lVQnZbXM9NYbzZqlh+WhemDWwnIfGI6rtsfNEiirVEOlAgMBAAGjgeIwgd8wDgYD
VR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNV
HRMBAf8EAjAAMB0GA1UdDgQWBBS8DL+MZfDIy6AKky69Tgry2Vxq5DAfBgNVHSME
GDAWgBRAsFqVenRRKgB1YPzWKzb9bzZ/ozAxBggrBgEFBQcBAQQlMCMwIQYIKwYB
BQUHMAGGFWh0dHA6Ly8xMjcuMC4wLjE6NDAwMjAtBgNVHREEJjAkhxAAAAAAAAAA
AAAAAAAAAAABhxCjvjLzIG7HXQlWDO6YWF7FMA0GCSqGSIb3DQEBCwUAA4IBAQBY
M9UTZ3uaKMQ+He9kWR3p9jh6hTSD0FNi79ZdfkG0lgSzhhduhN7OhzQH2ihUUfa6
rtKTw74fGbszhizCd9UB8YPKlm3si1Xbg6ZUQlA1RtoQo7RUGEa6ZbR68PKGm9Go
hTTFIl/JS8jzxBR8jywZdyqtprUx+nnNUDiNk0hJtFLhw7OJH0AHlAUNqHsfD08m
HXRdaV6q14HXU5g31slBat9H4D6tCU/2uqBURwW0wVdnqh4QeRfAeqiatJS9EmSF
ctbc7n894Idy2Xce7NFoIy5cht3m6Rd42o/LmBsJopBmQcDPZT70/XzRtc2qE0cS
CzBIGQHUJ6BfmBjrCQnp
-----END CERTIFICATE-----

16 acme/tests/testdata/csr-ipsans.pem vendored
@@ -1,16 +0,0 @@
-----BEGIN CERTIFICATE REQUEST-----
MIICbTCCAVUCAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKT/
CE7Y5EYBvI4p7Frt763upIKHDHO/R5/TWMjG8Jm9qTMui8sbMgyh2Yh+lR/j/5Xd
tQrhgC6wx10MrW2+3JtYS88HP1p6si8zU1dbK34n3NyyklR2RivW0R7dXgnYNy7t
5YcDYLCrbRMIPINV/uHrmzIHWYUDNcZVdAfIM2AHfKYuV6Mepcn///5GR+l4GcAh
Nkf9CW8OdAIuKdbyLCxVr0mUW/vJz1b12uxPsgUdax9sjXgZdT4pfMXADsFd1NeF
atpsXU073inqtHru+2F9ijHTQ75TC+u/rr6eYl3BnBntac0gp/ADtDBii7/Q1JOO
Bhq7xJNqqxIEdiyM7zcCAwEAAaAoMCYGCSqGSIb3DQEJDjEZMBcwFQYDVR0RBA4w
DIcEwAACkYcEywBxATANBgkqhkiG9w0BAQsFAAOCAQEADG5g3zdbSCaXpZhWHkzE
Mek3f442TUE1pB+ITRpthmM4N3zZWETYmbLCIAO624uMrRnbCCMvAoLs/L/9ETg/
XMMFtonQC8u9i9tV8B1ceBh8lpIfa+8b9TMWH3bqnrbWQ+YIl+Yd0gXiCZWJ9vK4
eM1Gddu/2bR6s/k4h/XAWRgEexqk57EHr1z0N+T9OoX939n3mVcNI+u9kfd5VJ0z
VyA3R8WR6T6KlEl5P5pcWe5Kuyhi7xMmLVImXqBtvKq4O1AMfM+gQr/yn9aE8IRq
khP7JrMBLUIub1c/qu2TfvnynNPSM/ZcOX+6PHdHmRkR3nI0Ndpv7Ntv31FTplAm
Dw==
-----END CERTIFICATE REQUEST-----

16 acme/tests/testdata/csr-ipv6sans.pem vendored
@@ -1,16 +0,0 @@
-----BEGIN CERTIFICATE REQUEST-----
MIIChTCCAW0CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOIc
UAppcqJfTkSqqOFqGt1v7lIJZPOcF4bcKI3d5cHAGbOuVxbC7uMaDuObwYLzoiED
qnvs1NaEq2phO6KsgGESB7IE2LUjJivO7OnSZjNRpL5si/9egvBiNCn/50lULaWG
gLEuyMfk3awZy2mVAymy7Grhbx069A4TH8TqsHuq2RpKyuDL27e/jUt6yYecb3pu
hWMiWy3segif4tI46pkOW0/I6DpxyYD2OqOvzxm/voS9RMqE2+7YJA327H7bEi3N
lJZEZ1zy7clZ9ga5fBQaetzbg2RyxTrZ7F919NQXSFoXgxb10Eg64wIpz0L3ooCm
GEHehsZZexa3J5ccIvMCAwEAAaBAMD4GCSqGSIb3DQEJDjExMC8wLQYDVR0RBCYw
JIcQAAAAAAAAAAAAAAAAAAAAAYcQo74y8yBux10JVgzumFhexTANBgkqhkiG9w0B
AQsFAAOCAQEALvwVn0A/JPTCiNzcozHFnp5M23C9PXCplWc5u4k34d4XXzpSeFDz
fL4gy7NpYIueme2K2ppw2j3PNQUdR6vQ5a75sriegWYrosL+7Q6Joh51ZyEUZQoD
mNl4M4S4oX85EaChR6NFGBywTfjFarYi32XBTbFE7rK8N8KM+DQkNdwL1MXqaHWz
F1obQKpNXlLedbCBOteV5Eg4zG3565zu/Gw/NhwzzV3mQmgxUcd1sMJxAfHQz4Vl
ImLL+xMcR03nDsH2bgtDbK2tJm7WszSxA9tC+Xp2lRewxrnQloRWPYDz177WGQ5Q
SoGDzTTtA6uWZxG8h7CkNLOGvA8LtU2rNA==
-----END CERTIFICATE REQUEST-----

16 acme/tests/testdata/csr-mixed.pem vendored
@@ -1,16 +0,0 @@
-----BEGIN CERTIFICATE REQUEST-----
MIICdjCCAV4CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMXq
v1y8EIcCbaUIzCtOcLkLS0MJ35oS+6DmV5WB1A0cIk6YrjsHIsY2lwMm13BWIvmw
tY+Y6n0rr7eViNx5ZRGHpHEI/TL3Neb+VefTydL5CgvK3dd4ex2kSbTaed3fmpOx
qMajEduwNcZPCcmoEXPkfrCP8w2vKQUkQ+JRPcdX1nTuzticeRP5B7YCmJsmxkEh
Y0tzzZ+NIRDARoYNofefY86h3e5q66gtJxccNchmIM3YQahhg5n3Xoo8hGfM/TIc
R7ncCBCLO6vtqo0QFva/NQODrgOmOsmgvqPkUWQFdZfWM8yIaU826dktx0CPB78t
TudnJ1rBRvGsjHMsZikCAwEAAaAxMC8GCSqGSIb3DQEJDjEiMCAwHgYDVR0RBBcw
FYINYS5leGVtcGxlLmNvbYcEwAACbzANBgkqhkiG9w0BAQsFAAOCAQEAdGMcRCxq
1X09gn1TNdMt64XUv+wdJCKDaJ+AgyIJj7QvVw8H5k7dOnxS4I+a/yo4jE+LDl2/
AuHcBLFEI4ddewdJSMrTNZjuRYuOdr3KP7fL7MffICSBi45vw5EOXg0tnjJCEiKu
6gcJgbLSP5JMMd7Haf33Q/VWsmHofR3VwOMdrnakwAU3Ff5WTuXTNVhL1kT/uLFX
yW1ru6BF4unwNqSR2UeulljpNfRBsiN4zJK11W6n9KT0NkBr9zY5WCM4sW7i8k9V
TeypWGo3jBKzYAGeuxZsB97U77jZ2lrGdBLZKfbcjnTeRVqCvCRrui4El7UGYFmj
7s6OJyWx5DSV8w==
-----END CERTIFICATE REQUEST-----

6 acme/tests/testdata/ec_secp384r1_key.pem vendored
@@ -1,6 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDArTn0pbFk3xHfKeXte
xJgS4JVdJQ8mqvezhaNpULZPnwb+mcKLlrj6f5SRM52yREGhZANiAAQcrMoPMVqV
rHnDGGz5HUKLNmXfChlNgsrwsruawXF+M283CA6eckAjTXNyiC/ounWmvtoKsZG0
2UQOfQUNSCANId/r986yRGc03W6RJSkcRp86qBYjNsLgbZpber/3+M4=
-----END PRIVATE KEY-----
@@ -3,6 +3,5 @@ include README.rst
recursive-include tests *
recursive-include certbot_apache/_internal/augeas_lens *.aug
recursive-include certbot_apache/_internal/tls_configs *.conf
include certbot_apache/py.typed
global-exclude __pycache__
global-exclude *.py[cod]
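The `include certbot_apache/py.typed` line added to MANIFEST.in above is the PEP 561 marker: an empty file whose presence tells type checkers that the package ships inline annotations. The sdist include alone is not enough for wheels; a typed package normally also lists the marker in package_data, roughly like this (a sketch, not taken from this diff):

    # setup.py (sketch)
    from setuptools import setup

    setup(
        name="certbot-apache",
        packages=["certbot_apache"],
        # Ship the PEP 561 marker in wheels so type checkers see the annotations.
        package_data={"certbot_apache": ["py.typed"]},
    )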
@@ -4,22 +4,18 @@ import fnmatch
import logging
import re
import subprocess
from typing import Dict
from typing import Iterable
from typing import List
from typing import Optional
from typing import Tuple

import pkg_resources

from certbot import errors
from certbot import util

from certbot.compat import os

logger = logging.getLogger(__name__)


def get_mod_deps(mod_name: str) -> List[str]:
def get_mod_deps(mod_name):
    """Get known module dependencies.

    .. note:: This does not need to be accurate in order for the client to
@@ -38,7 +34,7 @@ def get_mod_deps(mod_name: str) -> List[str]:
    return deps.get(mod_name, [])


def get_file_path(vhost_path: str) -> Optional[str]:
def get_file_path(vhost_path):
    """Get file path from augeas_vhost_path.

    Takes in Augeas path and returns the file name
@@ -55,7 +51,7 @@ def get_file_path(vhost_path: str) -> Optional[str]:
    return _split_aug_path(vhost_path)[0]


def get_internal_aug_path(vhost_path: str) -> str:
def get_internal_aug_path(vhost_path):
    """Get the Augeas path for a vhost with the file path removed.

    :param str vhost_path: Augeas virtual host path
@@ -67,7 +63,7 @@ def get_internal_aug_path(vhost_path: str) -> str:
    return _split_aug_path(vhost_path)[1]


def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
def _split_aug_path(vhost_path):
    """Splits an Augeas path into a file path and an internal path.

    After removing "/files", this function splits vhost_path into the
@@ -81,7 +77,7 @@ def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
    """
    # Strip off /files
    file_path = vhost_path[6:]
    internal_path: List[str] = []
    internal_path = []

    # Remove components from the end of file_path until it becomes valid
    while not os.path.exists(file_path):
@@ -91,7 +87,7 @@ def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
    return file_path, "/".join(reversed(internal_path))


def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
def parse_define_file(filepath, varname):
    """ Parses Defines from a variable in configuration file

    :param str filepath: Path of file to parse
@@ -101,7 +97,7 @@ def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
    :rtype: `dict`

    """
    return_vars: Dict[str, str] = {}
    return_vars = {}
    # Get list of words in the variable
    a_opts = util.get_var_from_file(varname, filepath).split()
    for i, v in enumerate(a_opts):
@@ -116,38 +112,41 @@ def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
    return return_vars


def unique_id() -> str:
def unique_id():
    """ Returns an unique id to be used as a VirtualHost identifier"""
    return binascii.hexlify(os.urandom(16)).decode("utf-8")


def included_in_paths(filepath: str, paths: Iterable[str]) -> bool:
def included_in_paths(filepath, paths):
    """
    Returns true if the filepath is included in the list of paths
    that may contain full paths or wildcard paths that need to be
    expanded.

    :param str filepath: Filepath to check
    :param list paths: List of paths to check against
    :params list paths: List of paths to check against

    :returns: True if included
    :rtype: bool
    """

    return any(fnmatch.fnmatch(filepath, path) for path in paths)


def parse_defines(define_cmd: List[str]) -> Dict[str, str]:
def parse_defines(apachectl):
    """
    Gets Defines from httpd process and returns a dictionary of
    the defined variables.

    :param list define_cmd: httpd command to dump defines
    :param str apachectl: Path to apachectl executable

    :returns: dictionary of defined variables
    :rtype: dict
    """

    variables: Dict[str, str] = {}
    variables = {}
    define_cmd = [apachectl, "-t", "-D",
                  "DUMP_RUN_CFG"]
    matches = parse_from_subprocess(define_cmd, r"Define: ([^ \n]*)")
    try:
        matches.remove("DUMP_RUN_CFG")
@@ -155,43 +154,50 @@ def parse_defines(define_cmd: List[str]) -> Dict[str, str]:
        return {}

    for match in matches:
        # Value could also contain = so split only once
        parts = match.split('=', 1)
        value = parts[1] if len(parts) == 2 else ''
        variables[parts[0]] = value
        if match.count("=") > 1:
            logger.error("Unexpected number of equal signs in "
                         "runtime config dump.")
            raise errors.PluginError(
                "Error parsing Apache runtime variables")
        parts = match.partition("=")
        variables[parts[0]] = parts[2]

    return variables
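The two parse_defines variants above differ in how a `NAME=VALUE` entry is split. Both `match.split('=', 1)` and `match.partition('=')` keep any further `=` characters inside the value, so the older guard that raised on multiple equal signs was stricter than necessary. A quick illustration of the equivalence:

    match = "FOO=bar=baz"

    # split with maxsplit=1 -> ['FOO', 'bar=baz']
    parts = match.split("=", 1)
    value = parts[1] if len(parts) == 2 else ""

    # partition -> ('FOO', '=', 'bar=baz'); always a 3-tuple, never raises
    name, _, value2 = match.partition("=")

    assert value == value2 == "bar=baz"
    # A valueless define also works: 'FOO'.partition('=') -> ('FOO', '', '')
    assert "FOO".partition("=")[2] == ""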
def parse_includes(inc_cmd: List[str]) -> List[str]:
def parse_includes(apachectl):
    """
    Gets Include directives from httpd process and returns a list of
    their values.

    :param list inc_cmd: httpd command to dump includes
    :param str apachectl: Path to apachectl executable

    :returns: list of found Include directive values
    :rtype: list of str
    """

    inc_cmd = [apachectl, "-t", "-D",
               "DUMP_INCLUDES"]
    return parse_from_subprocess(inc_cmd, r"\(.*\) (.*)")


def parse_modules(mod_cmd: List[str]) -> List[str]:
def parse_modules(apachectl):
    """
    Get loaded modules from httpd process, and return the list
    of loaded module names.

    :param list mod_cmd: httpd command to dump loaded modules
    :param str apachectl: Path to apachectl executable

    :returns: list of found LoadModule module names
    :rtype: list of str
    """

    mod_cmd = [apachectl, "-t", "-D",
               "DUMP_MODULES"]
    return parse_from_subprocess(mod_cmd, r"(.*)_module")


def parse_from_subprocess(command: List[str], regexp: str) -> List[str]:
def parse_from_subprocess(command, regexp):
    """Get values from stdout of subprocess command

    :param list command: Command to run
@@ -205,7 +211,7 @@ def parse_from_subprocess(command: List[str], regexp: str) -> List[str]:
    return re.compile(regexp).findall(stdout)


def _get_runtime_cfg(command: List[str]) -> str:
def _get_runtime_cfg(command):
    """
    Get runtime configuration info.

@@ -215,14 +221,13 @@ def _get_runtime_cfg(command: List[str]) -> str:

    """
    try:
        proc = subprocess.run(
        proc = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            check=False,
            env=util.env_no_snap_for_external_calls())
        stdout, stderr = proc.stdout, proc.stderr
        stdout, stderr = proc.communicate()

    except (OSError, ValueError):
        logger.error(
@@ -240,8 +245,7 @@ def _get_runtime_cfg(command: List[str]) -> str:

    return stdout
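On the subprocess change in _get_runtime_cfg: subprocess.run() waits for the process and exposes the captured output as attributes, so the run() side reads proc.stdout directly where the Popen side needed communicate(). With check=False a non-zero exit does not raise, matching the old behaviour of inspecting the return code manually. A condensed sketch of the run()-based shape (the certbot-specific env argument is omitted here):

    import logging
    import subprocess

    logger = logging.getLogger(__name__)

    def get_runtime_cfg(command):
        # run() blocks until the process exits and captures decoded text output.
        proc = subprocess.run(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            check=False)  # non-zero exit is handled manually, not raised
        if proc.returncode != 0:
            logger.error("Error running command %s: %s", command, proc.stderr)
        return proc.stdout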
def find_ssl_apache_conf(prefix: str) -> str:
def find_ssl_apache_conf(prefix):
    """
    Find a TLS Apache config file in the dedicated storage.
    :param str prefix: prefix of the TLS Apache config file to find
@@ -1,14 +1,8 @@
""" apacheconfig implementation of the ParserNode interfaces """
from typing import Any
from typing import Iterable
from typing import List
from typing import Optional
from typing import Tuple

from certbot_apache._internal import assertions
from certbot_apache._internal import interfaces
from certbot_apache._internal import parsernode_util as util
from certbot_apache._internal.interfaces import ParserNode


class ApacheParserNode(interfaces.ParserNode):
@@ -18,20 +12,19 @@ class ApacheParserNode(interfaces.ParserNode):
    by parsing the equivalent configuration text using the apacheconfig library.
    """

    def __init__(self, **kwargs: Any) -> None:
        # pylint: disable=unused-variable
        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)
        super().__init__(**kwargs)
    def __init__(self, **kwargs):
        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(ApacheParserNode, self).__init__(**kwargs)
        self.ancestor = ancestor
        self.filepath = filepath
        self.dirty = dirty
        self.metadata = metadata
        self._raw: Any = self.metadata["ac_ast"]
        self._raw = self.metadata["ac_ast"]

    def save(self, msg: str) -> None:
        pass  # pragma: no cover
    def save(self, msg):  # pragma: no cover
        pass

    def find_ancestors(self, name: str) -> List["ApacheParserNode"]:  # pylint: disable=unused-variable
    def find_ancestors(self, name):  # pylint: disable=unused-variable
        """Find ancestor BlockNodes with a given name"""
        return [ApacheBlockNode(name=assertions.PASS,
                                parameters=assertions.PASS,
@@ -43,33 +36,33 @@ class ApacheParserNode(interfaces.ParserNode):
class ApacheCommentNode(ApacheParserNode):
    """ apacheconfig implementation of CommentNode interface """

    def __init__(self, **kwargs: Any) -> None:
        comment, kwargs = util.commentnode_kwargs(kwargs)
        super().__init__(**kwargs)
    def __init__(self, **kwargs):
        comment, kwargs = util.commentnode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(ApacheCommentNode, self).__init__(**kwargs)
        self.comment = comment

    def __eq__(self, other: Any) -> bool:
    def __eq__(self, other):  # pragma: no cover
        if isinstance(other, self.__class__):
            return (self.comment == other.comment and
                    self.dirty == other.dirty and
                    self.ancestor == other.ancestor and
                    self.metadata == other.metadata and
                    self.filepath == other.filepath)
        return False  # pragma: no cover
        return False


class ApacheDirectiveNode(ApacheParserNode):
    """ apacheconfig implementation of DirectiveNode interface """

    def __init__(self, **kwargs: Any) -> None:
    def __init__(self, **kwargs):
        name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
        super().__init__(**kwargs)
        super(ApacheDirectiveNode, self).__init__(**kwargs)
        self.name = name
        self.parameters = parameters
        self.enabled = enabled
        self.include: Optional[str] = None
        self.include = None

    def __eq__(self, other: Any) -> bool:
    def __eq__(self, other):  # pragma: no cover
        if isinstance(other, self.__class__):
            return (self.name == other.name and
                    self.filepath == other.filepath and
@@ -78,21 +71,21 @@ class ApacheDirectiveNode(ApacheParserNode):
                    self.dirty == other.dirty and
                    self.ancestor == other.ancestor and
                    self.metadata == other.metadata)
        return False  # pragma: no cover
        return False

    def set_parameters(self, _parameters: Iterable[str]) -> None:
    def set_parameters(self, _parameters):  # pragma: no cover
        """Sets the parameters for DirectiveNode"""
        return  # pragma: no cover
        return


class ApacheBlockNode(ApacheDirectiveNode):
    """ apacheconfig implementation of BlockNode interface """

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.children: Tuple[ApacheParserNode, ...] = ()
    def __init__(self, **kwargs):
        super(ApacheBlockNode, self).__init__(**kwargs)
        self.children = ()

    def __eq__(self, other: Any) -> bool:
    def __eq__(self, other):  # pragma: no cover
        if isinstance(other, self.__class__):
            return (self.name == other.name and
                    self.filepath == other.filepath and
@@ -102,11 +95,10 @@ class ApacheBlockNode(ApacheDirectiveNode):
                    self.dirty == other.dirty and
                    self.ancestor == other.ancestor and
                    self.metadata == other.metadata)
        return False  # pragma: no cover
        return False

    # pylint: disable=unused-argument
    def add_child_block(self, name: str, parameters: Optional[List[str]] = None,
                        position: Optional[int] = None) -> "ApacheBlockNode":  # pragma: no cover
    def add_child_block(self, name, parameters=None, position=None):  # pragma: no cover
        """Adds a new BlockNode to the sequence of children"""
        new_block = ApacheBlockNode(name=assertions.PASS,
                                    parameters=assertions.PASS,
@@ -117,9 +109,7 @@ class ApacheBlockNode(ApacheDirectiveNode):
        return new_block

    # pylint: disable=unused-argument
    def add_child_directive(self, name: str, parameters: Optional[List[str]] = None,
                            position: Optional[int] = None
                            ) -> ApacheDirectiveNode:  # pragma: no cover
    def add_child_directive(self, name, parameters=None, position=None):  # pragma: no cover
        """Adds a new DirectiveNode to the sequence of children"""
        new_dir = ApacheDirectiveNode(name=assertions.PASS,
                                      parameters=assertions.PASS,
@@ -130,9 +120,7 @@ class ApacheBlockNode(ApacheDirectiveNode):
        return new_dir

    # pylint: disable=unused-argument
    def add_child_comment(
            self, name: str, parameters: Optional[int] = None, position: Optional[int] = None
    ) -> ApacheCommentNode:  # pragma: no cover
    def add_child_comment(self, comment="", position=None):  # pragma: no cover
        """Adds a new CommentNode to the sequence of children"""
        new_comment = ApacheCommentNode(comment=assertions.PASS,
@@ -142,7 +130,7 @@ class ApacheBlockNode(ApacheDirectiveNode):
        self.children += (new_comment,)
        return new_comment

    def find_blocks(self, name: str, exclude: bool = True) -> List["ApacheBlockNode"]:  # pylint: disable=unused-argument
    def find_blocks(self, name, exclude=True):  # pylint: disable=unused-argument
        """Recursive search of BlockNodes from the sequence of children"""
        return [ApacheBlockNode(name=assertions.PASS,
                                parameters=assertions.PASS,
@@ -150,7 +138,7 @@ class ApacheBlockNode(ApacheDirectiveNode):
                                filepath=assertions.PASS,
                                metadata=self.metadata)]

    def find_directives(self, name: str, exclude: bool = True) -> List[ApacheDirectiveNode]:  # pylint: disable=unused-argument
    def find_directives(self, name, exclude=True):  # pylint: disable=unused-argument
        """Recursive search of DirectiveNodes from the sequence of children"""
        return [ApacheDirectiveNode(name=assertions.PASS,
                                    parameters=assertions.PASS,
@@ -159,22 +147,22 @@ class ApacheBlockNode(ApacheDirectiveNode):
                                    metadata=self.metadata)]

    # pylint: disable=unused-argument
    def find_comments(self, comment: str, exact: bool = False) -> List[ApacheCommentNode]:
    def find_comments(self, comment, exact=False):  # pragma: no cover
        """Recursive search of DirectiveNodes from the sequence of children"""
        return [ApacheCommentNode(comment=assertions.PASS,  # pragma: no cover
        return [ApacheCommentNode(comment=assertions.PASS,
                                  ancestor=self,
                                  filepath=assertions.PASS,
                                  metadata=self.metadata)]

    def delete_child(self, child: ParserNode) -> None:
    def delete_child(self, child):  # pragma: no cover
        """Deletes a ParserNode from the sequence of children"""
        return  # pragma: no cover
        return

    def unsaved_files(self) -> List[str]:
    def unsaved_files(self):  # pragma: no cover
        """Returns a list of unsaved filepaths"""
        return [assertions.PASS]  # pragma: no cover
        return [assertions.PASS]

    def parsed_paths(self) -> List[str]:
    def parsed_paths(self):  # pragma: no cover
        """Returns a list of parsed configuration file paths"""
        return [assertions.PASS]
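A recurring pair in this file is `super(ApacheParserNode, self).__init__(**kwargs)` versus the bare `super().__init__(**kwargs)`. On Python 3 the two are equivalent; the zero-argument form fills in the class and instance from the enclosing method automatically. Tiny sketch:

    class Base:
        def __init__(self, **kwargs):
            self.seen = kwargs

    class Child(Base):
        def __init__(self, **kwargs):
            # Equivalent to super(Child, self).__init__(**kwargs) on Python 3.
            super().__init__(**kwargs)

    assert Child(a=1).seen == {"a": 1}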
@@ -1,21 +1,13 @@
"""Dual parser node assertions"""
import fnmatch
from typing import Any
from typing import Iterable
from typing import List
from typing import Optional
from typing import Union

from certbot_apache._internal import interfaces
from certbot_apache._internal.interfaces import CommentNode
from certbot_apache._internal.interfaces import DirectiveNode
from certbot_apache._internal.interfaces import ParserNode
from certbot_apache._internal.obj import VirtualHost


PASS = "CERTBOT_PASS_ASSERT"


def assertEqual(first: ParserNode, second: ParserNode) -> None:
def assertEqual(first, second):
    """ Equality assertion """

    if isinstance(first, interfaces.CommentNode):
@@ -38,98 +30,84 @@ def assertEqual(first: ParserNode, second: ParserNode) -> None:
    # (but identical) directory structures.
    assert first.filepath == second.filepath


def assertEqualComment(first: ParserNode, second: ParserNode) -> None:  # pragma: no cover
def assertEqualComment(first, second):  # pragma: no cover
    """ Equality assertion for CommentNode """

    assert isinstance(first, interfaces.CommentNode)
    assert isinstance(second, interfaces.CommentNode)

    if not isPass(first.comment) and not isPass(second.comment):
        assert first.comment == second.comment
    if not isPass(first.comment) and not isPass(second.comment):  # type: ignore
        assert first.comment == second.comment  # type: ignore


def _assertEqualDirectiveComponents(first: ParserNode,  # pragma: no cover
                                    second: ParserNode) -> None:
def _assertEqualDirectiveComponents(first, second):  # pragma: no cover
    """ Handles assertion for instance variables for DirectiveNode and BlockNode"""

    # Enabled value cannot be asserted, because Augeas implementation
    # is unable to figure that out.
    # assert first.enabled == second.enabled
    assert isinstance(first, DirectiveNode)
    assert isinstance(second, DirectiveNode)

    if not isPass(first.name) and not isPass(second.name):
        assert first.name == second.name

    if not isPass(first.parameters) and not isPass(second.parameters):
        assert first.parameters == second.parameters


def assertEqualDirective(first: ParserNode, second: ParserNode) -> None:
def assertEqualDirective(first, second):
    """ Equality assertion for DirectiveNode """

    assert isinstance(first, interfaces.DirectiveNode)
    assert isinstance(second, interfaces.DirectiveNode)
    _assertEqualDirectiveComponents(first, second)


def isPass(value: Any) -> bool:  # pragma: no cover
def isPass(value):  # pragma: no cover
    """Checks if the value is set to PASS"""
    if isinstance(value, bool):
        return True
    return PASS in value


def isPassDirective(block: DirectiveNode) -> bool:
def isPassDirective(block):
    """ Checks if BlockNode or DirectiveNode should pass the assertion """

    if isPass(block.name):
        return True
    if isPass(block.parameters):  # pragma: no cover
    if isPass(block.parameters):  # pragma: no cover
        return True
    if isPass(block.filepath):  # pragma: no cover
    if isPass(block.filepath):  # pragma: no cover
        return True
    return False


def isPassComment(comment: CommentNode) -> bool:
def isPassComment(comment):
    """ Checks if CommentNode should pass the assertion """

    if isPass(comment.comment):
        return True
    if isPass(comment.filepath):  # pragma: no cover
    if isPass(comment.filepath):  # pragma: no cover
        return True
    return False


def isPassNodeList(nodelist: List[Union[DirectiveNode, CommentNode]]) -> bool:  # pragma: no cover
def isPassNodeList(nodelist):  # pragma: no cover
    """ Checks if a ParserNode in the nodelist should pass the assertion,
    this function is used for results of find_* methods. Unimplemented find_*
    methods should return a sequence containing a single ParserNode instance
    with assertion pass string."""

    node: Optional[Union[DirectiveNode, CommentNode]]
    try:
        node = nodelist[0]
    except IndexError:
        node = None

    if not node:  # pragma: no cover
    if not node:  # pragma: no cover
        return False

    if isinstance(node, interfaces.DirectiveNode):
        return isPassDirective(node)
    return isPassComment(node)


def assertEqualSimple(first: Any, second: Any) -> None:
def assertEqualSimple(first, second):
    """ Simple assertion """
    if not isPass(first) and not isPass(second):
        assert first == second


def isEqualVirtualHost(first: VirtualHost, second: VirtualHost) -> bool:
def isEqualVirtualHost(first, second):
    """
    Checks that two VirtualHost objects are similar. There are some built
    in differences with the implementations: VirtualHost created by ParserNode
@@ -149,8 +127,7 @@ def isEqualVirtualHost(first: VirtualHost, second: VirtualHost) -> bool:
        first.ancestor == second.ancestor
    )


def assertEqualPathsList(first: Iterable[str], second: Iterable[str]) -> None:  # pragma: no cover
def assertEqualPathsList(first, second):  # pragma: no cover
    """
    Checks that the two lists of file paths match. This assertion allows for wildcard
    paths.
@@ -160,6 +137,6 @@ def assertEqualPathsList(first: Iterable[str], second: Iterable[str]) -> None:
    if any(isPass(path) for path in second):
        return
    for fpath in first:
        assert any(fnmatch.fnmatch(fpath, spath) for spath in second)
        assert any([fnmatch.fnmatch(fpath, spath) for spath in second])
    for spath in second:
        assert any(fnmatch.fnmatch(fpath, spath) for fpath in first)
        assert any([fnmatch.fnmatch(fpath, spath) for fpath in first])
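The assertEqualPathsList change swaps `any([...])` for `any(...)` with a bare generator. Dropping the brackets avoids building an intermediate list, and any() can then stop at the first matching pattern instead of evaluating fnmatch for every pair. Illustration:

    import fnmatch

    path = "/etc/apache2/sites-enabled/a.conf"
    patterns = ["/etc/apache2/sites-enabled/*.conf", "/never/checked/*"]

    # The generator form short-circuits on the first match; the second
    # pattern is never evaluated here.
    assert any(fnmatch.fnmatch(path, pat) for pat in patterns)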
@@ -64,15 +64,9 @@ Translates over to:
    "/files/etc/apache2/apache2.conf/bLoCk[1]",
]
"""
from typing import Any
from typing import cast
from typing import Dict
from typing import Iterable
from typing import List
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Union
from acme.magic_typing import Set
from certbot import errors
from certbot.compat import os

from certbot_apache._internal import apache_util
from certbot_apache._internal import assertions
@@ -80,23 +74,18 @@ from certbot_apache._internal import interfaces
from certbot_apache._internal import parser
from certbot_apache._internal import parsernode_util as util

from certbot import errors
from certbot.compat import os


class AugeasParserNode(interfaces.ParserNode):
    """ Augeas implementation of ParserNode interface """

    def __init__(self, **kwargs: Any) -> None:
        # pylint: disable=unused-variable
        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)
        super().__init__(**kwargs)
    def __init__(self, **kwargs):
        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(AugeasParserNode, self).__init__(**kwargs)
        self.ancestor = ancestor
        self.filepath = filepath
        self.dirty = dirty
        self.metadata = metadata
        self.parser = cast(parser.ApacheParser,
                           self.metadata.get("augeasparser"))
        self.parser = self.metadata.get("augeasparser")
        try:
            if self.metadata["augeaspath"].endswith("/"):
                raise errors.PluginError(
@@ -107,20 +96,20 @@ class AugeasParserNode(interfaces.ParserNode):
        except KeyError:
            raise errors.PluginError("Augeas path is required")

    def save(self, msg: Iterable[str]) -> None:
    def save(self, msg):
        self.parser.save(msg)

    def find_ancestors(self, name: str) -> List["AugeasParserNode"]:
    def find_ancestors(self, name):
        """
        Searches for ancestor BlockNodes with a given name.

        :param str name: Name of the BlockNode parent to search for

        :returns: List of matching ancestor nodes.
        :rtype: list of AugeasParserNode
        :rtype: list of AugeasBlockNode
        """

        ancestors: List["AugeasParserNode"] = []
        ancestors = []

        parent = self.metadata["augeaspath"]
        while True:
@@ -130,12 +119,12 @@ class AugeasParserNode(interfaces.ParserNode):
            if not parent or parent == "/files":
                break
            anc = self._create_blocknode(parent)
            if anc.name is not None and anc.name.lower() == name.lower():
            if anc.name.lower() == name.lower():
                ancestors.append(anc)

        return ancestors

    def _create_blocknode(self, path: str) -> "AugeasBlockNode":
    def _create_blocknode(self, path):
        """
        Helper function to create a BlockNode from Augeas path. This is used by
        AugeasParserNode.find_ancestors and AugeasBlockNode.
@@ -143,25 +132,21 @@ class AugeasParserNode(interfaces.ParserNode):

        """

        name: str = self._aug_get_name(path)
        metadata: Dict[str, Union[parser.ApacheParser, str]] = {
            "augeasparser": self.parser, "augeaspath": path
        }
        name = self._aug_get_name(path)
        metadata = {"augeasparser": self.parser, "augeaspath": path}

        # Check if the file was included from the root config or initial state
        file_path = apache_util.get_file_path(path)
        if file_path is None:
            raise ValueError(f"No file path found for vhost: {path}.")  # pragma: no cover

        enabled = self.parser.parsed_in_original(file_path)
        enabled = self.parser.parsed_in_original(
            apache_util.get_file_path(path)
        )

        return AugeasBlockNode(name=name,
                               enabled=enabled,
                               ancestor=assertions.PASS,
                               filepath=file_path,
                               filepath=apache_util.get_file_path(path),
                               metadata=metadata)
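The `if file_path is None: raise ValueError(...)` guard added to _create_blocknode exists because the annotated get_file_path() returns Optional[str]; raising on None both documents the invariant and narrows the type for the calls below. The same pattern in isolation (the helper here is a stand-in, not the real Augeas code):

    from typing import Optional

    def get_file_path(path: str) -> Optional[str]:
        # Stand-in for apache_util.get_file_path().
        return path or None

    def create_node(path: str) -> str:
        file_path = get_file_path(path)
        if file_path is None:
            # Narrows Optional[str] to str for everything after this point.
            raise ValueError(f"No file path found for vhost: {path}.")
        return file_path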
    def _aug_get_name(self, path: str) -> str:
    def _aug_get_name(self, path):
        """
        Helper function to get name of a configuration block or variable from path.
        """
@@ -175,18 +160,20 @@ class AugeasParserNode(interfaces.ParserNode):

        # remove [...], it's not allowed in Apache configuration and is used
        # for indexing within Augeas
        return name.split("[")[0]
        name = name.split("[")[0]
        return name


class AugeasCommentNode(AugeasParserNode):
    """ Augeas implementation of CommentNode interface """

    def __init__(self, **kwargs: Any) -> None:
    def __init__(self, **kwargs):
        comment, kwargs = util.commentnode_kwargs(kwargs)  # pylint: disable=unused-variable
        super().__init__(**kwargs)
        super(AugeasCommentNode, self).__init__(**kwargs)
        # self.comment = comment
        self.comment = comment

    def __eq__(self, other: Any) -> bool:
    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.comment == other.comment and
                    self.filepath == other.filepath and
@@ -199,15 +186,15 @@ class AugeasCommentNode(AugeasParserNode):
class AugeasDirectiveNode(AugeasParserNode):
    """ Augeas implementation of DirectiveNode interface """

    def __init__(self, **kwargs: Any) -> None:
    def __init__(self, **kwargs):
        name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
        super().__init__(**kwargs)
        super(AugeasDirectiveNode, self).__init__(**kwargs)
        self.name = name
        self.enabled = enabled
        if parameters:
            self.set_parameters(parameters)

    def __eq__(self, other: Any) -> bool:
    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.name == other.name and
                    self.filepath == other.filepath and
@@ -218,7 +205,7 @@ class AugeasDirectiveNode(AugeasParserNode):
                    self.metadata == other.metadata)
        return False

    def set_parameters(self, parameters: Iterable[str]) -> None:
    def set_parameters(self, parameters):
        """
        Sets parameters of a DirectiveNode or BlockNode object.

@@ -237,7 +224,7 @@ class AugeasDirectiveNode(AugeasParserNode):
            self.parser.aug.set(param_path, param)

    @property
    def parameters(self) -> Tuple[str, ...]:
    def parameters(self):
        """
        Fetches the parameters from Augeas tree, ensuring that the sequence always
        represents the current state
@@ -247,22 +234,21 @@ class AugeasDirectiveNode(AugeasParserNode):
        """
        return tuple(self._aug_get_params(self.metadata["augeaspath"]))

    def _aug_get_params(self, path: str) -> List[str]:
    def _aug_get_params(self, path):
        """Helper function to get parameters for DirectiveNodes and BlockNodes"""

        arg_paths = self.parser.aug.match(path + "/arg")
        args = [self.parser.get_arg(apath) for apath in arg_paths]
        return [arg for arg in args if arg is not None]
        return [self.parser.get_arg(apath) for apath in arg_paths]
||||
class AugeasBlockNode(AugeasDirectiveNode):
|
||||
""" Augeas implementation of BlockNode interface """
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
super().__init__(**kwargs)
|
||||
self.children: Tuple["AugeasBlockNode", ...] = ()
|
||||
def __init__(self, **kwargs):
|
||||
super(AugeasBlockNode, self).__init__(**kwargs)
|
||||
self.children = ()
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, self.__class__):
|
||||
return (self.name == other.name and
|
||||
self.filepath == other.filepath and
|
||||
@@ -275,39 +261,33 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
return False
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def add_child_block(self, name: str, # pragma: no cover
|
||||
parameters: Optional[List[str]] = None,
|
||||
position: Optional[int] = None) -> "AugeasBlockNode":
|
||||
def add_child_block(self, name, parameters=None, position=None): # pragma: no cover
|
||||
"""Adds a new BlockNode to the sequence of children"""
|
||||
|
||||
insertpath, realpath, before = self._aug_resolve_child_position(
|
||||
name,
|
||||
position
|
||||
)
|
||||
new_metadata: Dict[str, Any] = {"augeasparser": self.parser, "augeaspath": realpath}
|
||||
new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}
|
||||
|
||||
# Create the new block
|
||||
self.parser.aug.insert(insertpath, name, before)
|
||||
# Check if the file was included from the root config or initial state
|
||||
file_path = apache_util.get_file_path(realpath)
|
||||
if file_path is None:
|
||||
raise errors.Error(f"No file path found for vhost: {realpath}") # pragma: no cover
|
||||
enabled = self.parser.parsed_in_original(file_path)
|
||||
|
||||
# Parameters will be set at the initialization of the new object
|
||||
return AugeasBlockNode(
|
||||
name=name,
|
||||
parameters=parameters,
|
||||
enabled=enabled,
|
||||
ancestor=assertions.PASS,
|
||||
filepath=file_path,
|
||||
metadata=new_metadata,
|
||||
enabled = self.parser.parsed_in_original(
|
||||
apache_util.get_file_path(realpath)
|
||||
)
|
||||
|
||||
# Parameters will be set at the initialization of the new object
|
||||
new_block = AugeasBlockNode(name=name,
|
||||
parameters=parameters,
|
||||
enabled=enabled,
|
||||
ancestor=assertions.PASS,
|
||||
filepath=apache_util.get_file_path(realpath),
|
||||
metadata=new_metadata)
|
||||
return new_block
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def add_child_directive(self, name: str, # pragma: no cover
|
||||
parameters: Optional[List[str]] = None,
|
||||
position: Optional[int] = None) -> AugeasDirectiveNode:
|
||||
def add_child_directive(self, name, parameters=None, position=None): # pragma: no cover
|
||||
"""Adds a new DirectiveNode to the sequence of children"""
|
||||
|
||||
if not parameters:
|
||||
@@ -324,50 +304,43 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
# Set the directive key
|
||||
self.parser.aug.set(realpath, name)
|
||||
# Check if the file was included from the root config or initial state
|
||||
file_path = apache_util.get_file_path(realpath)
|
||||
if file_path is None:
|
||||
raise errors.Error(f"No file path found for vhost: {realpath}") # pragma: no cover
|
||||
enabled = self.parser.parsed_in_original(file_path)
|
||||
|
||||
return AugeasDirectiveNode(
|
||||
name=name,
|
||||
parameters=parameters,
|
||||
enabled=enabled,
|
||||
ancestor=assertions.PASS,
|
||||
filepath=file_path,
|
||||
metadata=new_metadata,
|
||||
enabled = self.parser.parsed_in_original(
|
||||
apache_util.get_file_path(realpath)
|
||||
)
|
||||
|
||||
def add_child_comment(
|
||||
self, comment: str = "", position: Optional[int] = None
|
||||
) -> "AugeasCommentNode":
|
||||
new_dir = AugeasDirectiveNode(name=name,
|
||||
parameters=parameters,
|
||||
enabled=enabled,
|
||||
ancestor=assertions.PASS,
|
||||
filepath=apache_util.get_file_path(realpath),
|
||||
metadata=new_metadata)
|
||||
return new_dir
|
||||
|
||||
def add_child_comment(self, comment="", position=None):
|
||||
"""Adds a new CommentNode to the sequence of children"""
|
||||
|
||||
insertpath, realpath, before = self._aug_resolve_child_position(
|
||||
"#comment",
|
||||
position
|
||||
)
|
||||
new_metadata: Dict[str, Any] = {
|
||||
"augeasparser": self.parser, "augeaspath": realpath,
|
||||
}
|
||||
new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}
|
||||
|
||||
# Create the new comment
|
||||
self.parser.aug.insert(insertpath, "#comment", before)
|
||||
# Set the comment content
|
||||
self.parser.aug.set(realpath, comment)
|
||||
|
||||
return AugeasCommentNode(
|
||||
comment=comment,
|
||||
ancestor=assertions.PASS,
|
||||
filepath=apache_util.get_file_path(realpath),
|
||||
metadata=new_metadata,
|
||||
)
|
||||
new_comment = AugeasCommentNode(comment=comment,
|
||||
ancestor=assertions.PASS,
|
||||
filepath=apache_util.get_file_path(realpath),
|
||||
metadata=new_metadata)
|
||||
return new_comment
|
||||
|
||||
def find_blocks(self, name: str, exclude: bool = True) -> List["AugeasBlockNode"]:
|
||||
def find_blocks(self, name, exclude=True):
|
||||
"""Recursive search of BlockNodes from the sequence of children"""
|
||||
|
||||
nodes: List["AugeasBlockNode"] = []
|
||||
paths: Iterable[str] = self._aug_find_blocks(name)
|
||||
nodes = []
|
||||
paths = self._aug_find_blocks(name)
|
||||
if exclude:
|
||||
paths = self.parser.exclude_dirs(paths)
|
||||
for path in paths:
|
||||
@@ -375,14 +348,14 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
|
||||
return nodes
|
||||
|
||||
def find_directives(self, name: str, exclude: bool = True) -> List["AugeasDirectiveNode"]:
|
||||
def find_directives(self, name, exclude=True):
|
||||
"""Recursive search of DirectiveNodes from the sequence of children"""
|
||||
|
||||
nodes = []
|
||||
ownpath = self.metadata.get("augeaspath")
|
||||
|
||||
directives = self.parser.find_dir(name, start=ownpath, exclude=exclude)
|
||||
already_parsed: Set[str] = set()
|
||||
already_parsed = set() # type: Set[str]
|
||||
for directive in directives:
|
||||
# Remove the /arg part from the Augeas path
|
||||
directive = directive.partition("/arg")[0]
|
||||
@@ -394,14 +367,14 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
|
||||
return nodes
|
||||
|
||||
def find_comments(self, comment: str) -> List["AugeasCommentNode"]:
|
||||
def find_comments(self, comment):
|
||||
"""
|
||||
Recursive search of DirectiveNodes from the sequence of children.
|
||||
|
||||
:param str comment: Comment content to search for.
|
||||
"""
|
||||
|
||||
nodes: List["AugeasCommentNode"] = []
|
||||
nodes = []
|
||||
ownpath = self.metadata.get("augeaspath")
|
||||
|
||||
comments = self.parser.find_comments(comment, start=ownpath)
|
||||
@@ -410,11 +383,11 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
|
||||
return nodes
|
||||
|
||||
def delete_child(self, child: "AugeasParserNode") -> None:
|
||||
def delete_child(self, child):
|
||||
"""
|
||||
Deletes a ParserNode from the sequence of children, and raises an
|
||||
exception if it's unable to do so.
|
||||
:param AugeasParserNode child: A node to delete.
|
||||
:param AugeasParserNode: child: A node to delete.
|
||||
"""
|
||||
if not self.parser.aug.remove(child.metadata["augeaspath"]):
|
||||
|
||||
@@ -423,11 +396,11 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
"seem to exist.").format(child.metadata["augeaspath"])
|
||||
)
|
||||
|
||||
def unsaved_files(self) -> Set[str]:
|
||||
def unsaved_files(self):
|
||||
"""Returns a list of unsaved filepaths"""
|
||||
return self.parser.unsaved_files()
|
||||
|
||||
def parsed_paths(self) -> List[str]:
|
||||
def parsed_paths(self):
|
||||
"""
|
||||
Returns a list of file paths that have currently been parsed into the parser
|
||||
tree. The returned list may include paths with wildcard characters, for
|
||||
@@ -438,7 +411,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
:returns: list of file paths of files that have been parsed
|
||||
"""
|
||||
|
||||
res_paths: List[str] = []
|
||||
res_paths = []
|
||||
|
||||
paths = self.parser.existing_paths
|
||||
for directory in paths:
|
||||
@@ -447,7 +420,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
|
||||
return res_paths
|
||||
|
||||
def _create_commentnode(self, path: str) -> "AugeasCommentNode":
|
||||
def _create_commentnode(self, path):
|
||||
"""Helper function to create a CommentNode from Augeas path"""
|
||||
|
||||
comment = self.parser.aug.get(path)
|
||||
@@ -460,16 +433,14 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
filepath=apache_util.get_file_path(path),
|
||||
metadata=metadata)
|
||||
|
||||
def _create_directivenode(self, path: str) -> "AugeasDirectiveNode":
|
||||
def _create_directivenode(self, path):
|
||||
"""Helper function to create a DirectiveNode from Augeas path"""
|
||||
|
||||
name = self.parser.get_arg(path)
|
||||
metadata: Dict[str, Union[parser.ApacheParser, str]] = {
|
||||
"augeasparser": self.parser, "augeaspath": path,
|
||||
}
|
||||
metadata = {"augeasparser": self.parser, "augeaspath": path}
|
||||
|
||||
# Check if the file was included from the root config or initial state
|
||||
enabled: bool = self.parser.parsed_in_original(
|
||||
enabled = self.parser.parsed_in_original(
|
||||
apache_util.get_file_path(path)
|
||||
)
|
||||
return AugeasDirectiveNode(name=name,
|
||||
@@ -478,12 +449,12 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
filepath=apache_util.get_file_path(path),
|
||||
metadata=metadata)
|
||||
|
||||
def _aug_find_blocks(self, name: str) -> Set[str]:
|
||||
def _aug_find_blocks(self, name):
|
||||
"""Helper function to perform a search to Augeas DOM tree to search
|
||||
configuration blocks with a given name"""
|
||||
|
||||
# The code here is modified from configurator.get_virtual_hosts()
|
||||
blk_paths: Set[str] = set()
|
||||
blk_paths = set()
|
||||
for vhost_path in list(self.parser.parser_paths):
|
||||
paths = self.parser.aug.match(
|
||||
("/files%s//*[label()=~regexp('%s')]" %
|
||||
@@ -492,8 +463,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
name.lower() in os.path.basename(path).lower()])
|
||||
return blk_paths
|
||||
|
||||
def _aug_resolve_child_position(
|
||||
self, name: str, position: Optional[int]) -> Tuple[str, str, bool]:
|
||||
def _aug_resolve_child_position(self, name, position):
|
||||
"""
|
||||
Helper function that iterates through the immediate children and figures
|
||||
out the insertion path for a new AugeasParserNode.
|
||||
@@ -518,16 +488,16 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
"""
|
||||
|
||||
# Default to appending
|
||||
before: bool = False
|
||||
before = False
|
||||
|
||||
all_children: str = self.parser.aug.match("{}/*".format(
|
||||
all_children = self.parser.aug.match("{}/*".format(
|
||||
self.metadata["augeaspath"])
|
||||
)
|
||||
|
||||
# Calculate resulting_path
|
||||
# Augeas indices start at 1. We use counter to calculate the index to
|
||||
# be used in resulting_path.
|
||||
counter: int = 1
|
||||
counter = 1
|
||||
for i, child in enumerate(all_children):
|
||||
if position is not None and i >= position:
|
||||
# We're not going to insert the new node to an index after this
|
||||
@@ -536,7 +506,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
if name == childname:
|
||||
counter += 1
|
||||
|
||||
resulting_path: str = "{}/{}[{}]".format(
|
||||
resulting_path = "{}/{}[{}]".format(
|
||||
self.metadata["augeaspath"],
|
||||
name,
|
||||
counter
|
||||
@@ -560,7 +530,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
position
|
||||
)
|
||||
|
||||
return insert_path, resulting_path, before
|
||||
return (insert_path, resulting_path, before)
|
||||
|
||||
|
||||
interfaces.CommentNode.register(AugeasCommentNode)
|
||||
|
||||
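For orientation on _aug_resolve_child_position above: it builds the new child's Augeas path as `<parent>/<name>[<counter>]`, where the counter is one more than the number of same-named siblings before the insertion point (Augeas indices start at 1). A toy reconstruction of just that counting logic, with plain labels standing in for the real aug.match() results:

    def resulting_path(parent, name, sibling_labels, position=None):
        counter = 1
        for i, label in enumerate(sibling_labels):
            if position is not None and i >= position:
                break  # the new node goes before this index; stop counting
            if label == name:
                counter += 1
        return "{}/{}[{}]".format(parent, name, counter)

    # Two existing VirtualHost children -> the appended node is index 3.
    expected = "/files/etc/apache2/apache2.conf/VirtualHost[3]"
    assert resulting_path("/files/etc/apache2/apache2.conf", "VirtualHost",
                          ["VirtualHost", "Directory", "VirtualHost"]) == expected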
File diff suppressed because it is too large
@@ -1,22 +1,17 @@
"""Apache plugin constants."""
from typing import Dict
from typing import List

import pkg_resources

from certbot.compat import os

MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
"""Name of the mod_ssl config file as saved
in `certbot.configuration.NamespaceConfig.config_dir`."""
"""Name of the mod_ssl config file as saved in `IConfig.config_dir`."""


UPDATED_MOD_SSL_CONF_DIGEST = ".updated-options-ssl-apache-conf-digest.txt"
"""Name of the hash of the updated or informed mod_ssl_conf as saved
in `certbot.configuration.NamespaceConfig.config_dir`."""
"""Name of the hash of the updated or informed mod_ssl_conf as saved in `IConfig.config_dir`."""

# NEVER REMOVE A SINGLE HASH FROM THIS LIST UNLESS YOU KNOW EXACTLY WHAT YOU ARE DOING!
ALL_SSL_OPTIONS_HASHES: List[str] = [
ALL_SSL_OPTIONS_HASHES = [
    '2086bca02db48daf93468332543c60ac6acdb6f0b58c7bfdf578a5d47092f82a',
    '4844d36c9a0f587172d9fa10f4f1c9518e3bcfa1947379f155e16a70a728c21a',
    '5a922826719981c0a234b1fbcd495f3213e49d2519e845ea0748ba513044b65b',
@@ -32,8 +27,6 @@ ALL_SSL_OPTIONS_HASHES: List[str] = [
    '5cc003edd93fb9cd03d40c7686495f8f058f485f75b5e764b789245a386e6daf',
    '007cd497a56a3bb8b6a2c1aeb4997789e7e38992f74e44cc5d13a625a738ac73',
    '34783b9e2210f5c4a23bced2dfd7ec289834716673354ed7c7abf69fe30192a3',
    '61466bc2f98a623c02be8a5ee916ead1655b0ce883bdc936692076ea499ff5ce',
    '3fd812e3e87fe5c645d3682a511b2a06c8286f19594f28e280f17cd6af1301b5',
]
"""SHA256 hashes of the contents of previous versions of all versions of MOD_SSL_CONF_SRC"""

@@ -41,35 +34,39 @@ AUGEAS_LENS_DIR = pkg_resources.resource_filename(
    "certbot_apache", os.path.join("_internal", "augeas_lens"))
"""Path to the Augeas lens directory"""

REWRITE_HTTPS_ARGS: List[str] = [
REWRITE_HTTPS_ARGS = [
    "^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,NE,R=permanent]"]
"""Apache version<2.3.9 rewrite rule arguments used for redirections to
https vhost"""

REWRITE_HTTPS_ARGS_WITH_END = [
    "^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,NE,R=permanent]"]
"""Apache version >= 2.3.9 rewrite rule arguments used for redirections to
https vhost"""

OLD_REWRITE_HTTPS_ARGS: List[List[str]] = [
OLD_REWRITE_HTTPS_ARGS = [
    ["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,QSA,R=permanent]"],
    ["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,QSA,R=permanent]"],
    ["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,NE,R=permanent]"]]
    ["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,QSA,R=permanent]"]]

HSTS_ARGS: List[str] = ["always", "set", "Strict-Transport-Security",
HSTS_ARGS = ["always", "set", "Strict-Transport-Security",
             "\"max-age=31536000\""]
"""Apache header arguments for HSTS"""

UIR_ARGS: List[str] = ["always", "set", "Content-Security-Policy", "upgrade-insecure-requests"]
UIR_ARGS = ["always", "set", "Content-Security-Policy",
            "upgrade-insecure-requests"]

HEADER_ARGS: Dict[str, List[str]] = {
    "Strict-Transport-Security": HSTS_ARGS, "Upgrade-Insecure-Requests": UIR_ARGS,
}
HEADER_ARGS = {"Strict-Transport-Security": HSTS_ARGS,
               "Upgrade-Insecure-Requests": UIR_ARGS}

AUTOHSTS_STEPS: List[int] = [60, 300, 900, 3600, 21600, 43200, 86400]
AUTOHSTS_STEPS = [60, 300, 900, 3600, 21600, 43200, 86400]
"""AutoHSTS increase steps: 1min, 5min, 15min, 1h, 6h, 12h, 24h"""

AUTOHSTS_PERMANENT: int = 31536000
AUTOHSTS_PERMANENT = 31536000
"""Value for the last max-age of HSTS"""

AUTOHSTS_FREQ: int = 172800
AUTOHSTS_FREQ = 172800
"""Minimum time since last increase to perform a new one: 48h"""

MANAGED_COMMENT: str = "DO NOT REMOVE - Managed by Certbot"
MANAGED_COMMENT_ID: str = MANAGED_COMMENT + ", VirtualHost id: {0}"
MANAGED_COMMENT = "DO NOT REMOVE - Managed by Certbot"
MANAGED_COMMENT_ID = MANAGED_COMMENT+", VirtualHost id: {0}"
"""Managed by Certbot comments and the VirtualHost identification template"""
@@ -1,28 +1,24 @@
"""Contains UI methods for Apache operations."""
import logging
from typing import Iterable
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple

from certbot_apache._internal.obj import VirtualHost
import zope.component

from certbot import errors
from certbot import interfaces
from certbot.compat import os
from certbot.display import util as display_util
import certbot.display.util as display_util

logger = logging.getLogger(__name__)


def select_vhost_multiple(vhosts: Optional[List[VirtualHost]]) -> List[VirtualHost]:
def select_vhost_multiple(vhosts):
"""Select multiple Vhosts to install the certificate for

:param vhosts: Available Apache VirtualHosts
:type vhosts: :class:`list` of type `~VirtualHost`
:type vhosts: :class:`list` of type `~obj.Vhost`

:returns: List of VirtualHosts
:rtype: :class:`list`of type `~VirtualHost`
:rtype: :class:`list`of type `~obj.Vhost`
"""
if not vhosts:
return []
@@ -30,7 +26,7 @@ def select_vhost_multiple(vhosts: Optional[List[VirtualHost]]) -> List[VirtualHo
# Remove the extra newline from the last entry
if tags_list:
tags_list[-1] = tags_list[-1][:-1]
code, names = display_util.checklist(
code, names = zope.component.getUtility(interfaces.IDisplay).checklist(
"Which VirtualHosts would you like to install the wildcard certificate for?",
tags=tags_list, force_interactive=True)
if code == display_util.OK:
@@ -38,8 +34,7 @@ def select_vhost_multiple(vhosts: Optional[List[VirtualHost]]) -> List[VirtualHo
return return_vhosts
return []


def _reversemap_vhosts(names: Iterable[str], vhosts: Iterable[VirtualHost]) -> List[VirtualHost]:
def _reversemap_vhosts(names, vhosts):
"""Helper function for select_vhost_multiple for mapping string
representations back to actual vhost objects"""
return_vhosts = []
@@ -50,14 +45,11 @@ def _reversemap_vhosts(names: Iterable[str], vhosts: Iterable[VirtualHost]) -> L
return_vhosts.append(vhost)
return return_vhosts


def select_vhost(domain: str, vhosts: Sequence[VirtualHost]) -> Optional[VirtualHost]:
def select_vhost(domain, vhosts):
"""Select an appropriate Apache Vhost.

:param str domain: Domain for vhost selection

:param vhosts: Available Apache VirtualHosts
:type vhosts: :class:`list` of type `~VirtualHost`
:type vhosts: :class:`list` of type `~obj.Vhost`

:returns: VirtualHost or `None`
:rtype: `~obj.Vhost` or `None`
@@ -70,8 +62,7 @@ def select_vhost(domain: str, vhosts: Sequence[VirtualHost]) -> Optional[Virtual
return vhosts[tag]
return None


def _vhost_menu(domain: str, vhosts: Iterable[VirtualHost]) -> Tuple[str, int]:
def _vhost_menu(domain, vhosts):
"""Select an appropriate Apache Vhost.

:param vhosts: Available Apache Virtual Hosts
@@ -112,23 +103,23 @@ def _vhost_menu(domain: str, vhosts: Iterable[VirtualHost]) -> Tuple[str, int]:
https="HTTPS" if vhost.ssl else "",
active="Enabled" if vhost.enabled else "",
fn_size=filename_size,
name_size=disp_name_size),
name_size=disp_name_size)
)

try:
code, tag = display_util.menu(
f"We were unable to find a vhost with a ServerName "
f"or Address of {domain}.{os.linesep}Which virtual host would you "
f"like to choose?",
code, tag = zope.component.getUtility(interfaces.IDisplay).menu(
"We were unable to find a vhost with a ServerName "
"or Address of {0}.{1}Which virtual host would you "
"like to choose?".format(domain, os.linesep),
choices, force_interactive=True)
except errors.MissingCommandlineFlag:
msg = (
f"Encountered vhost ambiguity when trying to find a vhost for "
f"{domain} but was unable to ask for user "
f"guidance in non-interactive mode. Certbot may need "
f"vhosts to be explicitly labelled with ServerName or "
f"ServerAlias directives.")
logger.error(msg)
"Encountered vhost ambiguity when trying to find a vhost for "
"{0} but was unable to ask for user "
"guidance in non-interactive mode. Certbot may need "
"vhosts to be explicitly labelled with ServerName or "
"ServerAlias directives.".format(domain))
logger.warning(msg)
raise errors.MissingCommandlineFlag(msg)

return code, tag
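The ui.py hunks above toggle between two display conventions: one side looks up a display utility with `zope.component.getUtility(interfaces.IDisplay)` at every call site and builds messages with `.format()`, while the other calls `certbot.display.util` helpers directly and uses f-strings. A minimal sketch of the direct calling convention (assumes certbot is installed; the tag list here is made up):

```python
from certbot.display import util as display_util

# Module-level helpers replace the per-call zope.component utility lookup.
tags = ["one.example.com", "two.example.com"]  # hypothetical vhost labels
code, names = display_util.checklist(
    "Which VirtualHosts would you like to install the wildcard certificate for?",
    tags=tags, force_interactive=True)
if code == display_util.OK:
    print("selected:", names)
```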
@@ -1,45 +1,20 @@
""" Dual ParserNode implementation """
from typing import Any
from typing import Generic
from typing import Iterable
from typing import List
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar

from certbot_apache._internal import apacheparser
from certbot_apache._internal import assertions
from certbot_apache._internal import augeasparser
from certbot_apache._internal import interfaces

if TYPE_CHECKING:
from certbot_apache._internal.apacheparser import ApacheParserNode  # pragma: no cover
from certbot_apache._internal.augeasparser import AugeasParserNode  # pragma: no cover

GenericAugeasParserNode = TypeVar("GenericAugeasParserNode", bound="AugeasParserNode")
GenericApacheParserNode = TypeVar("GenericApacheParserNode", bound="ApacheParserNode")
GenericDualNode = TypeVar("GenericDualNode", bound="DualNodeBase")
from certbot_apache._internal import apacheparser


class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):
class DualNodeBase(object):
""" Dual parser interface for in development testing. This is used as the
base class for dual parser interface classes. This class handles runtime
attribute value assertions."""

def __init__(self, primary: GenericAugeasParserNode,
secondary: GenericApacheParserNode) -> None:
self.primary = primary
self.secondary = secondary

def save(self, msg: str) -> None:  # pragma: no cover
def save(self, msg):  # pragma: no cover
""" Call save for both parsers """
self.primary.save(msg)
self.secondary.save(msg)

def __getattr__(self, aname: str) -> Any:
def __getattr__(self, aname):
""" Attribute value assertion """
firstval = getattr(self.primary, aname)
secondval = getattr(self.secondary, aname)
@@ -53,12 +28,11 @@ class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):
assertions.assertEqualSimple(firstval, secondval)
return firstval

def find_ancestors(self, name: str) -> List["DualNodeBase"]:
def find_ancestors(self, name):
""" Traverses the ancestor tree and returns ancestors matching name """
return self._find_helper(DualBlockNode, "find_ancestors", name)

def _find_helper(self, nodeclass: Type[GenericDualNode], findfunc: str, search: str,
**kwargs: Any) -> List[GenericDualNode]:
def _find_helper(self, nodeclass, findfunc, search, **kwargs):
"""A helper for find_* functions. The function specific attributes should
be passed as keyword arguments.

@@ -98,11 +72,10 @@ class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):
return new_nodes


class DualCommentNode(DualNodeBase[augeasparser.AugeasCommentNode,
apacheparser.ApacheCommentNode]):
class DualCommentNode(DualNodeBase):
""" Dual parser implementation of CommentNode interface """

def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
""" This initialization implementation allows ordinary initialization
of CommentNode objects as well as creating a DualCommentNode object
using precreated or fetched CommentNode objects if provided as optional
@@ -122,21 +95,19 @@ class DualCommentNode(DualNodeBase[augeasparser.AugeasCommentNode,

if primary or secondary:
assert primary and secondary
super().__init__(primary, secondary)
self.primary = primary
self.secondary = secondary
else:
super().__init__(augeasparser.AugeasCommentNode(**kwargs),
apacheparser.ApacheCommentNode(**kwargs))
self.primary = augeasparser.AugeasCommentNode(**kwargs)
self.secondary = apacheparser.ApacheCommentNode(**kwargs)

assertions.assertEqual(self.primary, self.secondary)


class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
apacheparser.ApacheDirectiveNode]):
class DualDirectiveNode(DualNodeBase):
""" Dual parser implementation of DirectiveNode interface """

parameters: str

def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
""" This initialization implementation allows ordinary initialization
of DirectiveNode objects as well as creating a DualDirectiveNode object
using precreated or fetched DirectiveNode objects if provided as optional
@@ -147,6 +118,8 @@ class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
:param DirectiveNode primary: Primary pre-created DirectiveNode, mainly
used when creating new DualParser nodes using add_* methods.
:param DirectiveNode secondary: Secondary pre-created DirectiveNode


"""

kwargs.setdefault("primary", None)
@@ -156,14 +129,15 @@ class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,

if primary or secondary:
assert primary and secondary
super().__init__(primary, secondary)
self.primary = primary
self.secondary = secondary
else:
super().__init__(augeasparser.AugeasDirectiveNode(**kwargs),
apacheparser.ApacheDirectiveNode(**kwargs))
self.primary = augeasparser.AugeasDirectiveNode(**kwargs)
self.secondary = apacheparser.ApacheDirectiveNode(**kwargs)

assertions.assertEqual(self.primary, self.secondary)

def set_parameters(self, parameters: Iterable[str]) -> None:
def set_parameters(self, parameters):
""" Sets parameters and asserts that both implementation successfully
set the parameter sequence """

@@ -172,11 +146,10 @@ class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
assertions.assertEqual(self.primary, self.secondary)


class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
apacheparser.ApacheBlockNode]):
class DualBlockNode(DualNodeBase):
""" Dual parser implementation of BlockNode interface """

def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
""" This initialization implementation allows ordinary initialization
of BlockNode objects as well as creating a DualBlockNode object
using precreated or fetched BlockNode objects if provided as optional
@@ -191,20 +164,20 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,

kwargs.setdefault("primary", None)
kwargs.setdefault("secondary", None)
primary: Optional[augeasparser.AugeasBlockNode] = kwargs.pop("primary")
secondary: Optional[apacheparser.ApacheBlockNode] = kwargs.pop("secondary")
primary = kwargs.pop("primary")
secondary = kwargs.pop("secondary")

if primary or secondary:
assert primary and secondary
super().__init__(primary, secondary)
self.primary = primary
self.secondary = secondary
else:
super().__init__(augeasparser.AugeasBlockNode(**kwargs),
apacheparser.ApacheBlockNode(**kwargs))
self.primary = augeasparser.AugeasBlockNode(**kwargs)
self.secondary = apacheparser.ApacheBlockNode(**kwargs)

assertions.assertEqual(self.primary, self.secondary)

def add_child_block(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> "DualBlockNode":
def add_child_block(self, name, parameters=None, position=None):
""" Creates a new child BlockNode, asserts that both implementations
did it in a similar way, and returns a newly created DualBlockNode object
encapsulating both of the newly created objects """
@@ -212,10 +185,10 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
primary_new = self.primary.add_child_block(name, parameters, position)
secondary_new = self.secondary.add_child_block(name, parameters, position)
assertions.assertEqual(primary_new, secondary_new)
return DualBlockNode(primary=primary_new, secondary=secondary_new)
new_block = DualBlockNode(primary=primary_new, secondary=secondary_new)
return new_block

def add_child_directive(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> DualDirectiveNode:
def add_child_directive(self, name, parameters=None, position=None):
""" Creates a new child DirectiveNode, asserts that both implementations
did it in a similar way, and returns a newly created DualDirectiveNode
object encapsulating both of the newly created objects """
@@ -223,22 +196,21 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
primary_new = self.primary.add_child_directive(name, parameters, position)
secondary_new = self.secondary.add_child_directive(name, parameters, position)
assertions.assertEqual(primary_new, secondary_new)
return DualDirectiveNode(primary=primary_new, secondary=secondary_new)
new_dir = DualDirectiveNode(primary=primary_new, secondary=secondary_new)
return new_dir

def add_child_comment(self, comment: str = "",
position: Optional[int] = None) -> DualCommentNode:
def add_child_comment(self, comment="", position=None):
""" Creates a new child CommentNode, asserts that both implementations
did it in a similar way, and returns a newly created DualCommentNode
object encapsulating both of the newly created objects """

primary_new = self.primary.add_child_comment(comment=comment, position=position)
secondary_new = self.secondary.add_child_comment(name=comment, position=position)
primary_new = self.primary.add_child_comment(comment, position)
secondary_new = self.secondary.add_child_comment(comment, position)
assertions.assertEqual(primary_new, secondary_new)
return DualCommentNode(primary=primary_new, secondary=secondary_new)
new_comment = DualCommentNode(primary=primary_new, secondary=secondary_new)
return new_comment

def _create_matching_list(self, primary_list: Iterable[interfaces.ParserNode],
secondary_list: Iterable[interfaces.ParserNode]
) -> List[Tuple[interfaces.ParserNode, interfaces.ParserNode]]:
def _create_matching_list(self, primary_list, secondary_list):
""" Matches the list of primary_list to a list of secondary_list and
returns a list of tuples. This is used to create results for find_
methods.
@@ -265,7 +237,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
raise AssertionError("Could not find a matching node.")
return matched

def find_blocks(self, name: str, exclude: bool = True) -> List["DualBlockNode"]:
def find_blocks(self, name, exclude=True):
"""
Performs a search for BlockNodes using both implementations and does simple
checks for results. This is built upon the assumption that unimplemented
@@ -277,7 +249,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
return self._find_helper(DualBlockNode, "find_blocks", name,
exclude=exclude)

def find_directives(self, name: str, exclude: bool = True) -> List[DualDirectiveNode]:
def find_directives(self, name, exclude=True):
"""
Performs a search for DirectiveNodes using both implementations and
checks the results. This is built upon the assumption that unimplemented
@@ -289,7 +261,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
return self._find_helper(DualDirectiveNode, "find_directives", name,
exclude=exclude)

def find_comments(self, comment: str) -> List[DualCommentNode]:
def find_comments(self, comment):
"""
Performs a search for CommentNodes using both implementations and
checks the results. This is built upon the assumption that unimplemented
@@ -300,7 +272,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,

return self._find_helper(DualCommentNode, "find_comments", comment)

def delete_child(self, child: "DualBlockNode") -> None:
def delete_child(self, child):
"""Deletes a child from the ParserNode implementations. The actual
ParserNode implementations are used here directly in order to be able
to match a child to the list of children."""
@@ -308,7 +280,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
self.primary.delete_child(child.primary)
self.secondary.delete_child(child.secondary)

def unsaved_files(self) -> Set[str]:
def unsaved_files(self):
""" Fetches the list of unsaved file paths and asserts that the lists
match """
primary_files = self.primary.unsaved_files()
@@ -317,7 +289,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,

return primary_files

def parsed_paths(self) -> List[str]:
def parsed_paths(self):
"""
Returns a list of file paths that have currently been parsed into the parser
tree. The returned list may include paths with wildcard characters, for
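The dualparser hunks above switch `DualNodeBase` between a plain `object` subclass and a class generic over the two underlying parser node types, so subclasses such as `DualBlockNode` carry precise types for `self.primary` and `self.secondary`. A stripped-down, self-contained sketch of the generic pattern (the two node classes here are stand-ins, not the real Augeas and Apache parsers):

```python
from typing import Generic, TypeVar


class AugeasNode:  # stand-in for augeasparser nodes
    def save(self, msg: str) -> None:
        print("augeas save:", msg)


class ApacheNode:  # stand-in for apacheparser nodes
    def save(self, msg: str) -> None:
        print("apache save:", msg)


P = TypeVar("P", bound=AugeasNode)
S = TypeVar("S", bound=ApacheNode)


class DualBase(Generic[P, S]):
    """Forward each operation to both parsers, like DualNodeBase."""

    def __init__(self, primary: P, secondary: S) -> None:
        self.primary = primary
        self.secondary = secondary

    def save(self, msg: str) -> None:
        self.primary.save(msg)
        self.secondary.save(msg)


DualBase(AugeasNode(), ApacheNode()).save("example change")
```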
@@ -1,7 +1,7 @@
""" Entry point for Apache Plugin """
from typing import Dict
from typing import Type
from distutils.version import LooseVersion

from certbot import util
from certbot_apache._internal import configurator
from certbot_apache._internal import override_arch
from certbot_apache._internal import override_centos
@@ -10,11 +10,8 @@ from certbot_apache._internal import override_debian
from certbot_apache._internal import override_fedora
from certbot_apache._internal import override_gentoo
from certbot_apache._internal import override_suse
from certbot_apache._internal import override_void

from certbot import util

OVERRIDE_CLASSES: Dict[str, Type[configurator.ApacheConfigurator]] = {
OVERRIDE_CLASSES = {
"arch": override_arch.ArchConfigurator,
"cloudlinux": override_centos.CentOSConfigurator,
"darwin": override_darwin.DarwinConfigurator,
@@ -38,19 +35,17 @@ OVERRIDE_CLASSES: Dict[str, Type[configurator.ApacheConfigurator]] = {
"sles": override_suse.OpenSUSEConfigurator,
"scientific": override_centos.CentOSConfigurator,
"scientific linux": override_centos.CentOSConfigurator,
"void": override_void.VoidConfigurator,
}


def get_configurator() -> Type[configurator.ApacheConfigurator]:
def get_configurator():
""" Get correct configurator class based on the OS fingerprint """
os_name, os_version = util.get_os_info()
os_name = os_name.lower()
override_class = None

# Special case for older Fedora versions
min_version = util.parse_loose_version('29')
if os_name == 'fedora' and util.parse_loose_version(os_version) < min_version:
if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):
os_name = 'fedora_old'

try:
@@ -60,7 +55,8 @@ def get_configurator() -> Type[configurator.ApacheConfigurator]:
os_like = util.get_systemd_os_like()
if os_like:
for os_name in os_like:
override_class = OVERRIDE_CLASSES.get(os_name)
if os_name in OVERRIDE_CLASSES.keys():
override_class = OVERRIDE_CLASSES[os_name]
if not override_class:
# No override class found, return the generic configurator
override_class = configurator.ApacheConfigurator
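Beyond the type annotations, the entry point hunks show two small modernizations: `OVERRIDE_CLASSES.get(os_name)` replaces the `in ... keys()` membership test, and `util.parse_loose_version` replaces the deprecated `distutils` `LooseVersion`. A rough stand-in for the loose-version comparison (certbot's real helper may differ in edge cases):

```python
from typing import List, Union


def parse_loose_version(version: str) -> List[Union[int, str]]:
    """Split a version string into comparable int/str components."""
    return [int(p) if p.isdigit() else p for p in version.split(".")]


# Same comparison the new get_configurator() performs for old Fedora:
assert parse_loose_version("28") < parse_loose_version("29")
assert parse_loose_version("29.1") > parse_loose_version("29")
```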
@@ -1,22 +1,15 @@
"""A class that performs HTTP-01 challenges for Apache"""
import errno
import logging
from typing import List
from typing import Set
from typing import TYPE_CHECKING
import errno

from certbot_apache._internal.obj import VirtualHost
from certbot_apache._internal.parser import get_aug_path

from acme.challenges import KeyAuthorizationChallengeResponse
from acme.magic_typing import List
from acme.magic_typing import Set
from certbot import errors
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge
from certbot.compat import filesystem
from certbot.compat import os
from certbot.plugins import common

if TYPE_CHECKING:
from certbot_apache._internal.configurator import ApacheConfigurator  # pragma: no cover
from certbot_apache._internal.obj import VirtualHost  # pylint: disable=unused-import
from certbot_apache._internal.parser import get_aug_path

logger = logging.getLogger(__name__)

@@ -24,6 +17,22 @@ logger = logging.getLogger(__name__)
class ApacheHttp01(common.ChallengePerformer):
"""Class that performs HTTP-01 challenges within the Apache configurator."""

CONFIG_TEMPLATE22_PRE = """\
RewriteEngine on
RewriteRule ^/\\.well-known/acme-challenge/([A-Za-z0-9-_=]+)$ {0}/$1 [L]

"""
CONFIG_TEMPLATE22_POST = """\
<Directory {0}>
Order Allow,Deny
Allow from all
</Directory>
<Location /.well-known/acme-challenge>
Order Allow,Deny
Allow from all
</Location>
"""

CONFIG_TEMPLATE24_PRE = """\
RewriteEngine on
RewriteRule ^/\\.well-known/acme-challenge/([A-Za-z0-9-_=]+)$ {0}/$1 [END]
@@ -37,9 +46,8 @@ class ApacheHttp01(common.ChallengePerformer):
</Location>
"""

def __init__(self, configurator: "ApacheConfigurator") -> None:
super().__init__(configurator)
self.configurator: "ApacheConfigurator"
def __init__(self, *args, **kwargs):
super(ApacheHttp01, self).__init__(*args, **kwargs)
self.challenge_conf_pre = os.path.join(
self.configurator.conf("challenge-location"),
"le_http_01_challenge_pre.conf")
@@ -49,9 +57,9 @@ class ApacheHttp01(common.ChallengePerformer):
self.challenge_dir = os.path.join(
self.configurator.config.work_dir,
"http_challenges")
self.moded_vhosts: Set[VirtualHost] = set()
self.moded_vhosts = set()  # type: Set[VirtualHost]

def perform(self) -> List[KeyAuthorizationChallengeResponse]:
def perform(self):
"""Perform all HTTP-01 challenges."""
if not self.achalls:
return []
@@ -59,7 +67,8 @@ class ApacheHttp01(common.ChallengePerformer):
# About to make temporary changes to the config
self.configurator.save("Changes before challenge setup", True)

self.configurator.ensure_listen(str(self.configurator.config.http01_port))
self.configurator.ensure_listen(str(
self.configurator.config.http01_port))
self.prepare_http01_modules()

responses = self._set_up_challenges()
@@ -70,22 +79,26 @@ class ApacheHttp01(common.ChallengePerformer):

return responses

def prepare_http01_modules(self) -> None:
def prepare_http01_modules(self):
"""Make sure that we have the needed modules available for http01"""

if self.configurator.conf("handle-modules"):
needed_modules = ["rewrite", "authz_core"]
needed_modules = ["rewrite"]
if self.configurator.version < (2, 4):
needed_modules.append("authz_host")
else:
needed_modules.append("authz_core")
for mod in needed_modules:
if mod + "_module" not in self.configurator.parser.modules:
self.configurator.enable_mod(mod, temp=True)

def _mod_config(self) -> None:
selected_vhosts: List[VirtualHost] = []
def _mod_config(self):
selected_vhosts = []  # type: List[VirtualHost]
http_port = str(self.configurator.config.http01_port)

# Search for VirtualHosts matching by name
for chall in self.achalls:
selected_vhosts += self._matching_vhosts(chall.domain)
# Search for matching VirtualHosts
for vh in self._matching_vhosts(chall.domain):
selected_vhosts.append(vh)

# Ensure that we have one or more VirtualHosts that we can continue
# with. (one that listens to port configured with --http-01-port)
@@ -94,13 +107,9 @@ class ApacheHttp01(common.ChallengePerformer):
if any(a.is_wildcard() or a.get_port() == http_port for a in vhost.addrs):
found = True

# If there's at least one eligible VirtualHost, also add all unnamed VirtualHosts
# because they might match at runtime (#8890)
if found:
selected_vhosts += self._unnamed_vhosts()
# Otherwise, add every Virtualhost which listens on the right port
else:
selected_vhosts += self._relevant_vhosts()
if not found:
for vh in self._relevant_vhosts():
selected_vhosts.append(vh)

# Add the challenge configuration
for vh in selected_vhosts:
@@ -111,8 +120,15 @@ class ApacheHttp01(common.ChallengePerformer):
self.configurator.reverter.register_file_creation(
True, self.challenge_conf_post)

config_text_pre = self.CONFIG_TEMPLATE24_PRE.format(self.challenge_dir)
config_text_post = self.CONFIG_TEMPLATE24_POST.format(self.challenge_dir)
if self.configurator.version < (2, 4):
config_template_pre = self.CONFIG_TEMPLATE22_PRE
config_template_post = self.CONFIG_TEMPLATE22_POST
else:
config_template_pre = self.CONFIG_TEMPLATE24_PRE
config_template_post = self.CONFIG_TEMPLATE24_POST

config_text_pre = config_template_pre.format(self.challenge_dir)
config_text_post = config_template_post.format(self.challenge_dir)

logger.debug("writing a pre config file with text:\n %s", config_text_pre)
with open(self.challenge_conf_pre, "w") as new_conf:
@@ -121,7 +137,7 @@ class ApacheHttp01(common.ChallengePerformer):
with open(self.challenge_conf_post, "w") as new_conf:
new_conf.write(config_text_post)

def _matching_vhosts(self, domain: str) -> List[VirtualHost]:
def _matching_vhosts(self, domain):
"""Return all VirtualHost objects that have the requested domain name or
a wildcard name that would match the domain in ServerName or ServerAlias
directive.
@@ -135,9 +151,9 @@ class ApacheHttp01(common.ChallengePerformer):

return matching_vhosts

def _relevant_vhosts(self) -> List[VirtualHost]:
def _relevant_vhosts(self):
http01_port = str(self.configurator.config.http01_port)
relevant_vhosts: List[VirtualHost] = []
relevant_vhosts = []
for vhost in self.configurator.vhosts:
if any(a.is_wildcard() or a.get_port() == http01_port for a in vhost.addrs):
if not vhost.ssl:
@@ -151,19 +167,17 @@ class ApacheHttp01(common.ChallengePerformer):

return relevant_vhosts

def _unnamed_vhosts(self) -> List[VirtualHost]:
"""Return all VirtualHost objects with no ServerName"""
return [vh for vh in self.configurator.vhosts if vh.name is None]

def _set_up_challenges(self) -> List[KeyAuthorizationChallengeResponse]:
def _set_up_challenges(self):
if not os.path.isdir(self.challenge_dir):
with filesystem.temp_umask(0o022):
try:
filesystem.makedirs(self.challenge_dir, 0o755)
except OSError as exception:
if exception.errno not in (errno.EEXIST, errno.EISDIR):
raise errors.PluginError(
"Couldn't create root for http-01 challenge")
old_umask = filesystem.umask(0o022)
try:
filesystem.makedirs(self.challenge_dir, 0o755)
except OSError as exception:
if exception.errno not in (errno.EEXIST, errno.EISDIR):
raise errors.PluginError(
"Couldn't create root for http-01 challenge")
finally:
filesystem.umask(old_umask)

responses = []
for achall in self.achalls:
@@ -171,11 +185,10 @@ class ApacheHttp01(common.ChallengePerformer):

return responses

def _set_up_challenge(self, achall: KeyAuthorizationAnnotatedChallenge
) -> KeyAuthorizationChallengeResponse:
def _set_up_challenge(self, achall):
response, validation = achall.response_and_validation()

name: str = os.path.join(self.challenge_dir, achall.chall.encode("token"))
name = os.path.join(self.challenge_dir, achall.chall.encode("token"))

self.configurator.reverter.register_file_creation(True, name)
with open(name, 'wb') as f:
@@ -184,7 +197,7 @@ class ApacheHttp01(common.ChallengePerformer):

return response

def _set_up_include_directives(self, vhost: VirtualHost) -> None:
def _set_up_include_directives(self, vhost):
"""Includes override configuration to the beginning and to the end of
VirtualHost. Note that this include isn't added to Augeas search tree"""
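Two behavior differences stand out in the http_01 hunks: one side still branches on Apache 2.2 (`authz_host` and the 2.2 config templates) while the other is 2.4-only, and the manual `umask` save/restore around challenge-directory creation is traded for a `filesystem.temp_umask` context manager. A minimal sketch of what such a context manager looks like (an illustration, not certbot's implementation):

```python
import contextlib
import os
import tempfile


@contextlib.contextmanager
def temp_umask(mask: int):
    """Temporarily set the process umask, always restoring the old one."""
    old_mask = os.umask(mask)
    try:
        yield
    finally:
        os.umask(old_mask)


with temp_umask(0o022):
    # Directories created here get at most 0o755 permissions.
    os.makedirs(os.path.join(tempfile.gettempdir(), "http_challenges"),
                0o755, exist_ok=True)
```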
@@ -98,18 +98,14 @@ names and parameters in case insensitive manner. This does not apply to everythi
however, for example the parameters of a conditional statement may be case sensitive.
For this reason the internal representation of data should not ignore the case.
"""

import abc
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import TypeVar

GenericParserNode = TypeVar("GenericParserNode", bound="ParserNode")
import six


class ParserNode(metaclass=abc.ABCMeta):

@six.add_metaclass(abc.ABCMeta)
class ParserNode(object):
"""
ParserNode is the basic building block of the tree of such nodes,
representing the structure of the configuration. It is largely meant to keep
@@ -151,13 +147,9 @@ class ParserNode(metaclass=abc.ABCMeta):
# for the ParserNode instance.
metadata: Dict[str, Any]
"""
ancestor: Optional["ParserNode"]
dirty: bool
filepath: Optional[str]
metadata: Dict[str, Any]

@abc.abstractmethod
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""
Initializes the ParserNode instance, and sets the ParserNode specific
instance variables. This is not meant to be used directly, but through
@@ -181,7 +173,7 @@ class ParserNode(metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def save(self, msg: str) -> None:
def save(self, msg):
"""
Save traverses the children, and attempts to write the AST to disk for
all the objects that are marked dirty. The actual operation of course
@@ -200,7 +192,7 @@ class ParserNode(metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def find_ancestors(self: GenericParserNode, name: str) -> List[GenericParserNode]:
def find_ancestors(self, name):
"""
Traverses the ancestor tree up, searching for BlockNodes with a specific
name.
@@ -212,7 +204,9 @@ class ParserNode(metaclass=abc.ABCMeta):
"""


class CommentNode(ParserNode, metaclass=abc.ABCMeta):
# Linter rule exclusion done because of https://github.com/PyCQA/pylint/issues/179
@six.add_metaclass(abc.ABCMeta)  # pylint: disable=abstract-method
class CommentNode(ParserNode):
"""
CommentNode class is used for representation of comments within the parsed
configuration structure. Because of the nature of comments, it is not able
@@ -229,10 +223,9 @@ class CommentNode(ParserNode, metaclass=abc.ABCMeta):
comment: str

"""
comment: str

@abc.abstractmethod
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""
Initializes the CommentNode instance and sets its instance variables.

@@ -250,15 +243,14 @@ class CommentNode(ParserNode, metaclass=abc.ABCMeta):
created or changed after the last save. Default: False.
:type dirty: bool
"""
super().__init__(  # pragma: no cover
ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {}),
)
super(CommentNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {}))  # pragma: no cover


class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class DirectiveNode(ParserNode):
"""
DirectiveNode class represents a configuration directive within the configuration.
It can have zero or more parameters attached to it. Because of the nature of
@@ -284,12 +276,9 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
parameters: Tuple[str, ...]

"""
enabled: bool
name: Optional[str]
parameters: Tuple[str, ...]

@abc.abstractmethod
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""
Initializes the DirectiveNode instance and sets its instance variables.

@@ -319,26 +308,25 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
:type enabled: bool

"""
super().__init__(  # pragma: no cover
ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {}),
)
super(DirectiveNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {}))  # pragma: no cover

@abc.abstractmethod
def set_parameters(self, parameters: List[str]) -> None:
def set_parameters(self, parameters):
"""
Sets the sequence of parameters for this ParserNode object without
whitespaces. While the whitespaces for parameters are discarded when using
this method, the whitespacing preceding the ParserNode itself should be
this method, the whitespacing preceeding the ParserNode itself should be
kept intact.

:param list parameters: sequence of parameters
"""


class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class BlockNode(DirectiveNode):
"""
BlockNode class represents a block of nested configuration directives, comments
and other blocks as its children. A BlockNode can have zero or more parameters
@@ -378,14 +366,12 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
children: Tuple[ParserNode, ...]

"""
children: Tuple[ParserNode, ...]

@abc.abstractmethod
def add_child_block(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> "BlockNode":
def add_child_block(self, name, parameters=None, position=None):
"""
Adds a new BlockNode child node with provided values and marks the callee
BlockNode dirty. This is used to add new children to the AST. The preceding
BlockNode dirty. This is used to add new children to the AST. The preceeding
whitespaces should not be added based on the ancestor or siblings for the
newly created object. This is to match the current behavior of the legacy
parser implementation.
@@ -402,12 +388,11 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def add_child_directive(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> DirectiveNode:
def add_child_directive(self, name, parameters=None, position=None):
"""
Adds a new DirectiveNode child node with provided values and marks the
callee BlockNode dirty. This is used to add new children to the AST. The
preceding whitespaces should not be added based on the ancestor or siblings
preceeding whitespaces should not be added based on the ancestor or siblings
for the newly created object. This is to match the current behavior of the
legacy parser implementation.

@@ -424,11 +409,11 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def add_child_comment(self, comment: str = "", position: Optional[int] = None) -> CommentNode:
def add_child_comment(self, comment="", position=None):
"""
Adds a new CommentNode child node with provided value and marks the
callee BlockNode dirty. This is used to add new children to the AST. The
preceding whitespaces should not be added based on the ancestor or siblings
preceeding whitespaces should not be added based on the ancestor or siblings
for the newly created object. This is to match the current behavior of the
legacy parser implementation.

@@ -444,7 +429,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def find_blocks(self, name: str, exclude: bool = True) -> List["BlockNode"]:
def find_blocks(self, name, exclude=True):
"""
Find a configuration block by name. This method walks the child tree of
ParserNodes under the instance it was called from. This way it is possible
@@ -461,7 +446,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def find_directives(self, name: str, exclude: bool = True) -> List[DirectiveNode]:
def find_directives(self, name, exclude=True):
"""
Find a directive by name. This method walks the child tree of ParserNodes
under the instance it was called from. This way it is possible to search
@@ -479,7 +464,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def find_comments(self, comment: str) -> List[CommentNode]:
def find_comments(self, comment):
"""
Find comments with value containing the search term.

@@ -495,7 +480,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def delete_child(self, child: ParserNode) -> None:
def delete_child(self, child):
"""
Remove a specified child node from the list of children of the called
BlockNode object.
@@ -505,7 +490,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def unsaved_files(self) -> List[str]:
def unsaved_files(self):
"""
Returns a list of file paths that have been changed since the last save
(or the initial configuration parse). The intended use for this method
@@ -518,7 +503,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def parsed_paths(self) -> List[str]:
def parsed_paths(self):
"""
Returns a list of file paths that have currently been parsed into the parser
tree. The returned list may include paths with wildcard characters, for
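The interfaces hunks show the two spellings of an abstract base class: one side uses the `six.add_metaclass` decorator needed for Python 2/3 compatibility, the other uses Python 3's native `metaclass=` keyword and real attribute annotations. A compact demonstration that the two are equivalent at runtime:

```python
import abc


# six-based spelling (one side of the diff):
#   @six.add_metaclass(abc.ABCMeta)
#   class ParserNode(object): ...
#
# Native Python 3 spelling (the other side):
class ParserNode(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def save(self, msg: str) -> None:
        """Write dirty nodes back to disk."""


try:
    ParserNode()  # abstract classes cannot be instantiated
except TypeError as err:
    print(err)
```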
@@ -1,23 +1,14 @@
"""Module contains classes used by the Apache Configurator."""
import re
from typing import Any
from typing import Iterable
from typing import Optional
from typing import Pattern
from typing import Set
from typing import Union

from certbot_apache._internal.apacheparser import ApacheBlockNode
from certbot_apache._internal.augeasparser import AugeasBlockNode
from certbot_apache._internal.dualparser import DualBlockNode

from acme.magic_typing import Set
from certbot.plugins import common


class Addr(common.Addr):
"""Represents an Apache address."""

def __eq__(self, other: Any) -> bool:
def __eq__(self, other):
"""This is defined as equivalent within Apache.

ip_addr:* == ip_addr
@@ -29,20 +20,23 @@ class Addr(common.Addr):
self.is_wildcard() and other.is_wildcard()))
return False

def __repr__(self) -> str:
return f"certbot_apache._internal.obj.Addr({repr(self.tup)})"
def __ne__(self, other):
return not self.__eq__(other)

def __hash__(self) -> int:  # pylint: disable=useless-super-delegation
def __repr__(self):
return "certbot_apache._internal.obj.Addr(" + repr(self.tup) + ")"

def __hash__(self):  # pylint: disable=useless-super-delegation
# Python 3 requires explicit overridden for __hash__ if __eq__ or
# __cmp__ is overridden. See https://bugs.python.org/issue2235
return super().__hash__()
return super(Addr, self).__hash__()

def _addr_less_specific(self, addr: "Addr") -> bool:
def _addr_less_specific(self, addr):
"""Returns if addr.get_addr() is more specific than self.get_addr()."""
# pylint: disable=protected-access
return addr._rank_specific_addr() > self._rank_specific_addr()

def _rank_specific_addr(self) -> int:
def _rank_specific_addr(self):
"""Returns numerical rank for get_addr()

:returns: 2 - FQ, 1 - wildcard, 0 - _default_
@@ -55,7 +49,7 @@ class Addr(common.Addr):
return 1
return 2

def conflicts(self, addr: "Addr") -> bool:
def conflicts(self, addr):
r"""Returns if address could conflict with correct function of self.

Could addr take away service provided by self within Apache?
@@ -83,11 +77,11 @@ class Addr(common.Addr):
return True
return False

def is_wildcard(self) -> bool:
def is_wildcard(self):
"""Returns if address has a wildcard port."""
return self.tup[1] == "*" or not self.tup[1]

def get_sni_addr(self, port: str) -> common.Addr:
def get_sni_addr(self, port):
"""Returns the least specific address that resolves on the port.

Examples:
@@ -104,7 +98,7 @@ class Addr(common.Addr):
return self.get_addr_obj(port)


class VirtualHost:
class VirtualHost(object):
"""Represents an Apache Virtualhost.

:ivar str filep: file path of VH
@@ -127,16 +121,13 @@ class VirtualHost:

"""
# ?: is used for not returning enclosed characters
strip_name: Pattern = re.compile(r"^(?:.+://)?([^ :$]*)")
strip_name = re.compile(r"^(?:.+://)?([^ :$]*)")

def __init__(self, filepath: str, path: str, addrs: Set["Addr"], ssl: bool,
enabled: bool, name: Optional[str] = None, aliases: Optional[Set[str]] = None,
modmacro: bool = False, ancestor: Optional["VirtualHost"] = None,
node: Optional[Union[ApacheBlockNode, AugeasBlockNode, DualBlockNode]] = None
) -> None:
def __init__(self, filep, path, addrs, ssl, enabled, name=None,
aliases=None, modmacro=False, ancestor=None, node=None):

"""Initialize a VH."""
self.filep = filepath
self.filep = filep
self.path = path
self.addrs = addrs
self.name = name
@@ -147,9 +138,9 @@ class VirtualHost:
self.ancestor = ancestor
self.node = node

def get_names(self) -> Set[str]:
def get_names(self):
"""Return a set of all names."""
all_names: Set[str] = set()
all_names = set()  # type: Set[str]
all_names.update(self.aliases)
# Strip out any scheme:// and <port> field from servername
if self.name is not None:
@@ -157,28 +148,39 @@ class VirtualHost:

return all_names

def __str__(self) -> str:
def __str__(self):
return (
f"File: {self.filep}\n"
f"Vhost path: {self.path}\n"
f"Addresses: {', '.join(str(addr) for addr in self.addrs)}\n"
f"Name: {self.name if self.name is not None else ''}\n"
f"Aliases: {', '.join(name for name in self.aliases)}\n"
f"TLS Enabled: {'Yes' if self.ssl else 'No'}\n"
f"Site Enabled: {'Yes' if self.enabled else 'No'}\n"
f"mod_macro Vhost: {'Yes' if self.modmacro else 'No'}"
)
"File: {filename}\n"
"Vhost path: {vhpath}\n"
"Addresses: {addrs}\n"
"Name: {name}\n"
"Aliases: {aliases}\n"
"TLS Enabled: {tls}\n"
"Site Enabled: {active}\n"
"mod_macro Vhost: {modmacro}".format(
filename=self.filep,
vhpath=self.path,
addrs=", ".join(str(addr) for addr in self.addrs),
name=self.name if self.name is not None else "",
aliases=", ".join(name for name in self.aliases),
tls="Yes" if self.ssl else "No",
active="Yes" if self.enabled else "No",
modmacro="Yes" if self.modmacro else "No"))

def display_repr(self) -> str:
def display_repr(self):
"""Return a representation of VHost to be used in dialog"""
return (
f"File: {self.filep}\n"
f"Addresses: {', '.join(str(addr) for addr in self.addrs)}\n"
f"Names: {', '.join(self.get_names())}\n"
f"HTTPS: {'Yes' if self.ssl else 'No'}\n"
)
"File: {filename}\n"
"Addresses: {addrs}\n"
"Names: {names}\n"
"HTTPS: {https}\n".format(
filename=self.filep,
addrs=", ".join(str(addr) for addr in self.addrs),
names=", ".join(self.get_names()),
https="Yes" if self.ssl else "No"))

def __eq__(self, other: Any) -> bool:

def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.filep == other.filep and self.path == other.path and
self.addrs == other.addrs and
@@ -189,12 +191,15 @@ class VirtualHost:

return False

def __hash__(self) -> int:
def __ne__(self, other):
return not self.__eq__(other)

def __hash__(self):
return hash((self.filep, self.path,
tuple(self.addrs), tuple(self.get_names()),
self.ssl, self.enabled, self.modmacro))

def conflicts(self, addrs: Iterable[Addr]) -> bool:
def conflicts(self, addrs):
"""See if vhost conflicts with any of the addrs.

This determines whether or not these addresses would/could overwrite
@@ -213,7 +218,7 @@ class VirtualHost:
return True
return False

def same_server(self, vhost: "VirtualHost", generic: bool = False) -> bool:
def same_server(self, vhost, generic=False):
"""Determines if the vhost is the same 'server'.

Used in redirection - indicates whether or not the two virtual hosts
@@ -246,7 +251,7 @@ class VirtualHost:

# already_found acts to keep everything very conservative.
# Don't allow multiple ip:ports in same set.
already_found: Set[str] = set()
already_found = set()  # type: Set[str]

for addr in vhost.addrs:
for local_addr in self.addrs:
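One recurring change in the obj.py hunks is that the explicit `__ne__` methods disappear: in Python 3, `!=` is derived automatically from `__eq__`, while `__hash__` still has to be restated whenever `__eq__` is overridden. A tiny self-contained demonstration of that asymmetry:

```python
class Addr:
    """Minimal stand-in for obj.Addr's equality behavior."""

    def __init__(self, tup):
        self.tup = tup

    def __eq__(self, other):
        return isinstance(other, Addr) and self.tup == other.tup

    # Defining __eq__ resets __hash__ to None, so it must be restated,
    # which is why the diff keeps __hash__ but drops __ne__.
    def __hash__(self):
        return hash(self.tup)


assert Addr(("1.2.3.4", "80")) == Addr(("1.2.3.4", "80"))
assert Addr(("1.2.3.4", "80")) != Addr(("1.2.3.4", "443"))  # derived __ne__
```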
@@ -1,12 +1,15 @@
""" Distribution specific override class for Arch Linux """
import zope.interface

from certbot import interfaces
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions


@zope.interface.provider(interfaces.IPluginFactory)
class ArchConfigurator(configurator.ApacheConfigurator):
"""Arch Linux specific ApacheConfigurator override class"""

OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/httpd",
vhost_root="/etc/httpd/conf",
vhost_files="*.conf",
@@ -15,5 +18,11 @@ class ArchConfigurator(configurator.ApacheConfigurator):
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf",
bin=None,
)
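Across the override classes, `OS_DEFAULTS` toggles between a plain `dict(...)` and an `OsOptions` object, trading string-keyed `self.option("...")` lookups for attribute access. A rough sketch of the idea using a dataclass (the real `OsOptions` lives in `configurator` and its exact shape may differ):

```python
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class OsOptions:
    """Illustrative typed container for distro-specific defaults."""
    server_root: str = "/etc/apache2"
    vhost_root: str = "/etc/apache2/sites-available"
    version_cmd: List[str] = field(default_factory=lambda: ["apachectl", "-v"])
    bin: Optional[str] = None


opts = OsOptions(server_root="/etc/httpd", vhost_root="/etc/httpd/conf")
print(opts.server_root)  # attribute access replaces self.option("server_root")
```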
@@ -1,36 +1,44 @@
|
||||
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import zope.interface
|
||||
|
||||
from acme.magic_typing import List
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.errors import MisconfigurationError
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
from certbot import errors
|
||||
from certbot import util
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
"""CentOS specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf.d",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/httpd",
|
||||
ctl="apachectl",
|
||||
apache_bin="httpd",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf.d",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
def config_test(self) -> None:
|
||||
def config_test(self):
|
||||
"""
|
||||
Override config_test to mitigate configtest error in vanilla installation
|
||||
of mod_ssl in Fedora. The error is caused by non-existent self-signed
|
||||
@@ -42,52 +50,16 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
fedora = os_info[0].lower() == "fedora"
|
||||
|
||||
try:
|
||||
super().config_test()
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
except errors.MisconfigurationError:
|
||||
if fedora:
|
||||
self._try_restart_fedora()
|
||||
else:
|
||||
raise
|
||||
|
||||
def _rhel9_or_newer(self) -> bool:
|
||||
os_name, os_version = util.get_os_info()
|
||||
rhel_derived = os_name in [
|
||||
"centos", "centos linux",
|
||||
"cloudlinux",
|
||||
"ol", "oracle",
|
||||
"rhel", "redhatenterpriseserver", "red hat enterprise linux server",
|
||||
"scientific", "scientific linux",
|
||||
]
|
||||
# It is important that the loose version comparison below is not made
|
||||
# if the OS is not RHEL derived. See
|
||||
# https://github.com/certbot/certbot/issues/9481.
|
||||
if not rhel_derived:
|
||||
return False
|
||||
at_least_v9 = util.parse_loose_version(os_version) >= util.parse_loose_version('9')
|
||||
return at_least_v9
|
||||
|
||||
def _override_cmds(self) -> None:
|
||||
super()._override_cmds()
|
||||
|
||||
# As of RHEL 9, apachectl can't be passed flags like "-v" or "-t -D", so
|
||||
# instead use options.bin (i.e. httpd) for version_cmd and the various
|
||||
# get_X commands
|
||||
if self._rhel9_or_newer():
|
||||
if not self.options.bin:
|
||||
raise ValueError("OS option apache_bin must be set for CentOS") # pragma: no cover
|
||||
|
||||
self.options.version_cmd[0] = self.options.bin
|
||||
self.options.get_modules_cmd[0] = self.options.bin
|
||||
self.options.get_includes_cmd[0] = self.options.bin
|
||||
self.options.get_defines_cmd[0] = self.options.bin
|
||||
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for CentOS.")
|
||||
self.options.restart_cmd_alt[0] = self.options.ctl
|
||||
|
||||
def _try_restart_fedora(self) -> None:
|
||||
def _try_restart_fedora(self):
|
||||
"""
|
||||
Tries to restart httpd using systemctl to generate the self signed key pair.
|
||||
Tries to restart httpd using systemctl to generate the self signed keypair.
|
||||
"""
|
||||
|
||||
try:
|
||||
@@ -96,29 +68,145 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super().config_test()
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
|
||||
def get_parser(self) -> "CentOSParser":
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization in order to support
|
||||
alternative restart cmd used in CentOS.
|
||||
"""
|
||||
super(CentOSConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd_alt"][0] = self.option("ctl")
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return CentOSParser(
|
||||
self.options.server_root, self, self.options.vhost_root, self.version)
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _deploy_cert(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
"""
|
||||
Override _deploy_cert in order to ensure that the Apache configuration
|
||||
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
|
||||
that was created by Certbot
|
||||
"""
|
||||
super(CentOSConfigurator, self)._deploy_cert(*args, **kwargs)
|
||||
if self.version < (2, 4, 0):
|
||||
self._deploy_loadmodule_ssl_if_needed()
|
||||
|
||||
def _deploy_loadmodule_ssl_if_needed(self):
|
||||
"""
|
||||
Add "LoadModule ssl_module <pre-existing path>" to main httpd.conf if
|
||||
it doesn't exist there already.
|
||||
"""
|
||||
|
||||
loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
correct_ifmods = [] # type: List[str]
|
||||
loadmod_args = [] # type: List[str]
|
||||
loadmod_paths = [] # type: List[str]
|
||||
for m in loadmods:
|
||||
noarg_path = m.rpartition("/")[0]
|
||||
path_args = self.parser.get_all_args(noarg_path)
|
||||
if loadmod_args:
|
||||
if loadmod_args != path_args:
|
||||
msg = ("Certbot encountered multiple LoadModule directives "
|
||||
"for LoadModule ssl_module with differing library paths. "
|
||||
"Please remove or comment out the one(s) that are not in "
|
||||
"use, and run Certbot again.")
|
||||
raise MisconfigurationError(msg)
|
||||
else:
|
||||
loadmod_args = path_args
|
||||
|
||||
if self.parser.not_modssl_ifmodule(noarg_path): # pylint: disable=no-member
|
||||
if self.parser.loc["default"] in noarg_path:
|
||||
# LoadModule already in the main configuration file
|
||||
if ("ifmodule/" in noarg_path.lower() or
|
||||
"ifmodule[1]" in noarg_path.lower()):
|
||||
# It's the first or only IfModule in the file
|
||||
return
|
||||
# Populate the list of known !mod_ssl.c IfModules
|
||||
nodir_path = noarg_path.rpartition("/directive")[0]
|
||||
correct_ifmods.append(nodir_path)
|
||||
else:
|
||||
loadmod_paths.append(noarg_path)
|
||||
|
||||
if not loadmod_args:
|
||||
# Do not try to enable mod_ssl
|
||||
return
|
||||
|
||||
# Force creation as the directive wasn't found from the beginning of
|
||||
# httpd.conf
|
||||
rootconf_ifmod = self.parser.create_ifmod(
|
||||
parser.get_aug_path(self.parser.loc["default"]),
|
||||
"!mod_ssl.c", beginning=True)
|
||||
# parser.get_ifmod returns a path postfixed with "/", remove that
|
||||
self.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", loadmod_args)
|
||||
correct_ifmods.append(rootconf_ifmod[:-1])
|
||||
self.save_notes += "Added LoadModule ssl_module to main configuration.\n"
|
||||
|
||||
# Wrap LoadModule mod_ssl inside of <IfModule !mod_ssl.c> if it's not
|
||||
# configured like this already.
|
||||
for loadmod_path in loadmod_paths:
|
||||
nodir_path = loadmod_path.split("/directive")[0]
|
||||
# Remove the old LoadModule directive
|
||||
self.parser.aug.remove(loadmod_path)
|
||||
|
||||
# Create a new IfModule !mod_ssl.c if not already found on path
|
||||
ssl_ifmod = self.parser.get_ifmod(nodir_path, "!mod_ssl.c",
|
||||
beginning=True)[:-1]
|
||||
if ssl_ifmod not in correct_ifmods:
|
||||
self.parser.add_dir(ssl_ifmod, "LoadModule", loadmod_args)
|
||||
correct_ifmods.append(ssl_ifmod)
|
||||
self.save_notes += ("Wrapped pre-existing LoadModule ssl_module "
|
||||
"inside of <IfModule !mod_ssl> block.\n")
|
||||
|
||||
|
||||
class CentOSParser(parser.ApacheParser):
    """CentOS specific ApacheParser override class"""
    def __init__(self, *args: Any, **kwargs: Any) -> None:
    def __init__(self, *args, **kwargs):
        # CentOS specific configuration file for Apache
        self.sysconfig_filep: str = "/etc/sysconfig/httpd"
        super().__init__(*args, **kwargs)
        self.sysconfig_filep = "/etc/sysconfig/httpd"
        super(CentOSParser, self).__init__(*args, **kwargs)

    def update_runtime_variables(self) -> None:
    def update_runtime_variables(self):
        """ Override for update_runtime_variables for custom parsing """
        # Opportunistic, works if SELinux not enforced
        super().update_runtime_variables()
        super(CentOSParser, self).update_runtime_variables()
        self.parse_sysconfig_var()

    def parse_sysconfig_var(self) -> None:
    def parse_sysconfig_var(self):
        """ Parses Apache CLI options from CentOS configuration file """
        defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
        for k, v in defines.items():
            self.variables[k] = v
        for k in defines:
            self.variables[k] = defines[k]

    def not_modssl_ifmodule(self, path):
        """Checks if the provided Augeas path has argument !mod_ssl"""

        if "ifmodule" not in path.lower():
            return False

        # Trim the path to the last ifmodule
        workpath = path.lower()
        while workpath:
            # Get path to the last IfModule (ignore the tail)
            parts = workpath.rpartition("ifmodule")

            if not parts[0]:
                # IfModule not found
                break
            ifmod_path = parts[0] + parts[1]
            # Check if ifmodule had an index
            if parts[2].startswith("["):
                # Append the index from tail
                ifmod_path += parts[2].partition("/")[0]
            # Get the original path trimmed to correct length
            # This is required to preserve cases
            ifmod_real_path = path[0:len(ifmod_path)]
            if "!mod_ssl.c" in self.get_all_args(ifmod_real_path):
                return True
            # Set the workpath to the heading part
            workpath = parts[0]

        return False
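A small standalone sketch of the rpartition trimming performed above, using a made-up Augeas path (illustrative only):

path = "/files/etc/httpd/conf/httpd.conf/IfModule[2]/directive[1]"
parts = path.lower().rpartition("ifmodule")
ifmod_path = parts[0] + parts[1]              # ".../httpd.conf/ifmodule"
if parts[2].startswith("["):
    ifmod_path += parts[2].partition("/")[0]  # ".../httpd.conf/ifmodule[2]"
# Slicing the original path preserves the casing of "IfModule[2]"
ifmod_real_path = path[0:len(ifmod_path)]
assert ifmod_real_path == "/files/etc/httpd/conf/httpd.conf/IfModule[2]"
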
@@ -1,17 +1,28 @@
""" Distribution specific override class for macOS """
import zope.interface

from certbot import interfaces
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions


@zope.interface.provider(interfaces.IPluginFactory)
class DarwinConfigurator(configurator.ApacheConfigurator):
    """macOS specific ApacheConfigurator override class"""

    OS_DEFAULTS = OsOptions(
    OS_DEFAULTS = dict(
        server_root="/etc/apache2",
        vhost_root="/etc/apache2/other",
        vhost_files="*.conf",
        logs_root="/var/log/apache2",
        ctl="apachectl",
        version_cmd=['apachectl', '-v'],
        restart_cmd=['apachectl', 'graceful'],
        conftest_cmd=['apachectl', 'configtest'],
        enmod=None,
        dismod=None,
        le_vhost_ext="-le-ssl.conf",
        handle_modules=False,
        handle_sites=False,
        challenge_location="/etc/apache2/other",
        bin=None,
    )

@@ -1,30 +1,42 @@
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
import logging

from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
from certbot_apache._internal.obj import VirtualHost
import zope.interface

from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.compat import filesystem
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator

logger = logging.getLogger(__name__)


@zope.interface.provider(interfaces.IPluginFactory)
class DebianConfigurator(configurator.ApacheConfigurator):
    """Debian specific ApacheConfigurator override class"""

    OS_DEFAULTS = OsOptions(
    OS_DEFAULTS = dict(
        server_root="/etc/apache2",
        vhost_root="/etc/apache2/sites-available",
        vhost_files="*",
        logs_root="/var/log/apache2",
        ctl="apache2ctl",
        version_cmd=['apache2ctl', '-v'],
        restart_cmd=['apache2ctl', 'graceful'],
        conftest_cmd=['apache2ctl', 'configtest'],
        enmod="a2enmod",
        dismod="a2dismod",
        le_vhost_ext="-le-ssl.conf",
        handle_modules=True,
        handle_sites=True,
        challenge_location="/etc/apache2",
        bin=None,
    )

    def enable_site(self, vhost: VirtualHost) -> None:
    def enable_site(self, vhost):
        """Enables an available site, Apache reload required.

        .. note:: Does not make sure that the site correctly works or that all
@@ -46,7 +58,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
        if not os.path.isdir(os.path.dirname(enabled_path)):
            # For some reason, sites-enabled / sites-available do not exist
            # Call the parent method
            return super().enable_site(vhost)
            return super(DebianConfigurator, self).enable_site(vhost)
        self.reverter.register_file_creation(False, enabled_path)
        try:
            os.symlink(vhost.filep, enabled_path)
@@ -56,7 +68,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
                # Already in shape
                vhost.enabled = True
                return None
            logger.error(
            logger.warning(
                "Could not symlink %s to %s, got error: %s", enabled_path,
                vhost.filep, err.strerror)
            errstring = ("Encountered error while trying to enable a " +
@@ -69,7 +81,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
        self.save_notes += "Enabled site %s\n" % vhost.filep
        return None

    def enable_mod(self, mod_name: str, temp: bool = False) -> None:
    def enable_mod(self, mod_name, temp=False):
        """Enables module in Apache.

        Both enables and reloads Apache so module is active.
@@ -115,16 +127,16 @@ class DebianConfigurator(configurator.ApacheConfigurator):
        # Reload is not necessary as DUMP_RUN_CFG uses latest config.
        self.parser.update_runtime_variables()

    def _enable_mod_debian(self, mod_name: str, temp: bool) -> None:
    def _enable_mod_debian(self, mod_name, temp):
        """Assumes mods-available, mods-enabled layout."""
        # Generate reversal command.
        # Try to be safe here... check that we can probably reverse before
        # applying enmod command
        if (self.options.dismod is None or self.options.enmod is None
                or not util.exe_exists(self.options.dismod)):
        if not util.exe_exists(self.option("dismod")):
            raise errors.MisconfigurationError(
                "Unable to find a2dismod, please make sure a2enmod and "
                "a2dismod are configured correctly for certbot.")

        self.reverter.register_undo_command(temp, [self.options.dismod, "-f", mod_name])
        util.run_script([self.options.enmod, mod_name])
        self.reverter.register_undo_command(
            temp, [self.option("dismod"), "-f", mod_name])
        util.run_script([self.option("enmod"), mod_name])

@@ -1,19 +1,19 @@
""" Distribution specific override class for Fedora 29+ """
from typing import Any
import zope.interface

from certbot import errors
from certbot import interfaces
from certbot import util
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
from certbot_apache._internal.configurator import OsOptions

from certbot import errors
from certbot import util


@zope.interface.provider(interfaces.IPluginFactory)
class FedoraConfigurator(configurator.ApacheConfigurator):
    """Fedora 29+ specific ApacheConfigurator override class"""

    OS_DEFAULTS = OsOptions(
    OS_DEFAULTS = dict(
        server_root="/etc/httpd",
        vhost_root="/etc/httpd/conf.d",
        vhost_files="*.conf",
@@ -23,10 +23,16 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
        restart_cmd=['apachectl', 'graceful'],
        restart_cmd_alt=['apachectl', 'restart'],
        conftest_cmd=['apachectl', 'configtest'],
        enmod=None,
        dismod=None,
        le_vhost_ext="-le-ssl.conf",
        handle_modules=False,
        handle_sites=False,
        challenge_location="/etc/httpd/conf.d",
        bin=None,
    )

    def config_test(self) -> None:
    def config_test(self):
        """
        Override config_test to mitigate configtest error in vanilla installation
        of mod_ssl in Fedora. The error is caused by non-existent self-signed
@@ -34,18 +40,19 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
        during the first (re)start of httpd.
        """
        try:
            super().config_test()
            super(FedoraConfigurator, self).config_test()
        except errors.MisconfigurationError:
            self._try_restart_fedora()

    def get_parser(self) -> "FedoraParser":
    def get_parser(self):
        """Initializes the ApacheParser"""
        return FedoraParser(
            self.options.server_root, self, self.options.vhost_root, self.version)
            self.option("server_root"), self.option("vhost_root"),
            self.version, configurator=self)

    def _try_restart_fedora(self) -> None:
    def _try_restart_fedora(self):
        """
        Tries to restart httpd using systemctl to generate the self signed key pair.
        Tries to restart httpd using systemctl to generate the self signed keypair.
        """
        try:
            util.run_script(['systemctl', 'restart', 'httpd'])
@@ -53,37 +60,35 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
            raise errors.MisconfigurationError(str(err))

        # Finish with actual config check to see if systemctl restart helped
        super().config_test()
        super(FedoraConfigurator, self).config_test()

    def _prepare_options(self) -> None:
    def _prepare_options(self):
        """
        Override the options dictionary initialization to keep using apachectl
        instead of httpd and so take advantage of the new bash script in newer versions
        of Fedora to restart httpd.
        """
        super()._prepare_options()
        self.options.restart_cmd[0] = 'apachectl'
        if not self.options.restart_cmd_alt:  # pragma: no cover
            raise ValueError("OS option restart_cmd_alt must be set for Fedora.")
        self.options.restart_cmd_alt[0] = 'apachectl'
        self.options.conftest_cmd[0] = 'apachectl'
        super(FedoraConfigurator, self)._prepare_options()
        self.options["restart_cmd"][0] = 'apachectl'
        self.options["restart_cmd_alt"][0] = 'apachectl'
        self.options["conftest_cmd"][0] = 'apachectl'


class FedoraParser(parser.ApacheParser):
    """Fedora 29+ specific ApacheParser override class"""
    def __init__(self, *args: Any, **kwargs: Any) -> None:
    def __init__(self, *args, **kwargs):
        # Fedora 29+ specific configuration file for Apache
        self.sysconfig_filep = "/etc/sysconfig/httpd"
        super().__init__(*args, **kwargs)
        super(FedoraParser, self).__init__(*args, **kwargs)

    def update_runtime_variables(self) -> None:
    def update_runtime_variables(self):
        """ Override for update_runtime_variables for custom parsing """
        # Opportunistic, works if SELinux not enforced
        super().update_runtime_variables()
        super(FedoraParser, self).update_runtime_variables()
        self._parse_sysconfig_var()

    def _parse_sysconfig_var(self) -> None:
    def _parse_sysconfig_var(self):
        """ Parses Apache CLI options from Fedora configuration file """
        defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
        for k, v in defines.items():
            self.variables[k] = v
        for k in defines:
            self.variables[k] = defines[k]

@@ -1,61 +1,72 @@
""" Distribution specific override class for Gentoo Linux """
from typing import Any
import zope.interface

from certbot import interfaces
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
from certbot_apache._internal.configurator import OsOptions


@zope.interface.provider(interfaces.IPluginFactory)
class GentooConfigurator(configurator.ApacheConfigurator):
    """Gentoo specific ApacheConfigurator override class"""

    OS_DEFAULTS = OsOptions(
    OS_DEFAULTS = dict(
        server_root="/etc/apache2",
        vhost_root="/etc/apache2/vhosts.d",
        vhost_files="*.conf",
        logs_root="/var/log/apache2",
        ctl="apache2ctl",
        version_cmd=['apache2ctl', '-v'],
        restart_cmd=['apache2ctl', 'graceful'],
        restart_cmd_alt=['apache2ctl', 'restart'],
        conftest_cmd=['apache2ctl', 'configtest'],
        enmod=None,
        dismod=None,
        le_vhost_ext="-le-ssl.conf",
        handle_modules=False,
        handle_sites=False,
        challenge_location="/etc/apache2/vhosts.d",
        bin=None,
    )

    def _prepare_options(self) -> None:
    def _prepare_options(self):
        """
        Override the options dictionary initialization in order to support
        alternative restart cmd used in Gentoo.
        """
        super()._prepare_options()
        if not self.options.restart_cmd_alt:  # pragma: no cover
            raise ValueError("OS option restart_cmd_alt must be set for Gentoo.")
        self.options.restart_cmd_alt[0] = self.options.ctl
        super(GentooConfigurator, self)._prepare_options()
        self.options["restart_cmd_alt"][0] = self.option("ctl")

    def get_parser(self) -> "GentooParser":
    def get_parser(self):
        """Initializes the ApacheParser"""
        return GentooParser(
            self.options.server_root, self, self.options.vhost_root, self.version)
            self.option("server_root"), self.option("vhost_root"),
            self.version, configurator=self)


class GentooParser(parser.ApacheParser):
    """Gentoo specific ApacheParser override class"""
    def __init__(self, *args: Any, **kwargs: Any) -> None:
    def __init__(self, *args, **kwargs):
        # Gentoo specific configuration file for Apache2
        self.apacheconfig_filep = "/etc/conf.d/apache2"
        super().__init__(*args, **kwargs)
        super(GentooParser, self).__init__(*args, **kwargs)

    def update_runtime_variables(self) -> None:
    def update_runtime_variables(self):
        """ Override for update_runtime_variables for custom parsing """
        self.parse_sysconfig_var()
        self.update_modules()

    def parse_sysconfig_var(self) -> None:
    def parse_sysconfig_var(self):
        """ Parses Apache CLI options from Gentoo configuration file """
        defines = apache_util.parse_define_file(self.apacheconfig_filep,
                                                "APACHE2_OPTS")
        for k, v in defines.items():
            self.variables[k] = v
        for k in defines:
            self.variables[k] = defines[k]

    def update_modules(self) -> None:
    def update_modules(self):
        """Get loaded modules from httpd process, and add them to DOM"""
        mod_cmd = [self.configurator.options.ctl, "modules"]
        mod_cmd = [self.configurator.option("ctl"), "modules"]
        matches = apache_util.parse_from_subprocess(mod_cmd, r"(.*)_module")
        for mod in matches:
            self.add_mod(mod.strip())

@@ -1,19 +1,28 @@
""" Distribution specific override class for OpenSUSE """
import zope.interface

from certbot import interfaces
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions


@zope.interface.provider(interfaces.IPluginFactory)
class OpenSUSEConfigurator(configurator.ApacheConfigurator):
    """OpenSUSE specific ApacheConfigurator override class"""

    OS_DEFAULTS = OsOptions(
    OS_DEFAULTS = dict(
        server_root="/etc/apache2",
        vhost_root="/etc/apache2/vhosts.d",
        vhost_files="*.conf",
        ctl="apachectl",
        version_cmd=['apachectl', '-v'],
        restart_cmd=['apachectl', 'graceful'],
        conftest_cmd=['apachectl', 'configtest'],
        logs_root="/var/log/apache2",
        ctl="apache2ctl",
        version_cmd=['apache2ctl', '-v'],
        restart_cmd=['apache2ctl', 'graceful'],
        conftest_cmd=['apache2ctl', 'configtest'],
        enmod="a2enmod",
        dismod="a2dismod",
        le_vhost_ext="-le-ssl.conf",
        handle_modules=False,
        handle_sites=False,
        challenge_location="/etc/apache2/vhosts.d",
        bin=None,
    )

@@ -1,19 +0,0 @@
""" Distribution specific override class for Void Linux """
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions


class VoidConfigurator(configurator.ApacheConfigurator):
    """Void Linux specific ApacheConfigurator override class"""

    OS_DEFAULTS = OsOptions(
        server_root="/etc/apache",
        vhost_root="/etc/apache/extra",
        vhost_files="*.conf",
        logs_root="/var/log/httpd",
        ctl="apachectl",
        version_cmd=['apachectl', '-v'],
        restart_cmd=['apachectl', 'graceful'],
        conftest_cmd=['apachectl', 'configtest'],
        challenge_location="/etc/apache/extra",
    )

@@ -3,36 +3,21 @@ import copy
import fnmatch
import logging
import re
from typing import Collection
from typing import Dict
from typing import Iterable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Pattern
from typing import Set
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union
import sys

from certbot_apache._internal import apache_util
from certbot_apache._internal import constants
import six

from acme.magic_typing import Dict
from acme.magic_typing import List
from certbot import errors
from certbot.compat import os

if TYPE_CHECKING:
    from certbot_apache._internal.configurator import ApacheConfigurator  # pragma: no cover

try:
    from augeas import Augeas
except ImportError:  # pragma: no cover
    Augeas = None
from certbot_apache._internal import apache_util
from certbot_apache._internal import constants

logger = logging.getLogger(__name__)


class ApacheParser:
class ApacheParser(object):
    """Class handles the fine details of parsing the Apache Configuration.

    .. todo:: Make parsing general... remove sites-available etc...
@@ -44,12 +29,11 @@ class ApacheParser:
        default - user config file, name - NameVirtualHost,

    """
    arg_var_interpreter: Pattern = re.compile(r"\$\{[^ \}]*}")
    fnmatch_chars: Set[str] = {"*", "?", "\\", "[", "]"}
    arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
    fnmatch_chars = {"*", "?", "\\", "[", "]"}

    # pylint: disable=unused-argument
    def __init__(self, root: str, configurator: "ApacheConfigurator",
                 vhostroot: str, version: Tuple[int, ...] = (2, 4)) -> None:
    def __init__(self, root, vhostroot=None, version=(2, 4),
                 configurator=None):
        # Note: Order is important here.

        # Needed for calling save() with reverter functionality that resides in
@@ -58,7 +42,8 @@ class ApacheParser:
        self.configurator = configurator

        # Initialize augeas
        self.aug: Augeas = init_augeas()
        self.aug = None
        self.init_augeas()

        if not self.check_aug_version():
            raise errors.NotSupportedError(
@@ -66,17 +51,18 @@ class ApacheParser:
                "version 1.2.0 or higher, please make sure you have "
                "those installed.")

        self.modules: Dict[str, Optional[str]] = {}
        self.parser_paths: Dict[str, List[str]] = {}
        self.variables: Dict[str, str] = {}
        self.modules = {}  # type: Dict[str, str]
        self.parser_paths = {}  # type: Dict[str, List[str]]
        self.variables = {}  # type: Dict[str, str]

        # Find configuration root and make sure augeas can parse it.
        self.root: str = os.path.abspath(root)
        self.loc: Dict[str, str] = {"root": self._find_config_root()}
        self.root = os.path.abspath(root)
        self.loc = {"root": self._find_config_root()}
        self.parse_file(self.loc["root"])

        # Look up variables from httpd and add to DOM if not already parsed
        self.update_runtime_variables()
        if version >= (2, 4):
            # Look up variables from httpd and add to DOM if not already parsed
            self.update_runtime_variables()

        # This problem has been fixed in Augeas 1.0
        self.standardize_excl()
@@ -93,9 +79,31 @@ class ApacheParser:
        # Must also attempt to parse additional virtual host root
        if vhostroot:
            self.parse_file(os.path.abspath(vhostroot) + "/" +
                            self.configurator.options.vhost_files)
                            self.configurator.option("vhost_files"))

    def check_parsing_errors(self, lens: str) -> None:
        # check to see if there were unparsed define statements
        if version < (2, 4):
            if self.find_dir("Define", exclude=False):
                raise errors.PluginError("Error parsing runtime variables")

    def init_augeas(self):
        """ Initialize the actual Augeas instance """

        try:
            import augeas
        except ImportError:  # pragma: no cover
            raise errors.NoInstallationError("Problem in Augeas installation")

        self.aug = augeas.Augeas(
            # specify a directory to load our preferred lens from
            loadpath=constants.AUGEAS_LENS_DIR,
            # Do not save backup (we do it ourselves), do not load
            # anything by default
            flags=(augeas.Augeas.NONE |
                   augeas.Augeas.NO_MODL_AUTOLOAD |
                   augeas.Augeas.ENABLE_SPAN))

    def check_parsing_errors(self, lens):
        """Verify Augeas can parse all of the lens files.

        :param str lens: lens to check for errors
@@ -121,7 +129,7 @@ class ApacheParser:
                        self.aug.get(path + "/message")))
                raise errors.PluginError(msg)

    def check_aug_version(self) -> Union[bool, List[str]]:
    def check_aug_version(self):
        """ Checks that we have recent enough version of libaugeas.
        If augeas version is recent enough, it will support case insensitive
        regexp matching"""
@@ -136,7 +144,7 @@ class ApacheParser:
        self.aug.remove("/test/path")
        return matches

    def unsaved_files(self) -> Set[str]:
    def unsaved_files(self):
        """Lists files that have modified Augeas DOM but the changes have not
        been written to the filesystem yet, used by `self.save()` and
        ApacheConfigurator to check the file state.
@@ -175,7 +183,7 @@ class ApacheParser:
            save_files.add(self.aug.get(path)[6:])
        return save_files

    def ensure_augeas_state(self) -> None:
    def ensure_augeas_state(self):
        """Makes sure that all Augeas dom changes are written to files to avoid
        loss of configuration directives when doing additional augeas parsing,
        causing a possible augeas.load() resulting dom reset
@@ -185,7 +193,7 @@ class ApacheParser:
            self.configurator.save_notes += "(autosave)"
            self.configurator.save()

    def save(self, save_files: Iterable[str]) -> None:
    def save(self, save_files):
        """Saves all changes to the configuration files.

        save() is called from ApacheConfigurator to handle the parser specific
@@ -195,13 +203,7 @@ class ApacheParser:

        """
        self.configurator.save_notes = ""

        ex_errs = self.aug.match("/augeas//error")
        try:
            self.aug.save()
        except IOError:
            self._log_save_errors(ex_errs)
            raise
        self.aug.save()

        # Force reload if files were modified
        # This is needed to recalculate augeas directive span
@@ -210,24 +212,21 @@ class ApacheParser:
            self.aug.remove("/files/"+sf)
        self.aug.load()

    def _log_save_errors(self, ex_errs: Iterable[str]) -> None:
    def _log_save_errors(self, ex_errs):
        """Log errors due to bad Augeas save.

        :param list ex_errs: Existing errors before save

        """
        # Check for the root of save problems
        new_errs = [e for e in self.aug.match("/augeas//error") if e not in ex_errs]
        new_errs = self.aug.match("/augeas//error")
        # logger.error("During Save - %s", mod_conf)
        logger.error("Unable to save files: %s. Attempted Save Notes: %s",
                     ", ".join(err[13:len(err) - 6] for err in new_errs
                               # Only new errors caused by recent save
                               if err not in ex_errs), self.configurator.save_notes)

        for err in new_errs:
            logger.debug(
                "Error %s saving %s: %s", self.aug.get(err), err[13:len(err) - 6],
                self.aug.get(f"{err}/message"))
        logger.error(
            "Unable to save files: %s.%s", ", ".join(err[13:len(err) - 6] for err in new_errs),
            f" Save Notes: {self.configurator.save_notes}" if self.configurator.save_notes else "")

    def add_include(self, main_config: str, inc_path: str) -> None:
    def add_include(self, main_config, inc_path):
        """Add Include for a new configuration file if one does not exist

        :param str main_config: file path to main Apache config file
@@ -246,28 +245,28 @@ class ApacheParser:
            new_file = os.path.basename(inc_path)
            self.existing_paths.setdefault(new_dir, []).append(new_file)

    def add_mod(self, mod_name: str) -> None:
    def add_mod(self, mod_name):
        """Shortcut for updating parser modules."""
        if mod_name + "_module" not in self.modules:
            self.modules[mod_name + "_module"] = None
        if "mod_" + mod_name + ".c" not in self.modules:
            self.modules["mod_" + mod_name + ".c"] = None

    def reset_modules(self) -> None:
    def reset_modules(self):
        """Reset the loaded modules list. This is called from cleanup to clear
        temporarily loaded modules."""
        self.modules = {}
        self.update_modules()
        self.parse_modules()

    def parse_modules(self) -> None:
    def parse_modules(self):
        """Iterates on the configuration until no new modules are loaded.

        ..todo:: This should be attempted to be done with a binary to avoid
            the iteration issue. Else... parse and enable mods at same time.

        """
        mods: Dict[str, str] = {}
        mods = {}  # type: Dict[str, str]
        matches = self.find_dir("LoadModule")
        iterator = iter(matches)
        # Make sure prev_size != cur_size for do: while: iteration
@@ -276,7 +275,7 @@ class ApacheParser:
        while len(mods) != prev_size:
            prev_size = len(mods)

            for match_name, match_filename in zip(
            for match_name, match_filename in six.moves.zip(
                    iterator, iterator):
                mod_name = self.get_arg(match_name)
                mod_filename = self.get_arg(match_filename)
@@ -288,18 +287,19 @@ class ApacheParser:
                                 match_name[6:])
        self.modules.update(mods)
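The zip(iterator, iterator) construction above consumes the flat match list pairwise, yielding the name node and the filename node of each LoadModule directive together. A minimal demonstration of the idiom:

# Both zip arguments advance the same iterator, so each loop turn
# pops two consecutive items from the list.
matches = ["directive[1]/arg[1]", "directive[1]/arg[2]",
           "directive[2]/arg[1]", "directive[2]/arg[2]"]
iterator = iter(matches)
for match_name, match_filename in zip(iterator, iterator):
    print(match_name, match_filename)
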
    def update_runtime_variables(self) -> None:
    def update_runtime_variables(self):
        """Updates Defines, Includes and Modules from httpd config dump data"""

        self.update_defines()
        self.update_includes()
        self.update_modules()

    def update_defines(self) -> None:
    def update_defines(self):
        """Updates the dictionary of known variables in the configuration"""
        self.variables = apache_util.parse_defines(self.configurator.options.get_defines_cmd)

    def update_includes(self) -> None:
        self.variables = apache_util.parse_defines(self.configurator.option("ctl"))

    def update_includes(self):
        """Get includes from httpd process, and add them to DOM if needed"""

        # Find_dir iterates over configuration for Include and IncludeOptional
@@ -307,34 +307,34 @@ class ApacheParser:
        # configuration files
        _ = self.find_dir("Include")

        matches = apache_util.parse_includes(self.configurator.options.get_includes_cmd)
        matches = apache_util.parse_includes(self.configurator.option("ctl"))
        if matches:
            for i in matches:
                if not self.parsed_in_current(i):
                    self.parse_file(i)

    def update_modules(self) -> None:
    def update_modules(self):
        """Get loaded modules from httpd process, and add them to DOM"""

        matches = apache_util.parse_modules(self.configurator.options.get_modules_cmd)
        matches = apache_util.parse_modules(self.configurator.option("ctl"))
        for mod in matches:
            self.add_mod(mod.strip())

    def filter_args_num(self, matches: str, args: int) -> List[str]:
    def filter_args_num(self, matches, args):
        """Filter out directives with specific number of arguments.

        This function makes the assumption that all related arguments are given
        in order. Thus /files/apache/directive[5]/arg[2] must come immediately
        after /files/apache/directive[5]/arg[1]. Runs in 1 linear pass.

        :param str matches: Matches of all directives with arg nodes
        :param string matches: Matches of all directives with arg nodes
        :param int args: Number of args you would like to filter

        :returns: List of directives that contain # of arguments.
            (arg is stripped off)

        """
        filtered: List[str] = []
        filtered = []
        if args == 1:
            for i, match in enumerate(matches):
                if match.endswith("/arg"):
@@ -351,7 +351,7 @@ class ApacheParser:

        return filtered
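The contiguity assumption in filter_args_num is worth making concrete; a sketch with invented match paths:

# Invented Augeas paths: directive[5] carries exactly two args, listed
# back to back as the match query guarantees.
matches = [
    "/files/etc/apache2/apache2.conf/directive[5]/arg[1]",
    "/files/etc/apache2/apache2.conf/directive[5]/arg[2]",
]
# Filtering with args=2 keeps "/files/etc/apache2/apache2.conf/directive[5]"
# (with the "/arg[...]" tail stripped); args=1 would reject it, since a
# single-argument directive ends in a bare "/arg" node instead.
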
    def add_dir_to_ifmodssl(self, aug_conf_path: str, directive: str, args: List[str]) -> None:
    def add_dir_to_ifmodssl(self, aug_conf_path, directive, args):
        """Adds directive and value to IfMod ssl block.

        Adds given directive and value along configuration path within
@@ -360,7 +360,7 @@ class ApacheParser:

        :param str aug_conf_path: Desired Augeas config path to add directive
        :param str directive: Directive you would like to add, e.g. Listen
        :param args: Values of the directive; list of str (eg. ["443"])
        :param args: Values of the directive; str "443" or list of str
        :type args: list

        """
@@ -377,7 +377,7 @@ class ApacheParser:
        for i, arg in enumerate(args):
            self.aug.set("%s/arg[%d]" % (nvh_path, i + 1), arg)

    def get_ifmod(self, aug_conf_path: str, mod: str) -> str:
    def get_ifmod(self, aug_conf_path, mod, beginning=False):
        """Returns the path to <IfMod mod> and creates one if it doesn't exist.

        :param str aug_conf_path: Augeas configuration path
@@ -394,32 +394,39 @@ class ApacheParser:
        if_mods = self.aug.match(("%s/IfModule/*[self::arg='%s']" %
                                  (aug_conf_path, mod)))
        if not if_mods:
            return self.create_ifmod(aug_conf_path, mod)
            return self.create_ifmod(aug_conf_path, mod, beginning)

        # Strip off "arg" at end of first ifmod path
        return if_mods[0].rpartition("arg")[0]

    def create_ifmod(self, aug_conf_path: str, mod: str) -> str:
    def create_ifmod(self, aug_conf_path, mod, beginning=False):
        """Creates a new <IfMod mod> and returns its path.

        :param str aug_conf_path: Augeas configuration path
        :param str mod: module ie. mod_ssl.c
        :param bool beginning: If the IfModule should be created to the beginning
            of augeas path DOM tree.

        :returns: Augeas path of the newly created IfModule directive.
            The path may be dynamic, i.e. .../IfModule[last()]
        :rtype: str

        """
        c_path = "{}/IfModule[last() + 1]".format(aug_conf_path)
        c_path_arg = "{}/IfModule[last()]/arg".format(aug_conf_path)
        self.aug.set(c_path, "")
        retpath = "{}/IfModule[last()]/".format(aug_conf_path)
        if beginning:
            c_path_arg = "{}/IfModule[1]/arg".format(aug_conf_path)
            # Insert IfModule before the first directive
            self.aug.insert("{}/directive[1]".format(aug_conf_path),
                            "IfModule", True)
            retpath = "{}/IfModule[1]/".format(aug_conf_path)
        else:
            c_path = "{}/IfModule[last() + 1]".format(aug_conf_path)
            c_path_arg = "{}/IfModule[last()]/arg".format(aug_conf_path)
            self.aug.set(c_path, "")
            retpath = "{}/IfModule[last()]/".format(aug_conf_path)
        self.aug.set(c_path_arg, mod)
        return retpath
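Stripped of the surrounding class, the beginning=True branch amounts to two python-augeas calls; a sketch against a hypothetical configuration path (aug is a plain augeas.Augeas instance):

conf = "/files/etc/apache2/apache2.conf"  # hypothetical path
# Insert a new IfModule node before the first directive in the file...
aug.insert(conf + "/directive[1]", "IfModule", True)
# ...then attach its argument; the new node is now IfModule[1].
aug.set(conf + "/IfModule[1]/arg", "!mod_ssl.c")
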
    def add_dir(
        self, aug_conf_path: Optional[str], directive: Optional[str], args: Union[List[str], str]
    ) -> None:
    def add_dir(self, aug_conf_path, directive, args):
        """Appends directive to the end of the file given by aug_conf_path.

        .. note:: Not added to AugeasConfigurator because it may depend
@@ -431,7 +438,6 @@ class ApacheParser:
        :type args: list or str

        """
        aug_conf_path = aug_conf_path if aug_conf_path else ""
        self.aug.set(aug_conf_path + "/directive[last() + 1]", directive)
        if isinstance(args, list):
            for i, value in enumerate(args, 1):
@@ -440,8 +446,7 @@ class ApacheParser:
        else:
            self.aug.set(aug_conf_path + "/directive[last()]/arg", args)

    def add_dir_beginning(self, aug_conf_path: Optional[str], dirname: str,
                          args: Union[List[str], str]) -> None:
    def add_dir_beginning(self, aug_conf_path, dirname, args):
        """Adds the directive to the beginning of defined aug_conf_path.

        :param str aug_conf_path: Augeas configuration path to add directive
@@ -449,13 +454,8 @@ class ApacheParser:
        :param args: Value of the directive. ie. Listen 443, 443 is arg
        :type args: list or str
        """
        aug_conf_path = aug_conf_path if aug_conf_path else ""
        first_dir = aug_conf_path + "/directive[1]"
        if self.aug.get(first_dir):
            self.aug.insert(first_dir, "directive", True)
        else:
            self.aug.set(first_dir, "directive")

        self.aug.insert(first_dir, "directive", True)
        self.aug.set(first_dir, dirname)
        if isinstance(args, list):
            for i, value in enumerate(args, 1):
@@ -463,7 +463,7 @@ class ApacheParser:
        else:
            self.aug.set(first_dir + "/arg", args)

    def add_comment(self, aug_conf_path: str, comment: str) -> None:
    def add_comment(self, aug_conf_path, comment):
        """Adds the comment to the augeas path

        :param str aug_conf_path: Augeas configuration path to add directive
@@ -472,7 +472,7 @@ class ApacheParser:
        """
        self.aug.set(aug_conf_path + "/#comment[last() + 1]", comment)

    def find_comments(self, arg: str, start: Optional[str] = None) -> List[str]:
    def find_comments(self, arg, start=None):
        """Finds a comment with specified content from the provided DOM path

        :param str arg: Comment content to search
@@ -494,8 +494,7 @@ class ApacheParser:
                results.append(comment)
        return results

    def find_dir(self, directive: str, arg: Optional[str] = None,
                 start: Optional[str] = None, exclude: bool = True) -> List[str]:
    def find_dir(self, directive, arg=None, start=None, exclude=True):
        """Finds directive in the configuration.

        Recursively searches through config files to find directives
@@ -523,8 +522,6 @@ class ApacheParser:
        :param bool exclude: Whether or not to exclude directives based on
            variables and enabled modules

        :rtype list

        """
        # Cannot place member variable in the definition of the function so...
        if not start:
@@ -556,7 +553,7 @@ class ApacheParser:
        else:
            arg_suffix = "/*[self::arg=~regexp('%s')]" % case_i(arg)

        ordered_matches: List[str] = []
        ordered_matches = []  # type: List[str]

        # TODO: Wildcards should be included in alphabetical order
        # https://httpd.apache.org/docs/2.4/mod/core.html#include
@@ -573,7 +570,21 @@ class ApacheParser:

        return ordered_matches

    def get_arg(self, match: str) -> Optional[str]:
    def get_all_args(self, match):
        """
        Tries to fetch all arguments for a directive. See get_arg.

        Note that if match is an ancestor node, it returns all names of
        child directives as well as the list of arguments.

        """

        if match[-1] != "/":
            match = match+"/"
        allargs = self.aug.match(match + '*')
        return [self.get_arg(arg) for arg in allargs]

    def get_arg(self, match):
        """Uses augeas.get to get argument value and interprets result.

        This also converts all variables and parameters appropriately.
@@ -588,7 +599,6 @@ class ApacheParser:
        # e.g. strip now, not later
        if not value:
            return None

        value = value.strip("'\"")

        variables = ApacheParser.arg_var_interpreter.findall(value)
@@ -602,13 +612,13 @@ class ApacheParser:

        return value
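The quote stripping and ${VAR} interpolation in get_arg reduce to a few lines; a standalone sketch in which the variables dict stands in for self.variables:

import re

arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
variables = {"APACHE_LOG_DIR": "/var/log/apache2"}  # stand-in for self.variables

value = '"${APACHE_LOG_DIR}/access.log"'.strip("'\"")
for var in arg_var_interpreter.findall(value):
    # var is "${APACHE_LOG_DIR}"; var[2:-1] strips the "${" and "}".
    value = value.replace(var, variables[var[2:-1]])
assert value == "/var/log/apache2/access.log"
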
    def get_root_augpath(self) -> str:
    def get_root_augpath(self):
        """
        Returns the Augeas path of root configuration.
        """
        return get_aug_path(self.loc["root"])

    def exclude_dirs(self, matches: Iterable[str]) -> List[str]:
    def exclude_dirs(self, matches):
        """Exclude directives that are not loaded into the configuration."""
        filters = [("ifmodule", self.modules.keys()), ("ifdefine", self.variables)]

@@ -622,7 +632,7 @@ class ApacheParser:
                valid_matches.append(match)
        return valid_matches

    def _pass_filter(self, match: str, filter_: Tuple[str, Collection[str]]) -> bool:
    def _pass_filter(self, match, filter_):
        """Determine if directive passes a filter.

        :param str match: Augeas path
@@ -651,7 +661,7 @@ class ApacheParser:

        return True

    def standard_path_from_server_root(self, arg: str) -> str:
    def standard_path_from_server_root(self, arg):
        """Ensure paths are consistent and absolute

        :param str arg: Argument of directive
@@ -670,7 +680,7 @@ class ApacheParser:
        arg = os.path.normpath(arg)
        return arg

    def _get_include_path(self, arg: Optional[str]) -> Optional[str]:
    def _get_include_path(self, arg):
        """Converts an Apache Include directive into Augeas path.

        Converts an Apache Include directive argument into an Augeas
@@ -690,8 +700,6 @@ class ApacheParser:
        # if matchObj.group() != arg:
        #     logger.error("Error: Invalid regexp characters in %s", arg)
        #     return []
        if arg is None:
            return None  # pragma: no cover
        arg = self.standard_path_from_server_root(arg)

        # Attempts to add a transform to the file if one does not already exist
@@ -716,7 +724,7 @@ class ApacheParser:

        return get_aug_path(arg)

    def fnmatch_to_re(self, clean_fn_match: str) -> str:
    def fnmatch_to_re(self, clean_fn_match):
        """Method converts Apache's basic fnmatch to regular expression.

        Assumption - Configs are assumed to be well-formed and only writable by
@@ -730,10 +738,13 @@ class ApacheParser:
        :rtype: str

        """
        if sys.version_info < (3, 6):
            # This strips off final /Z(?ms)
            return fnmatch.translate(clean_fn_match)[:-7]  # pragma: no cover
        # Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
        return fnmatch.translate(clean_fn_match)[4:-3]  # pragma: no cover
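The slice offsets track the output format of fnmatch.translate, which changed in Python 3.6; checking in an interpreter makes them concrete:

import fnmatch

pat = fnmatch.translate("*.conf")   # "(?s:.*\\.conf)\\Z" on Python 3.6+
core = pat[4:-3]                    # strips "(?s:" and ")\\Z" -> ".*\\.conf"
# Before 3.6 the result ended in "\\Z(?ms)" instead, hence the older [:-7].
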
    def parse_file(self, filepath: str) -> None:
    def parse_file(self, filepath):
        """Parse file with Augeas

        Checks to see if file_path is parsed by Augeas
@@ -760,7 +771,7 @@ class ApacheParser:
            self._add_httpd_transform(filepath)
            self.aug.load()

    def parsed_in_current(self, filep: Optional[str]) -> bool:
    def parsed_in_current(self, filep):
        """Checks if the file path is parsed by current Augeas parser config
        ie. returns True if the file is found on a path that's found in live
        Augeas configuration.
@@ -770,11 +781,9 @@ class ApacheParser:
        :returns: True if file is parsed in existing configuration tree
        :rtype: bool
        """
        if not filep:
            return False  # pragma: no cover
        return self._parsed_by_parser_paths(filep, self.parser_paths)

    def parsed_in_original(self, filep: Optional[str]) -> bool:
    def parsed_in_original(self, filep):
        """Checks if the file path is parsed by existing Apache config.
        ie. returns True if the file is found on a path that matches Include or
        IncludeOptional statement in the Apache configuration.
@@ -784,11 +793,9 @@ class ApacheParser:
        :returns: True if file is parsed in existing configuration tree
        :rtype: bool
        """
        if not filep:
            return False  # pragma: no cover
        return self._parsed_by_parser_paths(filep, self.existing_paths)

    def _parsed_by_parser_paths(self, filep: str, paths: Mapping[str, List[str]]) -> bool:
    def _parsed_by_parser_paths(self, filep, paths):
        """Helper function that searches through provided paths and returns
        True if file path is found in the set"""
        for directory in paths:
@@ -797,7 +804,7 @@ class ApacheParser:
                    return True
        return False

    def _check_path_actions(self, filepath: str) -> Tuple[bool, bool]:
    def _check_path_actions(self, filepath):
        """Determine actions to take with a new augeas path

        This helper function will return a tuple that defines
@@ -822,7 +829,7 @@ class ApacheParser:
            remove_old = False
        return use_new, remove_old

    def _remove_httpd_transform(self, filepath: str) -> None:
    def _remove_httpd_transform(self, filepath):
        """Remove path from Augeas transform

        :param str filepath: filepath to remove
@@ -837,7 +844,7 @@ class ApacheParser:
            self.aug.remove(remove_inc[0])
        self.parser_paths.pop(remove_dirname)

    def _add_httpd_transform(self, incl: str) -> None:
    def _add_httpd_transform(self, incl):
        """Add a transform to Augeas.

        This function will correctly add a transform to augeas
@@ -847,7 +854,7 @@ class ApacheParser:
        :param str incl: filepath to include for transform

        """
        last_include: str = self.aug.match("/augeas/load/Httpd/incl [last()]")
        last_include = self.aug.match("/augeas/load/Httpd/incl [last()]")
        if last_include:
            # Insert a new node immediately after the last incl
            self.aug.insert(last_include[0], "incl", False)
@@ -865,7 +872,7 @@ class ApacheParser:
            self.parser_paths[os.path.dirname(incl)] = [
                os.path.basename(incl)]

    def standardize_excl(self) -> None:
    def standardize_excl(self):
        """Standardize the excl arguments for the Httpd lens in Augeas.

        Note: Hack!
@@ -897,16 +904,16 @@ class ApacheParser:

        self.aug.load()

    def _set_locations(self) -> Dict[str, str]:
    def _set_locations(self):
        """Set default location for directives.

        Locations are given as file_paths
        .. todo:: Make sure that files are included

        """
        default: str = self.loc["root"]
        default = self.loc["root"]

        temp: str = os.path.join(self.root, "ports.conf")
        temp = os.path.join(self.root, "ports.conf")
        if os.path.isfile(temp):
            listen = temp
            name = temp
@@ -916,7 +923,7 @@ class ApacheParser:

        return {"default": default, "listen": listen, "name": name}

    def _find_config_root(self) -> str:
    def _find_config_root(self):
        """Find the Apache Configuration Root file."""
        location = ["apache2.conf", "httpd.conf", "conf/httpd.conf"]
        for name in location:
@@ -925,7 +932,7 @@ class ApacheParser:
        raise errors.NoInstallationError("Could not find configuration root")


def case_i(string: str) -> str:
def case_i(string):
    """Returns case insensitive regex.

    Returns a sloppy, but necessary version of a case insensitive regex.
@@ -941,26 +948,10 @@ def case_i(string: str) -> str:
                   if c.isalpha() else c for c in re.escape(string))
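Because some Augeas builds expose no case-insensitivity flag for regexp matching, case_i expands each letter into a two-character class; its output is simple to verify:

import re

pattern = case_i("ssl")            # "[Ss][Ss][Ll]"
assert re.fullmatch(pattern, "SSL")
assert re.fullmatch(pattern, "ssl")
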
def get_aug_path(file_path: str) -> str:
def get_aug_path(file_path):
    """Return augeas path for full filepath.

    :param str file_path: Full filepath

    """
    return "/files%s" % file_path


def init_augeas() -> Augeas:
    """ Initialize the actual Augeas instance """

    if not Augeas:  # pragma: no cover
        raise errors.NoInstallationError("Problem in Augeas installation")

    return Augeas(
        # specify a directory to load our preferred lens from
        loadpath=constants.AUGEAS_LENS_DIR,
        # Do not save backup (we do it ourselves), do not load
        # anything by default
        flags=(Augeas.NONE |
               Augeas.NO_MODL_AUTOLOAD |
               Augeas.ENABLE_SPAN))

@@ -1,14 +1,7 @@
"""ParserNode utils"""
from typing import Any
from typing import Dict
from typing import Iterable
from typing import Optional
from typing import Tuple

from certbot_apache._internal.interfaces import ParserNode


def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Dict[str, Any]:
def validate_kwargs(kwargs, required_names):
    """
    Ensures that the kwargs dict has all the expected values. This function modifies
    the kwargs dictionary, and hence the returned dictionary should be used instead
@@ -18,7 +11,7 @@ def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Di
    :param list required_names: List of required parameter names.
    """

    validated_kwargs: Dict[str, Any] = {}
    validated_kwargs = {}
    for name in required_names:
        try:
            validated_kwargs[name] = kwargs.pop(name)
@@ -32,8 +25,7 @@ def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Di
    return validated_kwargs
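A hedged usage sketch for validate_kwargs (the argument values are invented):

kwargs = {"ancestor": None, "dirty": False,
          "filepath": "/etc/apache2/apache2.conf", "metadata": {}}
validated = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "metadata"])
# Required keys are popped into the returned dict; anything left over in
# kwargs is unexpected and causes an error.
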
def parsernode_kwargs(kwargs: Dict[str, Any]
                      ) -> Tuple[Optional[ParserNode], bool, Optional[str], Dict[str, Any]]:
def parsernode_kwargs(kwargs):
    """
    Validates keyword arguments for ParserNode. This function modifies the kwargs
    dictionary, and hence the returned dictionary should be used instead in the
@@ -63,7 +55,7 @@ def parsernode_kwargs(kwargs: Dict[str, Any]
    return kwargs["ancestor"], kwargs["dirty"], kwargs["filepath"], kwargs["metadata"]


def commentnode_kwargs(kwargs: Dict[str, Any]) -> Tuple[Optional[str], Dict[str, str]]:
def commentnode_kwargs(kwargs):
    """
    Validates keyword arguments for CommentNode and sets the default values for
    optional kwargs. This function modifies the kwargs dictionary, and hence the
@@ -98,8 +90,7 @@ def commentnode_kwargs(kwargs: Dict[str, Any]) -> Tuple[Optional[str], Dict[str,
    return comment, kwargs


def directivenode_kwargs(kwargs: Dict[str, Any]
                         ) -> Tuple[Optional[str], Tuple[str, ...], bool, Dict[str, Any]]:
def directivenode_kwargs(kwargs):
    """
    Validates keyword arguments for DirectiveNode and BlockNode and sets the
    default values for optional kwargs. This function modifies the kwargs

@@ -2,7 +2,7 @@
# manually, Certbot will be unable to automatically provide future security
# updates. Instead, Certbot will print and log an error message with a path to
# the up-to-date file that you will need to refer to when manually updating
# this file. Contents are based on https://ssl-config.mozilla.org
# this file.

SSLEngine on


@@ -2,7 +2,7 @@
# manually, Certbot will be unable to automatically provide future security
# updates. Instead, Certbot will print and log an error message with a path to
# the up-to-date file that you will need to refer to when manually updating
# this file. Contents are based on https://ssl-config.mozilla.org
# this file.

SSLEngine on


certbot-apache/local-oldest-requirements.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.6.0
certbot-apache/setup.cfg (new file, 2 lines)
@@ -0,0 +1,2 @@
[bdist_wheel]
universal = 1
@@ -1,18 +1,32 @@
from distutils.version import LooseVersion
import sys

from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup

version = '2.3.0.dev0'
version = '1.11.0.dev0'

# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
    # We specify the minimum acme and certbot version as the current plugin
    # version for simplicity. See
    # https://github.com/certbot/certbot/issues/8761 for more info.
    f'acme>={version}',
    f'certbot>={version}',
    'acme>=0.29.0',
    'certbot>=1.6.0',
    'python-augeas',
    'setuptools>=41.6.0',
    'setuptools',
    'zope.component',
    'zope.interface',
]

setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
if setuptools_known_environment_markers:
    install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
    raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
                       'of setuptools. Version 36.2+ of setuptools is required.')
elif sys.version_info < (3,3):
    install_requires.append('mock')

dev_extras = [
    'apacheconfig>=0.3.2',
]
@@ -23,9 +37,9 @@ setup(
    description="Apache plugin for Certbot",
    url='https://github.com/letsencrypt/letsencrypt',
    author="Certbot Project",
    author_email='certbot-dev@eff.org',
    author_email='client-dev@letsencrypt.org',
    license='Apache License 2.0',
    python_requires='>=3.7',
    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Plugins',
@@ -33,12 +47,13 @@ setup(
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: 3.11',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Security',
        'Topic :: System :: Installation/Setup',

@@ -51,9 +51,9 @@ function Cleanup() {

# if our environment asks us to enable modules, do our best!
if [ "$1" = --debian-modules ] ; then
    sudo DEBIAN_FRONTEND=noninteractive apt-get install -y apache2
    sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libapache2-mod-wsgi-py3
    sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libapache2-mod-macro
    sudo apt-get install -y apache2
    sudo apt-get install -y libapache2-mod-wsgi-py3
    sudo apt-get install -y libapache2-mod-macro

    for mod in ssl rewrite macro wsgi deflate userdir version mime setenvif ; do
        echo -n enabling $mod

@@ -1,9 +1,11 @@
|
||||
"""Tests for AugeasParserNode classes"""
|
||||
from typing import List
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
|
||||
import os
|
||||
import util
|
||||
from unittest import mock
|
||||
|
||||
from certbot import errors
|
||||
|
||||
@@ -21,29 +23,24 @@ def _get_augeasnode_mock(filepath):
|
||||
metadata=metadata)
|
||||
return augeasnode_mock
|
||||
|
||||
|
||||
class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-methods
|
||||
"""Test AugeasParserNode using available test configurations"""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp()
|
||||
super(AugeasParserNodeTest, self).setUp()
|
||||
|
||||
with mock.patch(
|
||||
"certbot_apache._internal.configurator.ApacheConfigurator.get_parsernode_root"
|
||||
) as mock_parsernode:
|
||||
with mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.get_parsernode_root") as mock_parsernode:
|
||||
mock_parsernode.side_effect = _get_augeasnode_mock(
|
||||
os.path.join(self.config_path, "apache2.conf"))
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
use_parsernode=True,
|
||||
)
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir, use_parsernode=True)
|
||||
self.vh_truth = util.get_vh_truth(
|
||||
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
|
||||
|
||||
def test_save(self):
|
||||
with mock.patch('certbot_apache._internal.parser.ApacheParser.save') as mock_save:
|
||||
self.config.parser_root.save("A save message")
|
||||
self.assertIs(mock_save.called, True)
|
||||
self.assertTrue(mock_save.called)
|
||||
self.assertEqual(mock_save.call_args[0][0], "A save message")
|
||||
|
||||
def test_unsaved_files(self):
|
||||
@@ -68,8 +65,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
             "/Anything": "Anything",
         }
         for test in testcases:
-            # pylint: disable=protected-access
-            self.assertEqual(block._aug_get_name(test), testcases[test])
+            self.assertEqual(block._aug_get_name(test), testcases[test])  # pylint: disable=protected-access

     def test_find_blocks(self):
         blocks = self.config.parser_root.find_blocks("VirtualHost", exclude=False)
@@ -83,7 +79,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
     def test_find_directive_found(self):
         directives = self.config.parser_root.find_directives("Listen")
         self.assertEqual(len(directives), 1)
-        self.assertIs(directives[0].filepath.endswith("/apache2/ports.conf"), True)
+        self.assertTrue(directives[0].filepath.endswith("/apache2/ports.conf"))
         self.assertEqual(directives[0].parameters, (u'80',))

     def test_find_directive_notfound(self):
@@ -98,29 +94,29 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
             servername = vh.find_directives("servername")
             self.assertEqual(servername[0].parameters[0], "certbot.demo")
             found = True
-        self.assertIs(found, True)
+        self.assertTrue(found)

     def test_find_comments(self):
         rootcomment = self.config.parser_root.find_comments(
             "This is the main Apache server configuration file. "
         )
         self.assertEqual(len(rootcomment), 1)
-        self.assertIs(rootcomment[0].filepath.endswith(
+        self.assertTrue(rootcomment[0].filepath.endswith(
             "debian_apache_2_4/multiple_vhosts/apache2/apache2.conf"
-        ), True)
+        ))

     def test_set_parameters(self):
         servernames = self.config.parser_root.find_directives("servername")
-        names: List[str] = []
+        names = []  # type: List[str]
         for servername in servernames:
             names += servername.parameters
-        self.assertNotIn("going_to_set_this", names)
+        self.assertFalse("going_to_set_this" in names)
         servernames[0].set_parameters(["something", "going_to_set_this"])
         servernames = self.config.parser_root.find_directives("servername")
         names = []
         for servername in servernames:
             names += servername.parameters
-        self.assertIn("going_to_set_this", names)
+        self.assertTrue("going_to_set_this" in names)

     def test_set_parameters_atinit(self):
         from certbot_apache._internal.augeasparser import AugeasDirectiveNode
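The names = []  # type: List[str] rewrite above trades a PEP 526 variable annotation for a PEP 484 type comment; both carry the same information to type checkers, but only the comment form parses on older interpreters. A minimal sketch:

from typing import List

annotated: List[str] = []  # annotation syntax, Python 3.6+
# comment syntax, understood by type checkers on any Python version:
commented = []  # type: List[str]

Likewise, assertNotIn/assertIn revert to assertFalse/assertTrue with an in expression; behavior is identical, though the *In variants produce friendlier failure messages.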
@@ -133,7 +129,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
             ancestor=assertions.PASS,
             metadata=servernames[0].metadata
         )
-        self.assertIs(mock_set.called, True)
+        self.assertTrue(mock_set.called)
         self.assertEqual(
             mock_set.call_args_list[0][0][0],
             ["test", "setting", "these"]
@@ -153,7 +149,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
             self.assertEqual(len(servername.parameters), 3)
             servername.set_parameters(["thisshouldnotexistpreviously"])
             found = True
-        self.assertIs(found, True)
+        self.assertTrue(found)

         # Verify params
         servernames = self.config.parser_root.find_directives("servername")
@@ -163,7 +159,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
             self.assertEqual(len(servername.parameters), 1)
             servername.set_parameters(["thisshouldnotexistpreviously"])
             found = True
-        self.assertIs(found, True)
+        self.assertTrue(found)

     def test_add_child_comment(self):
         newc = self.config.parser_root.add_child_comment("The content")
@@ -203,7 +199,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
             rpath,
             self.config.parser_root.metadata["augeaspath"]
         )
-        self.assertIs(directive.startswith("NewBlock"), True)
+        self.assertTrue(directive.startswith("NewBlock"))

     def test_add_child_block_beginning(self):
         self.config.parser_root.add_child_block(
@@ -214,7 +210,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
         root_path = self.config.parser_root.metadata["augeaspath"]
         # Get first child
         first = parser.aug.match("{}/*[1]".format(root_path))
-        self.assertIs(first[0].endswith("Beginning"), True)
+        self.assertTrue(first[0].endswith("Beginning"))

     def test_add_child_block_append(self):
         self.config.parser_root.add_child_block(
@@ -224,7 +220,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
         root_path = self.config.parser_root.metadata["augeaspath"]
         # Get last child
         last = parser.aug.match("{}/*[last()]".format(root_path))
-        self.assertIs(last[0].endswith("VeryLast"), True)
+        self.assertTrue(last[0].endswith("VeryLast"))

     def test_add_child_block_append_alt(self):
         self.config.parser_root.add_child_block(
@@ -235,7 +231,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
         root_path = self.config.parser_root.metadata["augeaspath"]
         # Get last child
         last = parser.aug.match("{}/*[last()]".format(root_path))
-        self.assertIs(last[0].endswith("VeryLastAlt"), True)
+        self.assertTrue(last[0].endswith("VeryLastAlt"))

     def test_add_child_block_middle(self):
         self.config.parser_root.add_child_block(
@@ -246,7 +242,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
         root_path = self.config.parser_root.metadata["augeaspath"]
         # Augeas indices start at 1 :(
         middle = parser.aug.match("{}/*[6]".format(root_path))
-        self.assertIs(middle[0].endswith("Middle"), True)
+        self.assertTrue(middle[0].endswith("Middle"))

     def test_add_child_block_existing_name(self):
         parser = self.config.parser_root.parser
@@ -259,7 +255,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
         )
         new_block = parser.aug.match("{}/VirtualHost[2]".format(root_path))
         self.assertEqual(len(new_block), 1)
-        self.assertIs(vh.metadata["augeaspath"].endswith("VirtualHost[2]"), True)
+        self.assertTrue(vh.metadata["augeaspath"].endswith("VirtualHost[2]"))

     def test_node_init_error_bad_augeaspath(self):
         from certbot_apache._internal.augeasparser import AugeasBlockNode
@@ -304,7 +300,7 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
         self.assertEqual(len(dirs), 1)
         self.assertEqual(dirs[0].parameters, ("with", "parameters"))
         # The new directive was added to the very first line of the config
-        self.assertIs(dirs[0].metadata["augeaspath"].endswith("[1]"), True)
+        self.assertTrue(dirs[0].metadata["augeaspath"].endswith("[1]"))

     def test_add_child_directive_exception(self):
         self.assertRaises(
@@ -330,8 +326,8 @@ class AugeasParserNodeTest(util.ApacheTest):  # pylint: disable=too-many-public-
         ancs = vh.find_ancestors("Macro")
         self.assertEqual(len(ancs), 0)
         nonmacro_test = True
-        self.assertIs(macro_test, True)
-        self.assertIs(nonmacro_test, True)
+        self.assertTrue(macro_test)
+        self.assertTrue(nonmacro_test)

     def test_find_ancestors_bad_path(self):
         self.config.parser_root.metadata["augeaspath"] = ""

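Most hunks in this file revert assertIs(x, True) back to assertTrue(x). The two are not interchangeable in general: assertTrue accepts any truthy value, while assertIs insists on the True singleton itself, which is why the stricter form appeared in newer code. A small self-contained illustration (hypothetical test, not part of the diff):

import unittest

class AssertDemo(unittest.TestCase):
    def test_truthy_vs_identity(self):
        self.assertTrue([1])            # passes: a non-empty list is truthy
        self.assertIs(bool([1]), True)  # passes: identity with the singleton
        with self.assertRaises(AssertionError):
            self.assertIs([1], True)   # fails: [1] is not the object True

if __name__ == "__main__":
    unittest.main()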
certbot-apache/tests/autohsts_test.py
@@ -2,7 +2,12 @@
 """Test for certbot_apache._internal.configurator AutoHSTS functionality"""
 import re
 import unittest
-from unittest import mock
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore
+import six  # pylint: disable=unused-import  # six is used in mock.patch()

 from certbot import errors
 from certbot_apache._internal import constants
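The try/except import restored here is the usual dual-runtime pattern: prefer the standalone mock backport (the only option on Python 2) and fall back to unittest.mock from the Python 3 standard library. Whichever module wins, the API is used identically afterwards; a minimal sketch, with the patched target chosen arbitrarily:

import os.path

try:
    import mock
except ImportError:
    from unittest import mock

# The same mock.patch call works regardless of which module was imported.
with mock.patch("os.path.exists", return_value=True) as mocked:
    assert os.path.exists("/definitely/not/here")
assert mocked.called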
@@ -14,7 +19,7 @@ class AutoHSTSTest(util.ApacheTest):
     # pylint: disable=protected-access

     def setUp(self):  # pylint: disable=arguments-differ
-        super().setUp()
+        super(AutoHSTSTest, self).setUp()

         self.config = util.get_apache_configurator(
             self.config_path, self.vhost_path, self.config_dir, self.work_dir)
@@ -43,7 +48,7 @@ class AutoHSTSTest(util.ApacheTest):
         self.config.parser.modules.pop("headers_module", None)
         self.config.parser.modules.pop("mod_header.c", None)
         self.config.enable_autohsts(mock.MagicMock(), ["ocspvhost.com"])
-        self.assertIs(mock_enable.called, True)
+        self.assertTrue(mock_enable.called)

     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
     def test_autohsts_deploy_already_exists(self, _restart):
@@ -70,7 +75,7 @@ class AutoHSTSTest(util.ApacheTest):
         # Verify increased value
         self.assertEqual(self.get_autohsts_value(self.vh_truth[7].path),
                          inc_val)
-        self.assertIs(mock_prepare.called, True)
+        self.assertTrue(mock_prepare.called)

     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._autohsts_increase")
@@ -84,7 +89,7 @@ class AutoHSTSTest(util.ApacheTest):

         self.config.update_autohsts(mock.MagicMock())
         # Freq not patched, so value shouldn't increase
-        self.assertIs(mock_increase.called, False)
+        self.assertFalse(mock_increase.called)


     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
@@ -131,28 +136,30 @@ class AutoHSTSTest(util.ApacheTest):
         # Time mock is used to make sure that the execution does not
         # continue when no autohsts entries exist in pluginstorage
         self.config.update_autohsts(mock.MagicMock())
-        self.assertIs(mock_time.called, False)
+        self.assertFalse(mock_time.called)

     def test_autohsts_make_permanent_noop(self):
         self.config.storage.put = mock.MagicMock()
         self.config.deploy_autohsts(mock.MagicMock())
         # Make sure that the execution does not continue when no entries in store
-        self.assertIs(self.config.storage.put.called, False)
+        self.assertFalse(self.config.storage.put.called)

     @mock.patch("certbot_apache._internal.display_ops.select_vhost")
     def test_autohsts_no_ssl_vhost(self, mock_select):
         mock_select.return_value = self.vh_truth[0]
-        with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
+        with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
             self.assertRaises(errors.PluginError,
                               self.config.enable_autohsts,
                               mock.MagicMock(), "invalid.example.com")
-        self.assertIn("Certbot was not able to find SSL", mock_log.call_args[0][0])
+        self.assertTrue(
+            "Certbot was not able to find SSL" in mock_log.call_args[0][0])

     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.add_vhost_id")
     def test_autohsts_dont_enhance_twice(self, mock_id, _restart):
         mock_id.return_value = "1234567"
-        self.config.enable_autohsts(mock.MagicMock(), ["ocspvhost.com", "ocspvhost.com"])
+        self.config.enable_autohsts(mock.MagicMock(),
+                                    ["ocspvhost.com", "ocspvhost.com"])
         self.assertEqual(mock_id.call_count, 1)

     def test_autohsts_remove_orphaned(self):
@@ -162,7 +169,7 @@ class AutoHSTSTest(util.ApacheTest):

         self.config._autohsts_save_state()
         self.config.update_autohsts(mock.MagicMock())
-        self.assertNotIn("orphan_id", self.config._autohsts)
+        self.assertFalse("orphan_id" in self.config._autohsts)
         # Make sure it's removed from the pluginstorage file as well
         self.config._autohsts = None
         self.config._autohsts_fetch_state()
@@ -173,10 +180,11 @@ class AutoHSTSTest(util.ApacheTest):
         self.config._autohsts_fetch_state()
         self.config._autohsts["orphan_id"] = {"laststep": 999, "timestamp": 0}
         self.config._autohsts_save_state()
-        with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
+        with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
             self.config.deploy_autohsts(mock.MagicMock())
-            self.assertIs(mock_log.called, True)
-            self.assertIn("VirtualHost with id orphan_id was not", mock_log.call_args[0][0])
+            self.assertTrue(mock_log.called)
+            self.assertTrue(
+                "VirtualHost with id orphan_id was not" in mock_log.call_args[0][0])


 if __name__ == "__main__":
certbot-apache/tests/centos6_test.py (new file, 221 lines)
@@ -0,0 +1,221 @@
"""Test for certbot_apache._internal.configurator for CentOS 6 overrides"""
|
||||
import unittest
|
||||
|
||||
from certbot.compat import os
|
||||
from certbot.errors import MisconfigurationError
|
||||
from certbot_apache._internal import obj
|
||||
from certbot_apache._internal import override_centos
|
||||
from certbot_apache._internal import parser
|
||||
import util
|
||||
|
||||
|
||||
def get_vh_truth(temp_dir, config_name):
|
||||
"""Return the ground truth for the specified directory."""
|
||||
prefix = os.path.join(
|
||||
temp_dir, config_name, "httpd/conf.d")
|
||||
|
||||
aug_pre = "/files" + prefix
|
||||
vh_truth = [
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "test.example.com.conf"),
|
||||
os.path.join(aug_pre, "test.example.com.conf/VirtualHost"),
|
||||
{obj.Addr.fromstring("*:80")},
|
||||
False, True, "test.example.com"),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "ssl.conf"),
|
||||
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
|
||||
{obj.Addr.fromstring("_default_:443")},
|
||||
True, True, None)
|
||||
]
|
||||
return vh_truth
|
||||
|
||||
class CentOS6Tests(util.ApacheTest):
|
||||
"""Tests for CentOS 6"""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
test_dir = "centos6_apache/apache"
|
||||
config_root = "centos6_apache/apache/httpd"
|
||||
vhost_root = "centos6_apache/apache/httpd/conf.d"
|
||||
super(CentOS6Tests, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
version=(2, 2, 15), os_info="centos")
|
||||
self.vh_truth = get_vh_truth(
|
||||
self.temp_dir, "centos6_apache/apache")
|
||||
|
||||
def test_get_parser(self):
|
||||
self.assertTrue(isinstance(self.config.parser,
|
||||
override_centos.CentOSParser))
|
||||
|
||||
def test_get_virtual_hosts(self):
|
||||
"""Make sure all vhosts are being properly found."""
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 2)
|
||||
found = 0
|
||||
|
||||
for vhost in vhs:
|
||||
for centos_truth in self.vh_truth:
|
||||
if vhost == centos_truth:
|
||||
found += 1
|
||||
break
|
||||
else:
|
||||
raise Exception("Missed: %s" % vhost) # pragma: no cover
|
||||
self.assertEqual(found, 2)
|
||||
|
||||
def test_loadmod_default(self):
|
||||
ssl_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
self.assertEqual(len(ssl_loadmods), 1)
|
||||
# Make sure the LoadModule ssl_module is in ssl.conf (default)
|
||||
self.assertTrue("ssl.conf" in ssl_loadmods[0])
|
||||
# ...and that it's not inside of <IfModule>
|
||||
self.assertFalse("IfModule" in ssl_loadmods[0])
|
||||
|
||||
# Get the example vhost
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.config.save()
|
||||
|
||||
post_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
# We should now have LoadModule ssl_module in root conf and ssl.conf
|
||||
self.assertEqual(len(post_loadmods), 2)
|
||||
for lm in post_loadmods:
|
||||
# lm[:-7] removes "/arg[#]" from the path
|
||||
arguments = self.config.parser.get_all_args(lm[:-7])
|
||||
self.assertEqual(arguments, ["ssl_module", "modules/mod_ssl.so"])
|
||||
# ...and both of them should be wrapped in <IfModule !mod_ssl.c>
|
||||
# lm[:-17] strips off /directive/arg[1] from the path.
|
||||
ifmod_args = self.config.parser.get_all_args(lm[:-17])
|
||||
self.assertTrue("!mod_ssl.c" in ifmod_args)
|
||||
|
||||
def test_loadmod_multiple(self):
|
||||
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
# Adds another LoadModule to main httpd.conf in addtition to ssl.conf
|
||||
self.config.parser.add_dir(self.config.parser.loc["default"], "LoadModule",
|
||||
sslmod_args)
|
||||
self.config.save()
|
||||
pre_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
# LoadModules are not within IfModule blocks
|
||||
self.assertFalse(any("ifmodule" in m.lower() for m in pre_loadmods))
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
post_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
for mod in post_loadmods:
|
||||
self.assertTrue(self.config.parser.not_modssl_ifmodule(mod)) #pylint: disable=no-member
|
||||
|
||||
def test_loadmod_rootconf_exists(self):
|
||||
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
rootconf_ifmod = self.config.parser.get_ifmod(
|
||||
parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
"!mod_ssl.c", beginning=True)
|
||||
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
|
||||
self.config.save()
|
||||
# Get the example vhost
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.config.save()
|
||||
|
||||
root_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module",
|
||||
start=parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
exclude=False)
|
||||
|
||||
mods = [lm for lm in root_loadmods if self.config.parser.loc["default"] in lm]
|
||||
|
||||
self.assertEqual(len(mods), 1)
|
||||
# [:-7] removes "/arg[#]" from the path
|
||||
self.assertEqual(
|
||||
self.config.parser.get_all_args(mods[0][:-7]),
|
||||
sslmod_args)
|
||||
|
||||
def test_neg_loadmod_already_on_path(self):
|
||||
loadmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
ifmod = self.config.parser.get_ifmod(
|
||||
self.vh_truth[1].path, "!mod_ssl.c", beginning=True)
|
||||
self.config.parser.add_dir(ifmod[:-1], "LoadModule", loadmod_args)
|
||||
self.config.parser.add_dir(self.vh_truth[1].path, "LoadModule", loadmod_args)
|
||||
self.config.save()
|
||||
pre_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
|
||||
self.assertEqual(len(pre_loadmods), 2)
|
||||
# The ssl.conf now has two LoadModule directives, one inside of
|
||||
# !mod_ssl.c IfModule
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.config.save()
|
||||
# Ensure that the additional LoadModule wasn't written into the IfModule
|
||||
post_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
|
||||
self.assertEqual(len(post_loadmods), 1)
|
||||
|
||||
def test_loadmod_non_duplicate(self):
|
||||
# the modules/mod_ssl.so exists in ssl.conf
|
||||
sslmod_args = ["ssl_module", "modules/mod_somethingelse.so"]
|
||||
rootconf_ifmod = self.config.parser.get_ifmod(
|
||||
parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
"!mod_ssl.c", beginning=True)
|
||||
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
|
||||
self.config.save()
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
pre_matches = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module", exclude=False)
|
||||
|
||||
self.assertRaises(MisconfigurationError, self.config.deploy_cert,
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
|
||||
post_matches = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module", exclude=False)
|
||||
# Make sure that none was changed
|
||||
self.assertEqual(pre_matches, post_matches)
|
||||
|
||||
def test_loadmod_not_found(self):
|
||||
# Remove all existing LoadModule ssl_module... directives
|
||||
orig_loadmods = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module",
|
||||
exclude=False)
|
||||
for mod in orig_loadmods:
|
||||
noarg_path = mod.rpartition("/")[0]
|
||||
self.config.parser.aug.remove(noarg_path)
|
||||
self.config.save()
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
|
||||
post_loadmods = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module",
|
||||
exclude=False)
|
||||
self.assertFalse(post_loadmods)
|
||||
|
||||
def test_no_ifmod_search_false(self):
|
||||
#pylint: disable=no-member
|
||||
|
||||
self.assertFalse(self.config.parser.not_modssl_ifmodule(
|
||||
"/path/does/not/include/ifmod"
|
||||
))
|
||||
self.assertFalse(self.config.parser.not_modssl_ifmodule(
|
||||
""
|
||||
))
|
||||
self.assertFalse(self.config.parser.not_modssl_ifmodule(
|
||||
"/path/includes/IfModule/but/no/arguments"
|
||||
))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
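The lm[:-7] and lm[:-17] slices in the file above rely on the fixed widths of the Augeas path suffixes named in the comments: "/arg[1]" is 7 characters and "/directive/arg[1]" is 17. A standalone illustration with a hypothetical path:

path = "/files/etc/httpd/conf.d/ssl.conf/IfModule/directive/arg[1]"
assert len("/arg[1]") == 7
assert len("/directive/arg[1]") == 17
print(path[:-7])   # .../IfModule/directive -- drops the argument node
print(path[:-17])  # .../IfModule           -- drops directive and argument

The trick only works while the suffix really has that exact shape; an argument index wider than one digit would shift the offsets.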
certbot-apache/tests/centos_test.py
@@ -1,12 +1,16 @@
 """Test for certbot_apache._internal.configurator for Centos overrides"""
 import unittest
-from unittest import mock
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore

 from certbot import errors
 from certbot.compat import filesystem
 from certbot.compat import os
-from certbot_apache._internal import override_centos
 from certbot_apache._internal import obj
+from certbot_apache._internal import override_centos
 import util

@@ -30,7 +34,6 @@ def get_vh_truth(temp_dir, config_name):
     ]
     return vh_truth


 class FedoraRestartTest(util.ApacheTest):
     """Tests for Fedora specific self-signed certificate override"""

@@ -38,9 +41,9 @@ class FedoraRestartTest(util.ApacheTest):
         test_dir = "centos7_apache/apache"
         config_root = "centos7_apache/apache/httpd"
         vhost_root = "centos7_apache/apache/httpd/conf.d"
-        super().setUp(test_dir=test_dir,
-                      config_root=config_root,
-                      vhost_root=vhost_root)
+        super(FedoraRestartTest, self).setUp(test_dir=test_dir,
+                                             config_root=config_root,
+                                             vhost_root=vhost_root)
         self.config = util.get_apache_configurator(
             self.config_path, self.vhost_path, self.config_dir, self.work_dir,
             os_info="fedora_old")
@@ -84,66 +87,19 @@ class FedoraRestartTest(util.ApacheTest):
             ['systemctl', 'restart', 'httpd'])


-class UseCorrectApacheExecutableTest(util.ApacheTest):
-    """Make sure the various CentOS/RHEL versions use the right httpd executable"""
-
-    def setUp(self):  # pylint: disable=arguments-differ
-        test_dir = "centos7_apache/apache"
-        config_root = "centos7_apache/apache/httpd"
-        vhost_root = "centos7_apache/apache/httpd/conf.d"
-        super().setUp(test_dir=test_dir,
-                      config_root=config_root,
-                      vhost_root=vhost_root)
-
-    @mock.patch("certbot.util.get_os_info")
-    def test_old_centos_rhel_and_fedora(self, mock_get_os_info):
-        for os_info in [("centos", "7"), ("rhel", "7"), ("fedora", "28"), ("scientific", "6")]:
-            mock_get_os_info.return_value = os_info
-            config = util.get_apache_configurator(
-                self.config_path, self.vhost_path, self.config_dir, self.work_dir,
-                os_info="centos")
-            self.assertEqual(config.options.ctl, "apachectl")
-            self.assertEqual(config.options.bin, "httpd")
-            self.assertEqual(config.options.version_cmd, ["apachectl", "-v"])
-            self.assertEqual(config.options.restart_cmd, ["apachectl", "graceful"])
-            self.assertEqual(config.options.restart_cmd_alt, ["apachectl", "restart"])
-            self.assertEqual(config.options.conftest_cmd, ["apachectl", "configtest"])
-            self.assertEqual(config.options.get_defines_cmd, ["apachectl", "-t", "-D", "DUMP_RUN_CFG"])
-            self.assertEqual(config.options.get_includes_cmd, ["apachectl", "-t", "-D", "DUMP_INCLUDES"])
-            self.assertEqual(config.options.get_modules_cmd, ["apachectl", "-t", "-D", "DUMP_MODULES"])
-
-    @mock.patch("certbot.util.get_os_info")
-    def test_new_rhel_derived(self, mock_get_os_info):
-        for os_info in [("centos", "9"), ("rhel", "9"), ("oracle", "9")]:
-            mock_get_os_info.return_value = os_info
-            config = util.get_apache_configurator(
-                self.config_path, self.vhost_path, self.config_dir, self.work_dir,
-                os_info=os_info[0])
-            self.assertEqual(config.options.ctl, "apachectl")
-            self.assertEqual(config.options.bin, "httpd")
-            self.assertEqual(config.options.version_cmd, ["httpd", "-v"])
-            self.assertEqual(config.options.restart_cmd, ["apachectl", "graceful"])
-            self.assertEqual(config.options.restart_cmd_alt, ["apachectl", "restart"])
-            self.assertEqual(config.options.conftest_cmd, ["apachectl", "configtest"])
-            self.assertEqual(config.options.get_defines_cmd, ["httpd", "-t", "-D", "DUMP_RUN_CFG"])
-            self.assertEqual(config.options.get_includes_cmd, ["httpd", "-t", "-D", "DUMP_INCLUDES"])
-            self.assertEqual(config.options.get_modules_cmd, ["httpd", "-t", "-D", "DUMP_MODULES"])
-
-
 class MultipleVhostsTestCentOS(util.ApacheTest):
     """Multiple vhost tests for CentOS / RHEL family of distros"""

     _multiprocess_can_split_ = True

-    @mock.patch("certbot.util.get_os_info")
-    def setUp(self, mock_get_os_info):  # pylint: disable=arguments-differ
+    def setUp(self):  # pylint: disable=arguments-differ
         test_dir = "centos7_apache/apache"
         config_root = "centos7_apache/apache/httpd"
         vhost_root = "centos7_apache/apache/httpd/conf.d"
-        super().setUp(test_dir=test_dir,
-                      config_root=config_root,
-                      vhost_root=vhost_root)
-        mock_get_os_info.return_value = ("centos", "9")
+        super(MultipleVhostsTestCentOS, self).setUp(test_dir=test_dir,
+                                                    config_root=config_root,
+                                                    vhost_root=vhost_root)

         self.config = util.get_apache_configurator(
             self.config_path, self.vhost_path, self.config_dir, self.work_dir,
             os_info="centos")
@@ -167,9 +123,9 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
         )
         def mock_get_cfg(command):
             """Mock httpd process stdout"""
-            if command == ['httpd', '-t', '-D', 'DUMP_RUN_CFG']:
+            if command == ['apachectl', '-t', '-D', 'DUMP_RUN_CFG']:
                 return define_val
-            elif command == ['httpd', '-t', '-D', 'DUMP_MODULES']:
+            elif command == ['apachectl', '-t', '-D', 'DUMP_MODULES']:
                 return mod_val
             return ""
         mock_get.side_effect = mock_get_cfg
@@ -178,14 +134,14 @@ class MultipleVhostsTestCentOS(util.ApacheTest):

         with mock.patch("certbot.util.get_os_info") as mock_osi:
             # Make sure we have the CentOS httpd constants
-            mock_osi.return_value = ("centos", "9")
+            mock_osi.return_value = ("centos", "7")
             self.config.parser.update_runtime_variables()

         self.assertEqual(mock_get.call_count, 3)
         self.assertEqual(len(self.config.parser.modules), 4)
         self.assertEqual(len(self.config.parser.variables), 2)
-        self.assertIn("TEST2", self.config.parser.variables)
-        self.assertIn("mod_another.c", self.config.parser.modules)
+        self.assertTrue("TEST2" in self.config.parser.variables)
+        self.assertTrue("mod_another.c" in self.config.parser.modules)

     def test_get_virtual_hosts(self):
         """Make sure all vhosts are being properly found."""
@@ -213,14 +169,14 @@ class MultipleVhostsTestCentOS(util.ApacheTest):

         with mock.patch("certbot.util.get_os_info") as mock_osi:
             # Make sure we have the CentOS httpd constants
-            mock_osi.return_value = ("centos", "9")
+            mock_osi.return_value = ("centos", "7")
             self.config.parser.update_runtime_variables()

-        self.assertIn("mock_define", self.config.parser.variables)
-        self.assertIn("mock_define_too", self.config.parser.variables)
-        self.assertIn("mock_value", self.config.parser.variables)
+        self.assertTrue("mock_define" in self.config.parser.variables)
+        self.assertTrue("mock_define_too" in self.config.parser.variables)
+        self.assertTrue("mock_value" in self.config.parser.variables)
         self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
-        self.assertIn("MOCK_NOSEP", self.config.parser.variables)
+        self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
         self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])

     @mock.patch("certbot_apache._internal.configurator.util.run_script")

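The mock_get.side_effect = mock_get_cfg assignment in the hunk above is the standard trick for faking per-command subprocess output: when side_effect is callable, the mock forwards its arguments to it and returns the result. A stripped-down, hypothetical sketch of the same idiom:

from unittest import mock

def fake_run(command):
    """Return canned stdout for each recognized command line."""
    if command == ["apachectl", "-t", "-D", "DUMP_RUN_CFG"]:
        return "Define: TEST2"
    if command == ["apachectl", "-t", "-D", "DUMP_MODULES"]:
        return "mod_another.c (shared)"
    return ""

runner = mock.MagicMock(side_effect=fake_run)
print(runner(["apachectl", "-t", "-D", "DUMP_MODULES"]))  # mod_another.c (shared)
print(runner.call_count)  # 1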
Some files were not shown because too many files have changed in this diff.