Compare commits

..

3 Commits

Author SHA1 Message Date
Brad Warren
fcb85becb9 only test apache 2020-06-04 10:15:09 -07:00
Alex Zorin
ca1573ea32 Merge remote-tracking branch 'upstream/master' into apache_static_ssl_module 2020-06-04 08:59:33 +10:00
Alex Zorin
8af28d83a6 apache: handle statically linked mod_ssl
In #7771, the Apache configurator gained the ability to identify what
version of OpenSSL Apache's ssl_module is linked against. However, the
detection was only functional if the module was built as a DSO (which is
almost always the case).

This commit covers the case where the ssl_module is statically linked
within the Apache binary. It requires the user to specify the path to
the binary (with --apache-bin) and emits a warning if static linking is
detected but no path has been provided.
2020-05-20 21:00:53 +10:00
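
For readers following the mod_ssl change: below is a minimal sketch of the detection flow the commit message describes, not Certbot's actual implementation. The apachectl -M parsing, the warning text, and the version-string scan are illustrative assumptions; only the general approach (detect static linking, then require the Apache binary path before reading the linked OpenSSL version) comes from the commit message.

# Minimal sketch of the flow described above; not Certbot's real code.
import re
import subprocess
from typing import Optional


def ssl_module_is_static(apachectl: str = "apachectl") -> bool:
    """Return True if `apachectl -M` reports ssl_module as statically linked."""
    output = subprocess.run([apachectl, "-M"], capture_output=True,
                            text=True, check=True).stdout
    return bool(re.search(r"ssl_module\s+\(static\)", output))


def linked_openssl_version(apache_bin: Optional[str]) -> Optional[str]:
    """Best-effort scan of the Apache binary for an OpenSSL version string."""
    if apache_bin is None:
        # Mirrors the behaviour described in the commit message: warn when
        # static linking is detected but no binary path was provided.
        print("warning: ssl_module is statically linked; pass --apache-bin "
              "so the linked OpenSSL version can be detected")
        return None
    with open(apache_bin, "rb") as f:
        match = re.search(rb"OpenSSL (\d+\.\d+\.\d+[a-z]?)", f.read())
    return match.group(1).decode() if match else None
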
555 changed files with 18268 additions and 16315 deletions

View File

@@ -2,13 +2,12 @@
trigger:
# When changing these triggers, please ensure the documentation under
# "Running tests in CI" is still correct.
- azure-test-*
- test-*
pr: none
variables:
# We don't publish our Docker images in this pipeline, but when building them
# for testing, let's use the nightly tag.
dockerTag: nightly
stages:
- template: templates/stages/test-and-package-stage.yml
jobs:
# Any addition here should be reflected in the advanced and release pipelines.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml

View File

@@ -0,0 +1,18 @@
# Advanced pipeline for running our full test suite on protected branches.
trigger:
- '*.x'
pr: none
# This pipeline is also nightly run on master
schedules:
- cron: "0 4 * * *"
displayName: Nightly build
branches:
include:
- master
always: true
jobs:
# Any addition here should be reflected in the advanced-test and release pipelines.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml

View File

@@ -1,8 +1,8 @@
trigger: none
trigger:
- master
pr:
- master
- '*.x'
jobs:
- template: templates/jobs/standard-tests-jobs.yml
- template: templates/tests-suite.yml

View File

@@ -1,18 +0,0 @@
# Nightly pipeline running each day for master.
trigger: none
pr: none
schedules:
- cron: "30 4 * * *"
displayName: Nightly build
branches:
include:
- master
always: true
variables:
dockerTag: nightly
stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/deploy-stage.yml
- template: templates/stages/notify-failure-stage.yml

View File

@@ -1,18 +1,13 @@
# Release pipeline to run our full test suite, build artifacts, and deploy them
# for GitHub release tags.
# Release pipeline to build and deploy Certbot for Windows for GitHub release tags
trigger:
tags:
include:
- v*
pr: none
variables:
dockerTag: ${{variables['Build.SourceBranchName']}}
stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/changelog-stage.yml
- template: templates/stages/deploy-stage.yml
parameters:
snapReleaseChannel: beta
- template: templates/stages/notify-failure-stage.yml
jobs:
# Any addition here should be reflected in the advanced and advanced-test pipelines.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml
- template: templates/changelog.yml

View File

@@ -0,0 +1,14 @@
jobs:
- job: changelog
pool:
vmImage: vs2017-win2016
steps:
- bash: |
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
displayName: Prepare changelog
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: changelog
displayName: Publish changelog

View File

@@ -0,0 +1,61 @@
jobs:
- job: installer_build
pool:
vmImage: vs2017-win2016
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.7
architecture: x86
addToPath: true
- script: python windows-installer/construct.py
displayName: Build Certbot installer
- task: CopyFiles@2
inputs:
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
contents: '*.exe'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: windows-installer
displayName: Publish Windows installer
- job: installer_run
dependsOn: installer_build
strategy:
matrix:
win2019:
imageName: windows-2019
win2016:
imageName: vs2017-win2016
pool:
vmImage: $(imageName)
steps:
- powershell: |
$currentVersion = $PSVersionTable.PSVersion
if ($currentVersion.Major -ne 5) {
throw "Powershell version is not 5.x"
}
condition: eq(variables['imageName'], 'vs2017-win2016')
displayName: Check Powershell 5.x is used in vs2017-win2016
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: windows-installer
path: $(Build.SourcesDirectory)/bin
displayName: Retrieve Windows installer
- script: |
py -3 -m venv venv
venv\Scripts\python tools\pip_install.py -e certbot-ci
displayName: Prepare Certbot-CI
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
displayName: Run windows installer integration tests
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
displayName: Run certbot integration tests

View File

@@ -1,97 +0,0 @@
jobs:
- job: extended_test
variables:
- name: IMAGE_NAME
value: ubuntu-18.04
- name: PYTHON_VERSION
value: 3.9
- group: certbot-common
strategy:
matrix:
linux-py36:
PYTHON_VERSION: 3.6
TOXENV: py36
linux-py37:
PYTHON_VERSION: 3.7
TOXENV: py37
linux-py38:
PYTHON_VERSION: 3.8
TOXENV: py38
linux-py37-nopin:
PYTHON_VERSION: 3.7
TOXENV: py37
CERTBOT_NO_PIN: 1
linux-external-mock:
TOXENV: external-mock
linux-boulder-v1-integration-certbot-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-certbot-oldest
ACME_SERVER: boulder-v1
linux-boulder-v2-integration-certbot-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-certbot-oldest
ACME_SERVER: boulder-v2
linux-boulder-v1-integration-nginx-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-nginx-oldest
ACME_SERVER: boulder-v1
linux-boulder-v2-integration-nginx-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-nginx-oldest
ACME_SERVER: boulder-v2
linux-boulder-v1-py36-integration:
PYTHON_VERSION: 3.6
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py36-integration:
PYTHON_VERSION: 3.6
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py37-integration:
PYTHON_VERSION: 3.7
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py37-integration:
PYTHON_VERSION: 3.7
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py38-integration:
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py38-integration:
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py39-integration:
PYTHON_VERSION: 3.9
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py39-integration:
PYTHON_VERSION: 3.9
TOXENV: integration
ACME_SERVER: boulder-v2
nginx-compat:
TOXENV: nginx_compat
linux-integration-rfc2136:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.8
TOXENV: integration-dns-rfc2136
docker-dev:
TOXENV: docker_dev
le-modification:
IMAGE_NAME: ubuntu-18.04
TOXENV: modification
macos-farmtest-apache2:
# We run one of these test farm tests on macOS to help ensure the
# tests continue to work on the platform.
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.8
TOXENV: test-farm-apache2
farmtest-sdists:
PYTHON_VERSION: 3.7
TOXENV: test-farm-sdists
pool:
vmImage: $(IMAGE_NAME)
steps:
- template: ../steps/tox-steps.yml

View File

@@ -1,230 +0,0 @@
jobs:
- job: docker_build
pool:
vmImage: ubuntu-18.04
strategy:
matrix:
amd64:
DOCKER_ARCH: amd64
# Do not run the heavy non-amd64 builds for test branches
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
# The default timeout of 60 minutes is a little low for compiling
# cryptography on ARM architectures.
timeoutInMinutes: 180
steps:
- bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
displayName: Build the Docker images
# We don't filter for the Docker Hub organization to continue to allow
# easy testing of these scripts on forks.
- bash: |
set -e
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
docker save --output images.tar $DOCKER_IMAGES
displayName: Save the Docker images
# If the name of the tar file or artifact changes, the deploy stage will
# also need to be updated.
- bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
displayName: Prepare Docker artifact
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: docker_$(DOCKER_ARCH)
displayName: Store Docker artifact
- job: docker_run
dependsOn: docker_build
pool:
vmImage: ubuntu-18.04
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker_amd64
path: $(Build.SourcesDirectory)
displayName: Retrieve Docker images
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
displayName: Load Docker images
- bash: |
set -ex
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
for DOCKER_IMAGE in ${DOCKER_IMAGES}
do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
done
displayName: Run integration tests for Docker images
- job: installer_build
pool:
vmImage: vs2017-win2016
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.9
architecture: x86
addToPath: true
- script: |
python -m venv venv
venv\Scripts\python tools\pipstrap.py
venv\Scripts\python tools\pip_install.py -e windows-installer
displayName: Prepare Windows installer build environment
- script: |
venv\Scripts\construct-windows-installer
displayName: Build Certbot installer
- task: CopyFiles@2
inputs:
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
contents: '*.exe'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
artifact: windows-installer
displayName: Publish Windows installer
- job: installer_run
dependsOn: installer_build
strategy:
matrix:
win2019:
imageName: windows-2019
win2016:
imageName: vs2017-win2016
pool:
vmImage: $(imageName)
steps:
- powershell: |
if ($PSVersionTable.PSVersion.Major -ne 5) {
throw "Powershell version is not 5.x"
}
condition: eq(variables['imageName'], 'vs2017-win2016')
displayName: Check Powershell 5.x is used in vs2017-win2016
- task: UsePythonVersion@0
inputs:
versionSpec: 3.9
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: windows-installer
path: $(Build.SourcesDirectory)/bin
displayName: Retrieve Windows installer
- script: |
python -m venv venv
venv\Scripts\python tools\pipstrap.py
venv\Scripts\python tools\pip_install.py -e certbot-ci
env:
PIP_NO_BUILD_ISOLATION: no
displayName: Prepare Certbot-CI
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
displayName: Run windows installer integration tests
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
displayName: Run certbot integration tests
- job: snaps_build
pool:
vmImage: ubuntu-18.04
strategy:
matrix:
amd64:
SNAP_ARCH: amd64
# Do not run the heavy non-amd64 builds for test branches
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
armhf:
SNAP_ARCH: armhf
arm64:
SNAP_ARCH: arm64
timeoutInMinutes: 0
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
sudo snap install --classic snapcraft
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadSecureFile@1
name: credentials
inputs:
secureFile: launchpad-credentials
- script: |
set -e
git config --global user.email "$(Build.RequestedForEmail)"
git config --global user.name "$(Build.RequestedFor)"
mkdir -p ~/.local/share/snapcraft/provider/launchpad
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
python3 tools/snap/build_remote.py ALL --archs ${SNAP_ARCH} --timeout 19800
displayName: Build snaps
- script: |
set -e
mv *.snap $(Build.ArtifactStagingDirectory)
mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
displayName: Prepare artifacts
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: snaps_$(SNAP_ARCH)
displayName: Store snaps artifacts
- job: snap_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-18.04
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends nginx-light snapd
python3 -m venv venv
venv/bin/python tools/pipstrap.py
venv/bin/python tools/pip_install.py -U tox
displayName: Install dependencies
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_amd64
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- script: |
set -e
sudo snap install --dangerous --classic snap/certbot_*.snap
displayName: Install Certbot snap
- script: |
set -e
venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
displayName: Run tox
- job: snap_dns_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-18.04
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_amd64
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- script: |
set -e
python3 -m venv venv
venv/bin/python tools/pipstrap.py
venv/bin/python tools/pip_install.py -e certbot-ci
displayName: Prepare Certbot-CI
- script: |
set -e
sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
displayName: Test DNS plugins snaps

View File

@@ -1,75 +0,0 @@
jobs:
- job: test
variables:
PYTHON_VERSION: 3.9
strategy:
matrix:
macos-py36:
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.6
TOXENV: py36
macos-py39:
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.9
TOXENV: py39
windows-py36:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.6
TOXENV: py36-win
windows-py39-cover:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.9
TOXENV: py39-cover-win
windows-integration-certbot:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.9
TOXENV: integration-certbot
linux-oldest-tests-1:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
linux-oldest-tests-2:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: '{dns,nginx}-oldest'
linux-py36:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: py36
linux-py39-cover:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.9
TOXENV: py39-cover
linux-py39-lint:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.9
TOXENV: lint-posix
linux-py39-mypy:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.9
TOXENV: mypy-posix
linux-integration:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: pebble
apache-compat:
IMAGE_NAME: ubuntu-18.04
TOXENV: apache_compat
apacheconftest:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: apacheconftest-with-pebble
nginxroundtrip:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: nginxroundtrip
pool:
vmImage: $(IMAGE_NAME)
steps:
- template: ../steps/tox-steps.yml
- job: test_sphinx_builds
pool:
vmImage: ubuntu-20.04
steps:
- template: ../steps/sphinx-steps.yml

View File

@@ -1,19 +0,0 @@
stages:
- stage: Changelog
jobs:
- job: prepare
pool:
vmImage: vs2017-win2016
steps:
# If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
- bash: |
set -e
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
displayName: Prepare changelog
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
artifact: changelog
displayName: Publish changelog

View File

@@ -1,107 +0,0 @@
parameters:
- name: snapReleaseChannel
type: string
default: edge
values:
- edge
- beta
stages:
- stage: Deploy
jobs:
# This job relies on credentials used to publish the Certbot snaps. This
# credential file was created by running:
#
# snapcraft logout
# snapcraft login (provide the shared snapcraft credentials when prompted)
# snapcraft export-login --channels=beta,edge snapcraft.cfg
#
# Then the file was added as a secure file in Azure pipelines
# with the name snapcraft.cfg by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
# including authorizing the file for use in the "nightly" and "release"
# pipelines as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#q-how-do-i-authorize-a-secure-file-for-use-in-a-specific-pipeline.
#
# This file has a maximum lifetime of one year and the current
# file will expire on 2022-07-25 which is also tracked by
# https://github.com/certbot/certbot/issues/7931. The file will
# need to be updated before then to prevent automated deploys
# from breaking.
#
# Revoking these credentials can be done by changing the password of the
# account used to generate the credentials. See
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
# more info.
- job: publish_snap
pool:
vmImage: ubuntu-18.04
variables:
- group: certbot-common
strategy:
matrix:
amd64:
SNAP_ARCH: amd64
arm32v6:
SNAP_ARCH: armhf
arm64v8:
SNAP_ARCH: arm64
steps:
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
sudo snap install --classic snapcraft
displayName: Install dependencies
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_$(SNAP_ARCH)
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- task: DownloadSecureFile@1
name: snapcraftCfg
inputs:
secureFile: snapcraft.cfg
- bash: |
set -e
snapcraft login --with $(snapcraftCfg.secureFilePath)
for SNAP_FILE in snap/*.snap; do
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
done
displayName: Publish to Snap store
- job: publish_docker
pool:
vmImage: ubuntu-18.04
strategy:
matrix:
amd64:
DOCKER_ARCH: amd64
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker_$(DOCKER_ARCH)
path: $(Build.SourcesDirectory)
displayName: Retrieve Docker images
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
displayName: Load Docker images
- task: Docker@2
inputs:
command: login
# The credentials used here are for the shared certbotbot account
# on Docker Hub. The credentials are stored in a service account
# which was created by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
# The name given to this service account must match the value
# given to containerRegistry below. "Grant access to all
# pipelines" should also be checked. To revoke these
# credentials, we can change the password on the certbotbot
# Docker Hub account or remove the account from the
# Certbot organization on Docker Hub.
containerRegistry: docker-hub
displayName: Login to Docker Hub
- bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
displayName: Deploy the Docker images

View File

@@ -1,19 +0,0 @@
stages:
- stage: On_Failure
jobs:
- job: notify_mattermost
variables:
- group: certbot-common
pool:
vmImage: ubuntu-20.04
steps:
- bash: |
set -e
MESSAGE="\
---\n\
##### Azure Pipeline
*Repo* $(Build.Repository.ID) - *Pipeline* $(Build.DefinitionName) #$(Build.BuildNumber) - *Branch/PR* $(Build.SourceBranchName)\n\
:warning: __Pipeline has failed__: [Link to the build](https://dev.azure.com/$(Build.Repository.ID)/_build/results?buildId=$(Build.BuildId)&view=results)\n\n\
---"
curl -i -X POST --data-urlencode "payload={\"text\":\"${MESSAGE}\"}" "$(MATTERMOST_URL)"
condition: failed()

View File

@@ -1,6 +0,0 @@
stages:
- stage: TestAndPackage
jobs:
- template: ../jobs/standard-tests-jobs.yml
- template: ../jobs/extended-tests-jobs.yml
- template: ../jobs/packaging-jobs.yml

View File

@@ -1,24 +0,0 @@
steps:
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends libaugeas0
FINAL_STATUS=0
declare -a FAILED_BUILDS
tools/venv.py
source venv/bin/activate
for doc_path in */docs
do
echo ""
echo "##[group]Building $doc_path"
if ! sphinx-build -W --keep-going -b html $doc_path $doc_path/_build/html; then
FINAL_STATUS=1
FAILED_BUILDS[${#FAILED_BUILDS[@]}]="${doc_path%/docs}"
fi
echo "##[endgroup]"
done
if [[ $FINAL_STATUS -ne 0 ]]; then
echo "##[error]The following builds failed: ${FAILED_BUILDS[*]}"
exit 1
fi
displayName: Build Sphinx Documentation

View File

@@ -1,57 +0,0 @@
steps:
# We run brew update because we've seen attempts to install an older version
# of a package fail. See
# https://github.com/actions/virtual-environments/issues/3165.
- bash: |
set -e
brew update
brew install augeas
condition: startswith(variables['IMAGE_NAME'], 'macOS')
displayName: Install MacOS dependencies
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
python-dev \
gcc \
libaugeas0 \
libssl-dev \
libffi-dev \
ca-certificates \
nginx-light \
openssl
sudo systemctl stop nginx
condition: startswith(variables['IMAGE_NAME'], 'ubuntu')
displayName: Install Linux dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: $(PYTHON_VERSION)
addToPath: true
# tools/pip_install.py is used to pin packages to a known working version
# except in tests where the environment variable CERTBOT_NO_PIN is set.
# virtualenv is listed here explicitly to make sure it is upgraded when
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
# set, pip updates dependencies it thinks are already satisfied to avoid some
# problems with its lack of real dependency resolution.
- bash: |
set -e
python tools/pipstrap.py
python tools/pip_install.py -I tox virtualenv
displayName: Install runtime dependencies
- task: DownloadSecureFile@1
name: testFarmPem
inputs:
secureFile: azure-test-farm.pem
condition: contains(variables['TOXENV'], 'test-farm')
- bash: |
set -e
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
env
python -m tox
env:
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
AWS_EC2_PEM_FILE: $(testFarmPem.secureFilePath)
displayName: Run tox

View File

@@ -0,0 +1,39 @@
jobs:
- job: test
strategy:
matrix:
macos-py27:
IMAGE_NAME: macOS-10.14
PYTHON_VERSION: 2.7
TOXENV: py27
macos-py38:
IMAGE_NAME: macOS-10.14
PYTHON_VERSION: 3.8
TOXENV: py38
windows-py35:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.5
TOXENV: py35
windows-py37-cover:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.7
TOXENV: py37-cover
windows-integration-certbot:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.7
TOXENV: integration-certbot
PYTEST_ADDOPTS: --numprocesses 4
pool:
vmImage: $(IMAGE_NAME)
steps:
- bash: brew install augeas
condition: startswith(variables['IMAGE_NAME'], 'macOS')
displayName: Install Augeas
- task: UsePythonVersion@0
inputs:
versionSpec: $(PYTHON_VERSION)
addToPath: true
- script: python tools/pip_install.py -U tox coverage
displayName: Install dependencies
- script: python -m tox
displayName: Run tox

View File

@@ -8,4 +8,5 @@
.git
.tox
venv
venv3
docs

View File

@@ -1,18 +0,0 @@
# https://editorconfig.org/
root = true
[*]
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf
[*.py]
indent_style = space
indent_size = 4
charset = utf-8
max_line_length = 100
[*.yaml]
indent_style = space
indent_size = 2

12
.envrc
View File

@@ -1,12 +0,0 @@
# This file is just a nicety for developers who use direnv. When you cd under
# the Certbot repo, Certbot's virtual environment will be automatically
# activated and then deactivated when you cd elsewhere. Developers have to have
# direnv set up and run `direnv allow` to allow this file to execute on their
# system. You can find more information at https://direnv.net/.
. venv/bin/activate
# direnv doesn't support modifying PS1 so we unset it to squelch the error
# it'll otherwise print about this being done in the activate script. See
# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
# prompt to change like it normally does, see
# https://github.com/direnv/direnv/wiki/Python#restoring-the-ps1.
unset PS1

1
.github/FUNDING.yml vendored
View File

@@ -1 +0,0 @@
custom: https://supporters.eff.org/donate/support-work-on-certbot

16
.gitignore vendored
View File

@@ -4,12 +4,13 @@
build/
dist*/
/venv*/
/kgs/
/.tox/
/releases*/
/log*
letsencrypt.log
certbot.log
poetry.lock
letsencrypt-auto-source/letsencrypt-auto.sig.lzma.base64
# coverage
.coverage
@@ -30,6 +31,12 @@ tags
# auth --cert-path --chain-path
/*.pem
# letstest
tests/letstest/letest-*/
tests/letstest/*.pem
tests/letstest/venv/
tests/letstest/venv3/
.venv
# pytest cache
@@ -51,10 +58,3 @@ parts
prime
stage
*.snap
snap-constraints.txt
qemu-*
certbot-dns*/certbot-dns*_amd64*.txt
certbot-dns*/certbot-dns*_arm*.txt
/certbot_amd64*.txt
/certbot_arm*.txt
certbot-dns*/snap

View File

@@ -1,5 +1,6 @@
[settings]
skip_glob=venv*
skip=letsencrypt-auto-source
force_sort_within_sections=True
force_single_line=True
order_by_type=False

View File

@@ -8,10 +8,7 @@ jobs=0
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
# CERTBOT COMMENT
# This is needed for pylint to import linter_plugin.py since
# https://github.com/PyCQA/pylint/pull/3396.
init-hook="import pylint.config, os, sys; sys.path.append(os.path.dirname(pylint.config.PYLINTRC))"
#init-hook=
# Profiled execution.
profile=no
@@ -56,23 +53,7 @@ extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
# See https://github.com/PyCQA/pylint/issues/1498.
# 3) Same as point 2 for no-value-for-parameter.
# See https://github.com/PyCQA/pylint/issues/2820.
# 4) raise-missing-from makes it an error to raise an exception from except
# block without using explicit exception chaining. While explicit exception
# chaining results in a slightly more informative traceback, I don't think
# it's beneficial enough for us to change all of our current instances and
# give Certbot developers errors about this when they're working on new code
# in the future. You can read more about exception chaining and this pylint
# check at
# https://blog.ram.rachum.com/post/621791438475296768/improving-python-exception-chaining-with.
# 5) wrong-import-order generates false positives and a pylint developer
# suggests that people using isort should disable this check at
# https://github.com/PyCQA/pylint/issues/3817#issuecomment-687892090.
# 6) unspecified-encoding generates errors when encoding is not specified
# in a call to the built-in open function. This relates more to a design decision
# (unspecified encoding makes the open function use the default encoding of the system)
# than a clear flaw on which a check should be enforced. Anyway the project does
# not need to enforce encoding on files so we disable this check.
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue,raise-missing-from,wrong-import-order,unspecified-encoding
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue
[REPORTS]
@@ -273,7 +254,7 @@ ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis
ignored-modules=pkg_resources,confargparse,argparse
ignored-modules=pkg_resources,confargparse,argparse,six.moves,six.moves.urllib
# import errors ignored only in 1.4.4
# https://bitbucket.org/logilab/pylint/commits/cd000904c9e2

92
.travis.yml Normal file
View File

@@ -0,0 +1,92 @@
language: python
dist: xenial
cache:
directories:
- $HOME/.cache/pip
before_script:
# On Travis, the fastest parallelization for integration tests has proved to be 4.
- 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
# Use Travis retry feature for farm tests since they are flaky
- 'if [[ "$TOXENV" == "travis-test-farm"* ]]; then export TRAVIS_RETRY=travis_retry; fi'
- export TOX_TESTENV_PASSENV=TRAVIS
- 'if [[ "$SNAP" == true ]]; then snap/local/build_and_install.sh; fi'
# Only build pushes to the master branch, PRs, and branches beginning with
# `test-`, `travis-test-`, or of the form `digit(s).digit(s).x` or
# `vdigit(s).digit(s).digit(s)`. As documented at
# https://docs.travis-ci.com/user/customizing-the-build/#safelisting-or-blocklisting-branches,
# this includes tags so pushing tags of the form `vdigit(s).digit(s).digit(s)`
# will also trigger tests. This reduces the number of simultaneous Travis runs,
# which speeds turnaround time on review since there is a cap on the number
# of simultaneous runs.
branches:
# When changing these branches, please ensure the documentation under
# "Running tests in CI" is still correct.
only:
- master
- /^\d+\.\d+\.x$/ # this matches our point release branches
- /^v\d+\.\d+\.\d+$/ # this matches our release tags
- /^(travis-)?test-.*$/
# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
not-on-master: &not-on-master
if: NOT (type = push AND branch = master)
# Jobs for the extended test suite are executed for cron jobs and pushes to
# non-development branches.
extended-test-suite: &extended-test-suite
if: type = cron OR (type = push AND branch != master)
matrix:
include:
- python: "3.7"
env:
- TOXENV=travis-test-farm-apache2
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
<<: *extended-test-suite
# container-based infrastructure
sudo: false
addons:
apt:
packages: # Keep in sync with letsencrypt-auto-source/pieces/bootstrappers/deb_common.sh and Boulder.
- python-dev
- gcc
- libaugeas0
- libssl-dev
- libffi-dev
- ca-certificates
# For certbot-nginx integration testing
- nginx-light
- openssl
# tools/pip_install.py is used to pin packages to a known working version
# except in tests where the environment variable CERTBOT_NO_PIN is set.
# virtualenv is listed here explicitly to make sure it is upgraded when
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
# set, pip updates dependencies it thinks are already satisfied to avoid some
# problems with its lack of real dependency resolution.
install: 'tools/pip_install.py -I tox virtualenv'
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
# script command. It is set only to `travis_retry` during farm tests, in
# order to trigger the Travis retry feature, and compensate for the inherent
# flakiness of these specific tests.
script: '$TRAVIS_RETRY tox'
notifications:
email: false
irc:
if: NOT branch =~ ^(travis-)?test-.*$
channels:
# This is set to a secure variable to prevent forks from sending
# notifications. This value was created by installing
# https://github.com/travis-ci/travis.rb and running
# `travis encrypt "chat.freenode.net#certbot-devel"`.
- secure: "EWW66E2+KVPZyIPR8ViENZwfcup4Gx3/dlimmAZE0WuLwxDCshBBOd3O8Rf6pBokEoZlXM5eDT6XdyJj8n0DLslgjO62pExdunXpbcMwdY7l1ELxX2/UbnDTE6UnPYa09qVBHNG7156Z6yE0x2lH4M9Ykvp0G0cubjPQHylAwo0="
on_cancel: never
on_success: never
on_failure: always

View File

@@ -1,7 +1,6 @@
Authors
=======
* [Aaron Gable](https://github.com/aarongable)
* [Aaron Zirbes](https://github.com/aaronzirbes)
* Aaron Zuehlke
* Ada Lovelace
@@ -37,8 +36,7 @@ Authors
* [Blake Griffith](https://github.com/cowlicks)
* [Brad Warren](https://github.com/bmw)
* [Brandon Kraft](https://github.com/kraftbj)
* [Brandon Kreisel](https://github.com/BKreisel)
* [Brian Heim](https://github.com/brianlheim)
* [Brandon Kreisel](https://github.com/kraftbj)
* [Cameron Steel](https://github.com/Tugzrida)
* [Ceesjan Luiten](https://github.com/quinox)
* [Chad Whitacre](https://github.com/whit537)
@@ -61,9 +59,7 @@ Authors
* [DanCld](https://github.com/DanCld)
* [Daniel Albers](https://github.com/AID)
* [Daniel Aleksandersen](https://github.com/da2x)
* [Daniel Almasi](https://github.com/almasen)
* [Daniel Convissor](https://github.com/convissor)
* [Daniel "Drex" Drexler](https://github.com/aeturnum)
* [Daniel Huang](https://github.com/dhuang)
* [Dave Guarino](https://github.com/daguar)
* [David cz](https://github.com/dave-cz)
@@ -88,7 +84,6 @@ Authors
* [Felix Schwarz](https://github.com/FelixSchwarz)
* [Felix Yan](https://github.com/felixonmars)
* [Filip Ochnik](https://github.com/filipochnik)
* [Florian Klink](https://github.com/flokli)
* [Francois Marier](https://github.com/fmarier)
* [Frank](https://github.com/Frankkkkk)
* [Frederic BLANC](https://github.com/fblanc)
@@ -151,13 +146,11 @@ Authors
* [Lior Sabag](https://github.com/liorsbg)
* [Lipis](https://github.com/lipis)
* [lord63](https://github.com/lord63)
* [Lorenzo Fundaró](https://github.com/lfundaro)
* [Luca Beltrame](https://github.com/lbeltrame)
* [Luca Ebach](https://github.com/lucebac)
* [Luca Olivetti](https://github.com/olivluca)
* [Luke Rogers](https://github.com/lukeroge)
* [Maarten](https://github.com/mrtndwrd)
* [Mads Jensen](https://github.com/atombrella)
* [Maikel Martens](https://github.com/krukas)
* [Malte Janduda](https://github.com/MalteJ)
* [Mantas Mikulėnas](https://github.com/grawity)
@@ -209,7 +202,6 @@ Authors
* [Pierre Jaury](https://github.com/kaiyou)
* [Piotr Kasprzyk](https://github.com/kwadrat)
* [Prayag Verma](https://github.com/pra85)
* [Rasesh Patel](https://github.com/raspat1)
* [Reinaldo de Souza Jr](https://github.com/juniorz)
* [Remi Rampin](https://github.com/remram44)
* [Rémy HUBSCHER](https://github.com/Natim)
@@ -217,7 +209,6 @@ Authors
* [Richard Barnes](https://github.com/r-barnes)
* [Richard Panek](https://github.com/kernelpanek)
* [Robert Buchholz](https://github.com/rbu)
* [Robert Dailey](https://github.com/pahrohfit)
* [Robert Habermann](https://github.com/frennkie)
* [Robert Xiao](https://github.com/nneonneo)
* [Roland Shoemaker](https://github.com/rolandshoemaker)
@@ -243,7 +234,6 @@ Authors
* [Spencer Bliven](https://github.com/sbliven)
* [Stacey Sheldon](https://github.com/solidgoldbomb)
* [Stavros Korokithakis](https://github.com/skorokithakis)
* [Ștefan Talpalaru](https://github.com/stefantalpalaru)
* [Stefan Weil](https://github.com/stweil)
* [Steve Desmond](https://github.com/stevedesmond-ca)
* [sydneyli](https://github.com/sydneyli)

View File

@@ -11,7 +11,7 @@ to the Sphinx generated docs is provided below.
[1] https://github.com/blog/1184-contributing-guidelines
[2] https://docutils.sourceforge.io/docs/user/rst/quickref.html#hyperlink-targets
[2] http://docutils.sourceforge.net/docs/user/rst/quickref.html#hyperlink-targets
-->

View File

@@ -1,5 +1,5 @@
# This Dockerfile builds an image for development.
FROM ubuntu:focal
FROM debian:buster
# Note: this only exposes the port to other docker containers.
EXPOSE 80 443
@@ -8,14 +8,13 @@ WORKDIR /opt/certbot/src
COPY . .
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install apache2 git python3-dev \
python3-venv gcc libaugeas0 libssl-dev libffi-dev ca-certificates \
openssl nginx-light -y --no-install-recommends && \
apt-get install apache2 git python3-dev python3-venv gcc libaugeas0 \
libssl-dev libffi-dev ca-certificates openssl nginx-light -y && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* \
/tmp/* \
/var/tmp/*
RUN VENV_NAME="../venv" python3 tools/venv.py
RUN VENV_NAME="../venv3" python3 tools/venv3.py
ENV PATH /opt/certbot/venv/bin:$PATH
ENV PATH /opt/certbot/venv3/bin:$PATH

View File

@@ -7,7 +7,7 @@ questions.
## My operating system is (include version):
## I installed Certbot with (snap, OS package manager, pip, certbot-auto, etc):
## I installed Certbot with (certbot-auto, OS package manager, pip, etc):
## I ran this command and it produced this output:

View File

@@ -6,6 +6,7 @@ This module is an implementation of the `ACME protocol`_.
"""
import sys
import warnings
# This code exists to keep backwards compatibility with people using acme.jose
# before it became the standalone josepy package.

View File

@@ -5,19 +5,18 @@ import functools
import hashlib
import logging
import socket
from typing import Type
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import hashes # type: ignore
import josepy as jose
from OpenSSL import crypto
from OpenSSL import SSL
import requests
import six
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
from OpenSSL import crypto
from acme import crypto_util
from acme import errors
from acme import fields
from acme.mixins import ResourceMixin
from acme.mixins import TypeMixin
from acme.mixins import ResourceMixin, TypeMixin
logger = logging.getLogger(__name__)
@@ -25,12 +24,12 @@ logger = logging.getLogger(__name__)
class Challenge(jose.TypedJSONObjectWithFields):
# _fields_to_partial_json
"""ACME challenge."""
TYPES: dict = {}
TYPES = {} # type: dict
@classmethod
def from_json(cls, jobj):
try:
return super().from_json(jobj)
return super(Challenge, cls).from_json(jobj)
except jose.UnrecognizedTypeError as error:
logger.debug(error)
return UnrecognizedChallenge.from_json(jobj)
@@ -39,7 +38,7 @@ class Challenge(jose.TypedJSONObjectWithFields):
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
# _fields_to_partial_json
"""ACME challenge response."""
TYPES: dict = {}
TYPES = {} # type: dict
resource_type = 'challenge'
resource = fields.Resource(resource_type)
@@ -58,7 +57,7 @@ class UnrecognizedChallenge(Challenge):
"""
def __init__(self, jobj):
super().__init__()
super(UnrecognizedChallenge, self).__init__()
object.__setattr__(self, "jobj", jobj)
def to_partial_json(self):
@@ -141,20 +140,21 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
return True
def to_partial_json(self):
jobj = super().to_partial_json()
jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
jobj.pop('keyAuthorization', None)
return jobj
class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class KeyAuthorizationChallenge(_TokenChallenge):
"""Challenge based on Key Authorization.
:param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
that will be used to generate ``response``.
that will be used to generate `response`.
:param str typ: type of the challenge
"""
typ: str = NotImplemented
response_cls: Type[KeyAuthorizationChallengeResponse] = NotImplemented
typ = NotImplemented
response_cls = NotImplemented
thumbprint_hash_function = (
KeyAuthorizationChallengeResponse.thumbprint_hash_function)
@@ -314,15 +314,6 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
except requests.exceptions.RequestException as error:
logger.error("Unable to reach %s: %s", uri, error)
return False
# By default, http_response.text will try to guess the encoding to use
# when decoding the response to Python unicode strings. This guesswork
# is error prone. RFC 8555 specifies that HTTP-01 responses should be
# key authorizations with possible trailing whitespace. Since key
# authorizations must be composed entirely of the base64url alphabet
# plus ".", we tell requests that the response should be ASCII. See
# https://datatracker.ietf.org/doc/html/rfc8555#section-8.3 for more
# info.
http_response.encoding = "ascii"
logger.debug("Received %s: %s. Headers: %s", http_response,
http_response.text, http_response.headers)

View File

@@ -1,47 +1,51 @@
"""ACME client API."""
# pylint: disable=too-many-lines
# This pylint disable can be deleted once the deprecated ACMEv1 code is
# removed.
import base64
import collections
import datetime
from email.utils import parsedate_tz
import heapq
import http.client as http_client
import logging
import re
import sys
import time
from types import ModuleType
from typing import cast
from typing import Dict
from typing import List
from typing import Set
from typing import Text
from typing import Union
import warnings
import josepy as jose
import OpenSSL
import requests
from requests.adapters import HTTPAdapter
from requests.utils import parse_header_links
from requests_toolbelt.adapters.source import SourceAddressAdapter
import six
from six.moves import http_client
from acme import crypto_util
from acme import errors
from acme import jws
from acme import messages
from acme.magic_typing import Dict
from acme.magic_typing import List
from acme.magic_typing import Set
from acme.magic_typing import Text
from acme.mixins import VersionedLEACMEMixin
logger = logging.getLogger(__name__)
# Prior to Python 2.7.9 the stdlib SSL module did not allow a user to configure
# many important security related options. On these platforms we use PyOpenSSL
# for SSL, which does allow these options to be configured.
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
if sys.version_info < (2, 7, 9): # pragma: no cover
try:
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3() # type: ignore
except AttributeError:
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
DEFAULT_NETWORK_TIMEOUT = 45
DER_CONTENT_TYPE = 'application/pkix-cert'
class ClientBase:
class ClientBase(object):
"""ACME client base object.
:ivar messages.Directory directory:
@@ -120,9 +124,8 @@ class ClientBase:
"""
return self.update_registration(regr, update={'status': 'deactivated'})
def deactivate_authorization(self,
authzr: messages.AuthorizationResource
) -> messages.AuthorizationResource:
def deactivate_authorization(self, authzr):
# type: (messages.AuthorizationResource) -> messages.AuthorizationResource
"""Deactivate authorization.
:param messages.AuthorizationResource authzr: The Authorization resource
@@ -197,7 +200,7 @@ class ClientBase:
when = parsedate_tz(retry_after)
if when is not None:
try:
tz_secs = datetime.timedelta(when[-1] if when[-1] is not None else 0)
tz_secs = datetime.timedelta(when[-1] if when[-1] else 0)
return datetime.datetime(*when[:7]) - tz_secs
except (ValueError, OverflowError):
pass
@@ -230,9 +233,6 @@ class ClientBase:
class Client(ClientBase):
"""ACME client for a v1 API.
.. deprecated:: 1.18.0
Use :class:`ClientV2` instead.
.. todo::
Clean up raised error types hierarchy, document, and handle (wrap)
instances of `.DeserializationError` raised in `from_json()`.
@@ -259,10 +259,10 @@ class Client(ClientBase):
if net is None:
net = ClientNetwork(key, alg=alg, verify_ssl=verify_ssl)
if isinstance(directory, str):
if isinstance(directory, six.string_types):
directory = messages.Directory.from_json(
net.get(directory).json())
super().__init__(directory=directory,
super(Client, self).__init__(directory=directory,
net=net, acme_version=1)
def register(self, new_reg=None):
@@ -435,7 +435,7 @@ class Client(ClientBase):
"""
assert max_attempts > 0
attempts: Dict[messages.AuthorizationResource, int] = collections.defaultdict(int)
attempts = collections.defaultdict(int) # type: Dict[messages.AuthorizationResource, int]
exhausted = set()
# priority queue with datetime.datetime (based on Retry-After) as key,
@@ -447,7 +447,7 @@ class Client(ClientBase):
heapq.heapify(waiting)
# mapping between original Authorization Resource and the most
# recently updated one
updated = {authzr: authzr for authzr in authzrs}
updated = dict((authzr, authzr) for authzr in authzrs)
while waiting:
# find the smallest Retry-After, and sleep if necessary
@@ -474,7 +474,7 @@ class Client(ClientBase):
exhausted.add(authzr)
if exhausted or any(authzr.body.status == messages.STATUS_INVALID
for authzr in updated.values()):
for authzr in six.itervalues(updated)):
raise errors.PollError(exhausted, updated)
updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
@@ -548,7 +548,7 @@ class Client(ClientBase):
:rtype: `list` of `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
"""
chain: List[jose.ComparableX509] = []
chain = [] # type: List[jose.ComparableX509]
uri = certr.cert_chain_uri
while uri is not None and len(chain) < max_length:
response, cert = self._get_cert(uri)
@@ -586,7 +586,7 @@ class ClientV2(ClientBase):
:param .messages.Directory directory: Directory Resource
:param .ClientNetwork net: Client network.
"""
super().__init__(directory=directory,
super(ClientV2, self).__init__(directory=directory,
net=net, acme_version=2)
def new_account(self, new_account):
@@ -636,7 +636,7 @@ class ClientV2(ClientBase):
"""
# https://github.com/certbot/certbot/issues/6155
new_regr = self._get_v2_account(regr)
return super().update_registration(new_regr, update)
return super(ClientV2, self).update_registration(new_regr, update)
def _get_v2_account(self, regr):
self.net.account = None
@@ -658,23 +658,16 @@ class ClientV2(ClientBase):
csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# pylint: disable=protected-access
dnsNames = crypto_util._pyopenssl_cert_or_req_all_names(csr)
ipNames = crypto_util._pyopenssl_cert_or_req_san_ip(csr)
# ipNames is now []string
identifiers = []
for name in dnsNames:
identifiers.append(messages.Identifier(typ=messages.IDENTIFIER_FQDN,
value=name))
for ips in ipNames:
identifiers.append(messages.Identifier(typ=messages.IDENTIFIER_IP,
value=ips))
order = messages.NewOrder(identifiers=identifiers)
response = self._post(self.directory['newOrder'], order)
body = messages.Order.from_json(response.json())
authorizations = []
# pylint has trouble understanding our josepy based objects which use
# things like custom metaclass logic. body.authorizations should be a
# list of strings containing URLs so let's disable this check here.
for url in body.authorizations: # pylint: disable=not-an-iterable
for url in body.authorizations:
authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
return messages.OrderResource(
body=body,
@@ -740,13 +733,11 @@ class ClientV2(ClientBase):
raise errors.ValidationError(failed)
return orderr.update(authorizations=responses)
def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
def finalize_order(self, orderr, deadline):
"""Finalize an order and obtain a certificate.
:param messages.OrderResource orderr: order to finalize
:param datetime.datetime deadline: when to stop polling and timeout
:param bool fetch_alternative_chains: whether to also fetch alternative
certificate chains
:returns: finalized order
:rtype: messages.OrderResource
@@ -763,13 +754,8 @@ class ClientV2(ClientBase):
if body.error is not None:
raise errors.IssuanceError(body.error)
if body.certificate is not None:
certificate_response = self._post_as_get(body.certificate)
orderr = orderr.update(body=body, fullchain_pem=certificate_response.text)
if fetch_alternative_chains:
alt_chains_urls = self._get_links(certificate_response, 'alternate')
alt_chains = [self._post_as_get(url).text for url in alt_chains_urls]
orderr = orderr.update(alternative_fullchains_pem=alt_chains)
return orderr
certificate_response = self._post_as_get(body.certificate).text
return orderr.update(body=body, fullchain_pem=certificate_response)
raise errors.TimeoutError()
def revoke(self, cert, rsn):
@@ -799,28 +785,11 @@ class ClientV2(ClientBase):
new_args = args[:1] + (None,) + args[1:]
return self._post(*new_args, **kwargs)
def _get_links(self, response, relation_type):
"""
Retrieves all Link URIs of relation_type from the response.
:param requests.Response response: The requests HTTP response.
:param str relation_type: The relation type to filter by.
"""
# Can't use response.links directly because it drops multiple links
# of the same relation type, which is possible in RFC8555 responses.
if 'Link' not in response.headers:
return []
links = parse_header_links(response.headers['Link'])
return [l['url'] for l in links
if 'rel' in l and 'url' in l and l['rel'] == relation_type]
class BackwardsCompatibleClientV2:
class BackwardsCompatibleClientV2(object):
"""ACME client wrapper that tends towards V2-style calls, but
supports V1 servers.
.. deprecated:: 1.18.0
Use :class:`ClientV2` instead.
.. note:: While this class handles the majority of the differences
between versions of the ACME protocol, if you need to support an
ACME server based on version 3 or older of the IETF ACME draft
@@ -839,7 +808,6 @@ class BackwardsCompatibleClientV2:
def __init__(self, net, key, server):
directory = messages.Directory.from_json(net.get(server).json())
self.acme_version = self._acme_version_from_directory(directory)
self.client: Union[Client, ClientV2]
if self.acme_version == 1:
self.client = Client(directory, key=key, net=net)
else:
@@ -859,18 +827,16 @@ class BackwardsCompatibleClientV2:
if check_tos_cb is not None:
check_tos_cb(tos)
if self.acme_version == 1:
client_v1 = cast(Client, self.client)
regr = client_v1.register(regr)
regr = self.client.register(regr)
if regr.terms_of_service is not None:
_assess_tos(regr.terms_of_service)
return client_v1.agree_to_tos(regr)
return self.client.agree_to_tos(regr)
return regr
else:
client_v2 = cast(ClientV2, self.client)
if "terms_of_service" in client_v2.directory.meta:
_assess_tos(client_v2.directory.meta.terms_of_service)
if "terms_of_service" in self.client.directory.meta:
_assess_tos(self.client.directory.meta.terms_of_service)
regr = regr.update(terms_of_service_agreed=True)
return client_v2.new_account(regr)
return self.client.new_account(regr)
def new_order(self, csr_pem):
"""Request a new Order object from the server.
@@ -888,32 +854,28 @@ class BackwardsCompatibleClientV2:
"""
if self.acme_version == 1:
client_v1 = cast(Client, self.client)
csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# pylint: disable=protected-access
dnsNames = crypto_util._pyopenssl_cert_or_req_all_names(csr)
authorizations = []
for domain in dnsNames:
authorizations.append(client_v1.request_domain_challenges(domain))
authorizations.append(self.client.request_domain_challenges(domain))
return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
return cast(ClientV2, self.client).new_order(csr_pem)
return self.client.new_order(csr_pem)
def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
def finalize_order(self, orderr, deadline):
"""Finalize an order and obtain a certificate.
:param messages.OrderResource orderr: order to finalize
:param datetime.datetime deadline: when to stop polling and timeout
:param bool fetch_alternative_chains: whether to also fetch alternative
certificate chains
:returns: finalized order
:rtype: messages.OrderResource
"""
if self.acme_version == 1:
client_v1 = cast(Client, self.client)
csr_pem = orderr.csr_pem
certr = client_v1.request_issuance(
certr = self.client.request_issuance(
jose.ComparableX509(
OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)),
orderr.authorizations)
@@ -921,7 +883,7 @@ class BackwardsCompatibleClientV2:
chain = None
while datetime.datetime.now() < deadline:
try:
chain = client_v1.fetch_chain(certr)
chain = self.client.fetch_chain(certr)
break
except errors.Error:
time.sleep(1)
@@ -936,8 +898,7 @@ class BackwardsCompatibleClientV2:
chain = crypto_util.dump_pyopenssl_chain(chain).decode()
return orderr.update(fullchain_pem=(cert + chain))
return cast(ClientV2, self.client).finalize_order(
orderr, deadline, fetch_alternative_chains)
return self.client.finalize_order(orderr, deadline)
def revoke(self, cert, rsn):
"""Revoke certificate.
@@ -963,10 +924,10 @@ class BackwardsCompatibleClientV2:
Always return False for ACMEv1 servers, as it doesn't use External Account Binding."""
if self.acme_version == 1:
return False
return cast(ClientV2, self.client).external_account_required()
return self.client.external_account_required()
class ClientNetwork:
class ClientNetwork(object):
"""Wrapper around requests that signs POSTs for authentication.
Also adds user agent, and handles Content-Type.
@@ -996,7 +957,7 @@ class ClientNetwork:
self.account = account
self.alg = alg
self.verify_ssl = verify_ssl
self._nonces: Set[Text] = set()
self._nonces = set() # type: Set[Text]
self.user_agent = user_agent
self.session = requests.Session()
self._default_timeout = timeout
@@ -1154,23 +1115,12 @@ class ClientNetwork:
host, path, _err_no, err_msg = m.groups()
raise ValueError("Requesting {0}{1}:{2}".format(host, path, err_msg))
# If the Content-Type is DER or an Accept header was sent in the
# request, the response may not be UTF-8 encoded. In this case, we
# don't set response.encoding and log the base64 response instead of
# raw bytes to keep binary data out of the logs. This code can be
# simplified to only check for an Accept header in the request when
# ACMEv1 support is dropped.
debug_content: Union[bytes, str]
if (response.headers.get("Content-Type") == DER_CONTENT_TYPE or
"Accept" in kwargs["headers"]):
# If content is DER, log the base64 of it instead of raw bytes, to keep
# binary data out of the logs.
if response.headers.get("Content-Type") == DER_CONTENT_TYPE:
debug_content = base64.b64encode(response.content)
else:
# We set response.encoding so response.text knows the response is
# UTF-8 encoded instead of trying to guess the encoding that was
# used which is error prone. This setting affects all future
# accesses of .text made on the returned response object as well.
response.encoding = "utf-8"
debug_content = response.text
debug_content = response.content.decode("utf-8")
logger.debug('Received response:\nHTTP %d\n%s\n\n%s',
response.status_code,
"\n".join("{0}: {1}".format(k, v)
@@ -1240,35 +1190,3 @@ class ClientNetwork:
response = self._check_response(response, content_type=content_type)
self._add_nonce(response)
return response
# This class takes a similar approach to the cryptography project to deprecate attributes
# in public modules. See the _ModuleWithDeprecation class here:
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
class _ClientDeprecationModule:
"""
Internal class delegating to a module, and displaying warnings when attributes
related to deprecated attributes in the acme.client module.
"""
def __init__(self, module):
self.__dict__['_module'] = module
def __getattr__(self, attr):
if attr in ('Client', 'BackwardsCompatibleClientV2'):
warnings.warn('The {0} attribute in acme.client is deprecated '
'and will be removed soon.'.format(attr),
DeprecationWarning, stacklevel=2)
return getattr(self._module, attr)
def __setattr__(self, attr, value): # pragma: no cover
setattr(self._module, attr, value)
def __delattr__(self, attr): # pragma: no cover
delattr(self._module, attr)
def __dir__(self): # pragma: no cover
return ['_module'] + dir(self._module)
# Patching ourselves to warn about deprecation and planned removal of some elements in the module.
sys.modules[__name__] = cast(ModuleType, _ClientDeprecationModule(sys.modules[__name__]))
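As an illustration only (not part of the changeset above), accessing one of the deprecated attributes through this module shim should surface a DeprecationWarning; a minimal sketch, assuming the post-deprecation acme package is installed:

import warnings

from acme import client

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _ = client.Client  # deprecated attribute access goes through the shim's __getattr__
    print(caught[0].category)  # <class 'DeprecationWarning'>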

View File

@@ -5,15 +5,15 @@ import logging
import os
import re
import socket
from typing import Callable
from typing import Tuple
from typing import Union
import josepy as jose
from OpenSSL import crypto
from OpenSSL import SSL
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
from acme import errors
from acme.magic_typing import Callable
from acme.magic_typing import Tuple
from acme.magic_typing import Union
logger = logging.getLogger(__name__)
@@ -24,10 +24,10 @@ logger = logging.getLogger(__name__)
# https://www.openssl.org/docs/ssl/SSLv23_method.html). _serve_sni
# should be changed to use "set_options" to disable SSLv2 and SSLv3,
# in case it's used for things other than probing/serving!
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
class _DefaultCertSelection:
class _DefaultCertSelection(object):
def __init__(self, certs):
self.certs = certs
@@ -36,7 +36,7 @@ class _DefaultCertSelection:
return self.certs.get(server_name, None)
class SSLSocket: # pylint: disable=too-few-public-methods
class SSLSocket(object): # pylint: disable=too-few-public-methods
"""SSL wrapper for sockets.
:ivar socket sock: Original wrapped socket.
@@ -93,7 +93,7 @@ class SSLSocket: # pylint: disable=too-few-public-methods
new_context.set_alpn_select_callback(self.alpn_selection)
connection.set_context(new_context)
class FakeConnection:
class FakeConnection(object):
"""Fake OpenSSL.SSL.Connection."""
# pylint: disable=missing-function-docstring
@@ -166,10 +166,10 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
" from {0}:{1}".format(
source_address[0],
source_address[1]
) if any(source_address) else ""
) if socket_kwargs else ""
)
socket_tuple: Tuple[str, int] = (host, port)
sock = socket.create_connection(socket_tuple, **socket_kwargs)
socket_tuple = (host, port) # type: Tuple[str, int]
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore
except socket.error as error:
raise errors.Error(error)
@@ -186,43 +186,23 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
raise errors.Error(error)
return client_ssl.get_peer_certificate()
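For orientation, a minimal sketch of how probe_sni() is typically called (illustrative only, not part of the changeset; the hostname is a placeholder and the call needs network access to a TLS server):

from acme.crypto_util import probe_sni

# Ask the server at `host` which certificate it serves for the SNI name `name`.
# `name` must be bytes because it is passed to set_tlsext_host_name().
cert = probe_sni(name=b"example.com", host="example.com", port=443, timeout=10)
print(cert.get_subject().CN)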
def make_csr(private_key_pem, domains=None, must_staple=False, ipaddrs=None):
"""Generate a CSR containing domains or IPs as subjectAltNames.
def make_csr(private_key_pem, domains, must_staple=False):
"""Generate a CSR containing a list of domains as subjectAltNames.
:param buffer private_key_pem: Private key, in PEM PKCS#8 format.
:param list domains: List of DNS names to include in subjectAltNames of CSR.
:param bool must_staple: Whether to include the TLS Feature extension (aka
OCSP Must Staple: https://tools.ietf.org/html/rfc7633).
:param list ipaddrs: List of IP addresses (type ipaddress.IPv4Address or ipaddress.IPv6Address)
to include in the subjectAltNames of the CSR.
Parameters are ordered this way for backwards compatibility when called with positional arguments.
:returns: buffer PEM-encoded Certificate Signing Request.
"""
private_key = crypto.load_privatekey(
crypto.FILETYPE_PEM, private_key_pem)
csr = crypto.X509Req()
sanlist = []
# if the domain or IP list is not supplied, use an empty list so it's easier to iterate
if domains is None:
domains = []
if ipaddrs is None:
ipaddrs = []
if len(domains)+len(ipaddrs) == 0:
raise ValueError("At least one of the domains or ipaddrs parameters must be non-empty")
for address in domains:
sanlist.append('DNS:' + address)
for ips in ipaddrs:
sanlist.append('IP:' + ips.exploded)
# make sure it's ASCII encoded
san_string = ', '.join(sanlist).encode('ascii')
# IP SANs actually need to be octet strings,
# but somewhere downstream this is thankfully handled for us
extensions = [
crypto.X509Extension(
b'subjectAltName',
critical=False,
value=san_string
value=', '.join('DNS:' + d for d in domains).encode('ascii')
),
]
if must_staple:
@@ -237,9 +217,7 @@ def make_csr(private_key_pem, domains=None, must_staple=False, ipaddrs=None):
return crypto.dump_certificate_request(
crypto.FILETYPE_PEM, csr)
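A minimal sketch exercising the widened make_csr() signature shown above (illustrative only, not part of the changeset; the key, domains, and IP address are placeholder values and assume the newer side of this diff is installed):

import ipaddress

from OpenSSL import crypto

from acme.crypto_util import make_csr

# Generate a throwaway RSA key and serialize it to PEM, as make_csr() expects.
key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)
key_pem = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)

# Domains and/or IP addresses may be passed; at least one list must be non-empty.
csr_pem = make_csr(
    key_pem,
    domains=["example.com", "www.example.com"],
    ipaddrs=[ipaddress.ip_address("192.0.2.1")],
)
print(csr_pem.decode())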
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
# despite its name, this only returns DNS names; other identifier types are ignored
common_name = loaded_cert_or_req.get_subject().CN
sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)
@@ -247,7 +225,6 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
return sans
return [common_name] + [d for d in sans if d != common_name]
def _pyopenssl_cert_or_req_san(cert_or_req):
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
@@ -259,76 +236,40 @@ def _pyopenssl_cert_or_req_san(cert_or_req):
:param cert_or_req: Certificate or CSR.
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
:returns: A list of Subject Alternative Names that is DNS.
:rtype: `list` of `unicode`
"""
# This function finds SANs that are DNS names
# constants based on PyOpenSSL certificate/CSR text dump
part_separator = ":"
prefix = "DNS" + part_separator
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
return [part.split(part_separator)[1]
for part in sans_parts if part.startswith(prefix)]
def _pyopenssl_cert_or_req_san_ip(cert_or_req):
"""Get Subject Alternative Names IPs from certificate or CSR using pyOpenSSL.
:param cert_or_req: Certificate or CSR.
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
:returns: A list of Subject Alternative Names that are IP Addresses.
:rtype: `list` of `unicode`. Note that these are returned as strings, not ipaddress objects.
"""
# constants based on PyOpenSSL certificate/CSR text dump
part_separator = ":"
prefix = "IP Address" + part_separator
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
return [part[len(prefix):] for part in sans_parts if part.startswith(prefix)]
def _pyopenssl_extract_san_list_raw(cert_or_req):
"""Get raw SAN string from cert or csr, parse it as UTF-8 and return.
:param cert_or_req: Certificate or CSR.
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
:returns: raw SAN strings, decoded from bytes as UTF-8
:returns: A list of Subject Alternative Names.
:rtype: `list` of `unicode`
"""
# This function finds SANs by dumping the certificate/CSR to text and
# searching for "X509v3 Subject Alternative Name" in the text. This method
# is used because in PyOpenSSL versions <0.17 the `_subjectAltNameString` method is
# not able to parse IP addresses in the subjectAltName string.
# is used to support PyOpenSSL version 0.13 where the
# `_subjectAltNameString` and `get_extensions` methods are not available
# for CSRs.
# constants based on PyOpenSSL certificate/CSR text dump
part_separator = ":"
parts_separator = ", "
prefix = "DNS" + part_separator
if isinstance(cert_or_req, crypto.X509):
# pylint: disable=line-too-long
func: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]] = crypto.dump_certificate
func = crypto.dump_certificate # type: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]]
else:
func = crypto.dump_certificate_request
text = func(crypto.FILETYPE_TEXT, cert_or_req).decode("utf-8")
# WARNING: this function does not support multiple SAN extensions.
# Multiple X509v3 extensions of the same type are disallowed by RFC 5280.
raw_san = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
parts_separator = ", "
match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
# WARNING: this function assumes that no SAN can include
# parts_separator, hence the split!
sans_parts = [] if raw_san is None else raw_san.group(1).split(parts_separator)
return sans_parts
sans_parts = [] if match is None else match.group(1).split(parts_separator)
return [part.split(part_separator)[1]
for part in sans_parts if part.startswith(prefix)]
def gen_ss_cert(key, domains=None, not_before=None,
validity=(7 * 24 * 60 * 60), force_san=True, extensions=None, ips=None):
def gen_ss_cert(key, domains, not_before=None,
validity=(7 * 24 * 60 * 60), force_san=True, extensions=None):
"""Generate new self-signed certificate.
:type domains: `list` of `unicode`
@@ -336,7 +277,6 @@ def gen_ss_cert(key, domains=None, not_before=None,
:param bool force_san:
:param extensions: List of additional extensions to include in the cert.
:type extensions: `list` of `OpenSSL.crypto.X509Extension`
:type ips: `list` of (`ipaddress.IPv4Address` or `ipaddress.IPv6Address`)
If more than one domain is provided, all of the domains are put into
``subjectAltName`` X.509 extension and first domain is set as the
@@ -344,39 +284,28 @@ def gen_ss_cert(key, domains=None, not_before=None,
extension is used, unless `force_san` is ``True``.
"""
assert domains or ips, "Must provide one or more hostnames or IPs for the cert."
assert domains, "Must provide one or more hostnames for the cert."
cert = crypto.X509()
cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
cert.set_version(2)
if extensions is None:
extensions = []
if domains is None:
domains = []
if ips is None:
ips = []
extensions.append(
crypto.X509Extension(
b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
)
if len(domains) > 0:
cert.get_subject().CN = domains[0]
cert.get_subject().CN = domains[0]
# TODO: what to put into cert.get_subject()?
cert.set_issuer(cert.get_subject())
sanlist = []
for address in domains:
sanlist.append('DNS:' + address)
for ip in ips:
sanlist.append('IP:' + ip.exploded)
san_string = ', '.join(sanlist).encode('ascii')
if force_san or len(domains) > 1 or len(ips) > 0:
if force_san or len(domains) > 1:
extensions.append(crypto.X509Extension(
b"subjectAltName",
critical=False,
value=san_string
value=b", ".join(b"DNS:" + d.encode() for d in domains)
))
cert.add_extensions(extensions)
@@ -388,7 +317,6 @@ def gen_ss_cert(key, domains=None, not_before=None,
cert.sign(key, "sha256")
return cert
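Similarly, a sketch of the extended gen_ss_cert() call (illustrative only, not part of the changeset; the names and addresses are placeholders):

import ipaddress

from OpenSSL import crypto

from acme.crypto_util import gen_ss_cert

key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)

# At least one hostname or IP must be provided; both kinds end up in subjectAltName.
cert = gen_ss_cert(key, domains=["localhost"],
                   ips=[ipaddress.ip_address("127.0.0.1")])
print(cert.get_subject().CN)  # "localhost"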
def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
"""Dump certificate chain into a bundle.

View File

@@ -28,8 +28,13 @@ class NonceError(ClientError):
class BadNonce(NonceError):
"""Bad nonce error."""
def __init__(self, nonce, error, *args):
super().__init__(*args)
def __init__(self, nonce, error, *args, **kwargs):
# MyPy complains here that there are too many arguments for the BaseException constructor.
# This is an error fixed in typeshed, see https://github.com/python/mypy/issues/4183
# The fix is included in MyPy>=0.740, but upgrading it would bring dozens of errors due to
# new type definitions. So we ignore the error until the code base is updated to match
# the MyPy>=0.740 referential.
super(BadNonce, self).__init__(*args, **kwargs) # type: ignore
self.nonce = nonce
self.error = error
@@ -44,11 +49,12 @@ class MissingNonce(NonceError):
Replay-Nonce header field in each successful response to a POST it
provides to a client (...)".
:ivar requests.Response ~.response: HTTP Response
:ivar requests.Response response: HTTP Response
"""
def __init__(self, response, *args):
super().__init__(*args)
def __init__(self, response, *args, **kwargs):
# See comment in BadNonce constructor above for an explanation of type: ignore here.
super(MissingNonce, self).__init__(*args, **kwargs) # type: ignore
self.response = response
def __str__(self):
@@ -72,7 +78,7 @@ class PollError(ClientError):
def __init__(self, exhausted, updated):
self.exhausted = exhausted
self.updated = updated
super().__init__()
super(PollError, self).__init__()
@property
def timeout(self):
@@ -90,7 +96,7 @@ class ValidationError(Error):
"""
def __init__(self, failed_authzrs):
self.failed_authzrs = failed_authzrs
super().__init__()
super(ValidationError, self).__init__()
class TimeoutError(Error): # pylint: disable=redefined-builtin
@@ -106,7 +112,7 @@ class IssuanceError(Error):
:param messages.Error error: The error provided by the server.
"""
self.error = error
super().__init__()
super(IssuanceError, self).__init__()
class ConflictError(ClientError):
@@ -119,7 +125,7 @@ class ConflictError(ClientError):
"""
def __init__(self, location):
self.location = location
super().__init__()
super(ConflictError, self).__init__()
class WildcardUnsupportedError(Error):

View File

@@ -12,7 +12,7 @@ class Fixed(jose.Field):
def __init__(self, json_name, value):
self.value = value
super().__init__(
super(Fixed, self).__init__(
json_name=json_name, default=value, omitempty=False)
def decode(self, value):
@@ -53,7 +53,7 @@ class Resource(jose.Field):
def __init__(self, resource_type, *args, **kwargs):
self.resource_type = resource_type
super().__init__(
super(Resource, self).__init__(
'resource', default=resource_type, *args, **kwargs)
def decode(self, value):

View File

@@ -14,9 +14,7 @@ class Header(jose.Header):
kid = jose.Field('kid', omitempty=True)
url = jose.Field('url', omitempty=True)
# Mypy does not understand the josepy magic happening here, and falsely claims
# that nonce is redefined. Let's ignore the type check here.
@nonce.decoder # type: ignore
@nonce.decoder
def nonce(value): # pylint: disable=no-self-argument,missing-function-docstring
try:
return jose.decode_b64jose(value)
@@ -50,7 +48,7 @@ class JWS(jose.JWS):
# Per ACME spec, jwk and kid are mutually exclusive, so only include a
# jwk field if kid is not provided.
include_jwk = kid is None
return super().sign(payload, key=key, alg=alg,
return super(JWS, cls).sign(payload, key=key, alg=alg,
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
nonce=nonce, url=url, kid=kid,
include_jwk=include_jwk)
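To make the jwk/kid exclusivity concrete, a sketch of signing with an account URL as the kid (illustrative only, not part of the changeset; the generated key, URLs, and nonce are placeholders):

import josepy as jose
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa

from acme.jws import JWS

key = jose.JWKRSA(key=rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()))
jws = JWS.sign(b"{}", key=key, alg=jose.RS256, nonce=b"dummy-nonce",
               url="https://acme.example/new-order",
               kid="https://acme.example/acct/1")
# Because kid was provided, no jwk field is embedded in the protected header.
assert jws.signature.combined.jwk is None
assert jws.signature.combined.kid == "https://acme.example/acct/1"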

View File

@@ -1,17 +1,15 @@
"""Simple shim around the typing module.
"""Shim class to not have to depend on typing module in prod."""
import sys
This was useful when this code supported Python 2 and typing wasn't always
available. This code is being kept for now for backwards compatibility.
"""
import warnings
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
from typing import Collection, IO
warnings.warn("acme.magic_typing is deprecated and will be removed in a future release.",
DeprecationWarning)
class TypingClass:
class TypingClass(object):
"""Ignore import errors by getting anything"""
def __getattr__(self, name):
return None # pragma: no cover
return None
try:
# mypy doesn't respect modifying sys.modules
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
from typing import Collection, IO # type: ignore
except ImportError:
sys.modules[__name__] = TypingClass()

View File

@@ -1,11 +1,8 @@
"""ACME protocol messages."""
from collections.abc import Hashable
import json
from typing import Any
from typing import Dict
from typing import Type
import josepy as jose
import six
from acme import challenges
from acme import errors
@@ -14,6 +11,13 @@ from acme import jws
from acme import util
from acme.mixins import ResourceMixin
try:
from collections.abc import Hashable
except ImportError: # pragma: no cover
from collections import Hashable
OLD_ERROR_PREFIX = "urn:acme:error:"
ERROR_PREFIX = "urn:ietf:params:acme:error:"
@@ -64,6 +68,7 @@ def is_acme_error(err):
return False
@six.python_2_unicode_compatible
class Error(jose.JSONObjectWithFields, errors.Error):
"""ACME error.
@@ -90,9 +95,7 @@ class Error(jose.JSONObjectWithFields, errors.Error):
raise ValueError("The supplied code: %s is not a known ACME error"
" code" % code)
typ = ERROR_PREFIX + code
# Mypy will not understand that the Error constructor accepts a named argument
# "typ" because of josepy magic. Let's ignore the type check here.
return cls(typ=typ, **kwargs) # type: ignore
return cls(typ=typ, **kwargs)
@property
def description(self):
@@ -114,7 +117,7 @@ class Error(jose.JSONObjectWithFields, errors.Error):
:rtype: unicode
"""
code = str(self.typ).rsplit(':', maxsplit=1)[-1]
code = str(self.typ).split(':')[-1]
if code in ERROR_CODES:
return code
return None
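A small sketch of how these error helpers behave (illustrative only, not part of the changeset; the detail text is a placeholder):

from acme import messages

err = messages.Error.with_code("malformed", detail="CSR could not be parsed")
print(err.typ)          # urn:ietf:params:acme:error:malformed
print(err.code)         # malformed
print(err.description)  # The request message was malformed
print(messages.is_acme_error(err))  # True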
@@ -126,13 +129,13 @@ class Error(jose.JSONObjectWithFields, errors.Error):
if part is not None).decode()
class _Constant(jose.JSONDeSerializable, Hashable):
class _Constant(jose.JSONDeSerializable, Hashable): # type: ignore
"""ACME constant."""
__slots__ = ('name',)
POSSIBLE_NAMES: Dict[str, '_Constant'] = NotImplemented
POSSIBLE_NAMES = NotImplemented
def __init__(self, name):
super().__init__()
super(_Constant, self).__init__()
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
self.name = name
@@ -155,10 +158,13 @@ class _Constant(jose.JSONDeSerializable, Hashable):
def __hash__(self):
return hash((self.__class__, self.name))
def __ne__(self, other):
return not self == other
class Status(_Constant):
"""ACME "status" field."""
POSSIBLE_NAMES: dict = {}
POSSIBLE_NAMES = {} # type: dict
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
@@ -171,10 +177,8 @@ STATUS_DEACTIVATED = Status('deactivated')
class IdentifierType(_Constant):
"""ACME identifier type."""
POSSIBLE_NAMES: Dict[str, 'IdentifierType'] = {}
# class def ends here
POSSIBLE_NAMES = {} # type: dict
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
IDENTIFIER_IP = IdentifierType('ip') # IdentifierIP in pebble - not in Boulder yet
class Identifier(jose.JSONObjectWithFields):
@@ -191,7 +195,7 @@ class Identifier(jose.JSONObjectWithFields):
class Directory(jose.JSONDeSerializable):
"""Directory."""
_REGISTERED_TYPES: Dict[str, Type[Any]] = {}
_REGISTERED_TYPES = {} # type: dict
class Meta(jose.JSONObjectWithFields):
"""Directory Meta."""
@@ -202,8 +206,8 @@ class Directory(jose.JSONDeSerializable):
external_account_required = jose.Field('externalAccountRequired', omitempty=True)
def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super().__init__(**kwargs)
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
super(Directory.Meta, self).__init__(**kwargs)
@property
def terms_of_service(self):
@@ -213,7 +217,7 @@ class Directory(jose.JSONDeSerializable):
def __iter__(self):
# When iterating over fields, use the external name 'terms_of_service' instead of
# the internal '_terms_of_service'.
for name in super().__iter__():
for name in super(Directory.Meta, self).__iter__():
yield name[1:] if name == '_terms_of_service' else name
def _internal_name(self, name):
@@ -225,7 +229,7 @@ class Directory(jose.JSONDeSerializable):
return getattr(key, 'resource_type', key)
@classmethod
def register(cls, resource_body_cls: Type[Any]) -> Type[Any]:
def register(cls, resource_body_cls):
"""Register resource."""
resource_type = resource_body_cls.resource_type
assert resource_type not in cls._REGISTERED_TYPES
@@ -271,7 +275,7 @@ class Resource(jose.JSONObjectWithFields):
class ResourceWithURI(Resource):
"""ACME Resource with URI.
:ivar unicode ~.uri: Location of the resource.
:ivar unicode uri: Location of the resource.
"""
uri = jose.Field('uri') # no ChallengeResource.uri
@@ -281,7 +285,7 @@ class ResourceBody(jose.JSONObjectWithFields):
"""ACME Resource Body."""
class ExternalAccountBinding:
class ExternalAccountBinding(object):
"""ACME External Account Binding"""
@classmethod
@@ -311,9 +315,6 @@ class Registration(ResourceBody):
# on new-reg key server ignores 'key' and populates it based on
# JWS.signature.combined.jwk
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
# Contact field implements special behavior to allow messages that clear existing
# contacts while not expecting the `contact` field when loading from json.
# This is implemented in the constructor and *_json methods.
contact = jose.Field('contact', omitempty=True, default=())
agreement = jose.Field('agreement', omitempty=True)
status = jose.Field('status', omitempty=True)
@@ -326,73 +327,24 @@ class Registration(ResourceBody):
@classmethod
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
"""
Create registration resource from contact details.
The `contact` keyword being passed to a Registration object is meaningful, so
this function represents empty iterables in its kwargs by passing on an empty
`tuple`.
"""
# Note if `contact` was in kwargs.
contact_provided = 'contact' in kwargs
# Pop `contact` from kwargs and add formatted email or phone numbers
"""Create registration resource from contact details."""
details = list(kwargs.pop('contact', ()))
if phone is not None:
details.append(cls.phone_prefix + phone)
if email is not None:
details.extend([cls.email_prefix + mail for mail in email.split(',')])
# Insert formatted contact information back into kwargs
# or insert an empty tuple if `contact` provided.
if details or contact_provided:
kwargs['contact'] = tuple(details)
kwargs['contact'] = tuple(details)
if external_account_binding:
kwargs['external_account_binding'] = external_account_binding
return cls(**kwargs)
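A sketch of the from_data() helper above (illustrative only, not part of the changeset; the contact details are placeholders):

from acme import messages

reg = messages.Registration.from_data(
    email="admin@example.com", phone="+12025551212")
# Phone and email are normalized into prefixed contact entries.
print(reg.contact)  # ('tel:+12025551212', 'mailto:admin@example.com')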
def __init__(self, **kwargs):
"""Note if the user provides a value for the `contact` member."""
if 'contact' in kwargs:
# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
object.__setattr__(self, '_add_contact', True)
super().__init__(**kwargs)
def _filter_contact(self, prefix):
return tuple(
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
if detail.startswith(prefix))
def _add_contact_if_appropriate(self, jobj):
"""
The `contact` member of Registration objects should not be required when
de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
it should be included in serializations if it was provided.
:param jobj: Dictionary containing this Registrations' data
:type jobj: dict
:returns: Dictionary containing Registrations data to transmit to the server
:rtype: dict
"""
if getattr(self, '_add_contact', False):
jobj['contact'] = self.encode('contact')
return jobj
def to_partial_json(self):
"""Modify josepy.JSONDeserializable.to_partial_json()"""
jobj = super().to_partial_json()
return self._add_contact_if_appropriate(jobj)
def fields_to_partial_json(self):
"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
jobj = super().fields_to_partial_json()
return self._add_contact_if_appropriate(jobj)
@property
def phones(self):
"""All phones found in the ``contact`` field."""
@@ -461,20 +413,20 @@ class ChallengeBody(ResourceBody):
omitempty=True, default=None)
def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super().__init__(**kwargs)
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
super(ChallengeBody, self).__init__(**kwargs)
def encode(self, name):
return super().encode(self._internal_name(name))
return super(ChallengeBody, self).encode(self._internal_name(name))
def to_partial_json(self):
jobj = super().to_partial_json()
jobj = super(ChallengeBody, self).to_partial_json()
jobj.update(self.chall.to_partial_json())
return jobj
@classmethod
def fields_from_json(cls, jobj):
jobj_fields = super().fields_from_json(jobj)
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
return jobj_fields
@@ -489,7 +441,7 @@ class ChallengeBody(ResourceBody):
def __iter__(self):
# When iterating over fields, use the external name 'uri' instead of
# the internal '_uri'.
for name in super().__iter__():
for name in super(ChallengeBody, self).__iter__():
yield name[1:] if name == '_uri' else name
def _internal_name(self, name):
@@ -535,9 +487,7 @@ class Authorization(ResourceBody):
expires = fields.RFC3339Field('expires', omitempty=True)
wildcard = jose.Field('wildcard', omitempty=True)
# Mypy does not understand the josepy magic happening here, and falsely claims
# that challenge is redefined. Let's ignore the type check here.
@challenges.decoder # type: ignore
@challenges.decoder
def challenges(value): # pylint: disable=no-self-argument,missing-function-docstring
return tuple(ChallengeBody.from_json(chall) for chall in value)
@@ -616,16 +566,14 @@ class Revocation(ResourceMixin, jose.JSONObjectWithFields):
class Order(ResourceBody):
"""Order Resource Body.
:ivar identifiers: List of identifiers for the certificate.
:vartype identifiers: `list` of `.Identifier`
:ivar list of .Identifier: List of identifiers for the certificate.
:ivar acme.messages.Status status:
:ivar authorizations: URLs of authorizations.
:vartype authorizations: `list` of `str`
:ivar list of str authorizations: URLs of authorizations.
:ivar str certificate: URL to download certificate as a fullchain PEM.
:ivar str finalize: URL to POST to to request issuance once all
authorizations have "valid" status.
:ivar datetime.datetime expires: When the order expires.
:ivar ~.Error error: Any error that occurred during finalization, if applicable.
:ivar .Error error: Any error that occurred during finalization, if applicable.
"""
identifiers = jose.Field('identifiers', omitempty=True)
status = jose.Field('status', decoder=Status.from_json,
@@ -636,9 +584,7 @@ class Order(ResourceBody):
expires = fields.RFC3339Field('expires', omitempty=True)
error = jose.Field('error', omitempty=True, decoder=Error.from_json)
# Mypy does not understand the josepy magic happening here, and falsely claims
# that identifiers is redefined. Let's ignore the type check here.
@identifiers.decoder # type: ignore
@identifiers.decoder
def identifiers(value): # pylint: disable=no-self-argument,missing-function-docstring
return tuple(Identifier.from_json(identifier) for identifier in value)
@@ -647,20 +593,15 @@ class OrderResource(ResourceWithURI):
:ivar acme.messages.Order body:
:ivar str csr_pem: The CSR this Order will be finalized with.
:ivar authorizations: Fully-fetched AuthorizationResource objects.
:vartype authorizations: `list` of `acme.messages.AuthorizationResource`
:ivar list of acme.messages.AuthorizationResource authorizations:
Fully-fetched AuthorizationResource objects.
:ivar str fullchain_pem: The fetched contents of the certificate URL
produced once the order was finalized, if it's present.
:ivar alternative_fullchains_pem: The fetched contents of alternative certificate
chain URLs produced once the order was finalized, if present and requested during
finalization.
:vartype alternative_fullchains_pem: `list` of `str`
"""
body = jose.Field('body', decoder=Order.from_json)
csr_pem = jose.Field('csr_pem', omitempty=True)
authorizations = jose.Field('authorizations')
fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
alternative_fullchains_pem = jose.Field('alternative_fullchains_pem', omitempty=True)
@Directory.register
class NewOrder(Order):

View File

@@ -1,7 +1,7 @@
"""Useful mixins for Challenge and Resource objects"""
class VersionedLEACMEMixin:
class VersionedLEACMEMixin(object):
"""This mixin stores the version of Let's Encrypt's endpoint being used."""
@property
def le_acme_version(self):
@@ -20,7 +20,7 @@ class VersionedLEACMEMixin:
# Required for @property to operate properly. See comment above.
object.__setattr__(self, key, value)
else:
super().__setattr__(key, value) # pragma: no cover
super(VersionedLEACMEMixin, self).__setattr__(key, value) # pragma: no cover
class ResourceMixin(VersionedLEACMEMixin):
@@ -30,12 +30,12 @@ class ResourceMixin(VersionedLEACMEMixin):
"""
def to_partial_json(self):
"""See josepy.JSONDeserializable.to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(ResourceMixin, self),
'to_partial_json', 'resource')
def fields_to_partial_json(self):
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(ResourceMixin, self),
'fields_to_partial_json', 'resource')
@@ -46,12 +46,12 @@ class TypeMixin(VersionedLEACMEMixin):
"""
def to_partial_json(self):
"""See josepy.JSONDeserializable.to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(TypeMixin, self),
'to_partial_json', 'type')
def fields_to_partial_json(self):
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(TypeMixin, self),
'fields_to_partial_json', 'type')

View File

@@ -1,17 +1,17 @@
"""Support for standalone client challenge solvers. """
import collections
import functools
import http.client as http_client
import http.server as BaseHTTPServer
import logging
import socket
import socketserver
import threading
from typing import List
from typing import Optional
from six.moves import BaseHTTPServer # type: ignore
from six.moves import http_client
from six.moves import socketserver # type: ignore
from acme import challenges
from acme import crypto_util
from acme.magic_typing import List
logger = logging.getLogger(__name__)
@@ -54,7 +54,7 @@ class ACMEServerMixin:
allow_reuse_address = True
class BaseDualNetworkedServers:
class BaseDualNetworkedServers(object):
"""Base class for a pair of IPv6 and IPv4 servers that tries to do everything
it's asked for both servers, but where failures in one server don't
affect the other.
@@ -64,11 +64,8 @@ class BaseDualNetworkedServers:
def __init__(self, ServerClass, server_address, *remaining_args, **kwargs):
port = server_address[1]
self.threads: List[threading.Thread] = []
self.servers: List[socketserver.BaseServer] = []
# Preserve socket error for re-raising, if no servers can be started
last_socket_err: Optional[socket.error] = None
self.threads = [] # type: List[threading.Thread]
self.servers = [] # type: List[ACMEServerMixin]
# Must try True first.
# Ubuntu, for example, will fail to bind to IPv4 if we've already bound
@@ -86,8 +83,7 @@ class BaseDualNetworkedServers:
logger.debug(
"Successfully bound to %s:%s using %s", new_address[0],
new_address[1], "IPv6" if ip_version else "IPv4")
except socket.error as e:
last_socket_err = e
except socket.error:
if self.servers:
# Already bound using IPv6.
logger.debug(
@@ -106,10 +102,7 @@ class BaseDualNetworkedServers:
# bind to the same port for both servers.
port = server.socket.getsockname()[1]
if not self.servers:
if last_socket_err:
raise last_socket_err
else: # pragma: no cover
raise socket.error("Could not bind to IPv4 or IPv6.")
raise socket.error("Could not bind to IPv4 or IPv6.")
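For context, a sketch of how this dual-stack wrapper is typically used through its http-01 subclass (illustrative only, not part of the changeset; it assumes acme.standalone.HTTP01DualNetworkedServers, which wraps HTTP01Server for both address families):

from acme.standalone import HTTP01DualNetworkedServers

# Port 0 lets the OS pick a free port; the same port is then reused for both families.
servers = HTTP01DualNetworkedServers(("", 0), resources=set())
servers.serve_forever()          # each bound server runs in its own thread
print(servers.getsocknames())    # the bound (address, port) tuples for each family
servers.shutdown_and_server_close()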
def serve_forever(self):
"""Wraps socketserver.TCPServer.serve_forever"""
@@ -211,24 +204,8 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def __init__(self, *args, **kwargs):
self.simple_http_resources = kwargs.pop("simple_http_resources", set())
self._timeout = kwargs.pop('timeout', 30)
self.timeout = kwargs.pop('timeout', 30)
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
self.server: HTTP01Server
# In parent class BaseHTTPRequestHandler, 'timeout' is a class-level property but we
# need to define its value during the initialization phase in HTTP01RequestHandler.
# However MyPy does not appreciate that we dynamically shadow a class-level property
# with an instance-level property (eg. self.timeout = ... in __init__()). So to make
# everyone happy, we statically redefine 'timeout' as a method property, and set the
# timeout value in a new internal instance-level property _timeout.
@property
def timeout(self):
"""
The default timeout this server should apply to requests.
:return: timeout to apply
:rtype: int
"""
return self._timeout
def log_message(self, format, *args): # pylint: disable=redefined-builtin
"""Log arbitrary message."""

View File

@@ -1,5 +1,7 @@
"""ACME utilities."""
import six
def map_keys(dikt, func):
"""Map dictionary keys."""
return {func(key): value for key, value in dikt.items()}
return dict((func(key), value) for key, value in six.iteritems(dikt))
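A one-line illustration of map_keys() (illustrative only, not part of the changeset):

from acme.util import map_keys

print(map_keys({"fooBar": 1, "bazQux": 2}, str.lower))
# {'foobar': 1, 'bazqux': 2}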

View File

@@ -9,7 +9,7 @@ BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.

View File

@@ -85,9 +85,7 @@ language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
'_build',
]
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -122,7 +120,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally

View File

@@ -65,7 +65,7 @@ if errorlevel 9009 (
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
echo.http://sphinx-doc.org/
exit /b 1
)

View File

@@ -1,3 +1 @@
:orphan:
.. literalinclude:: ../jws-help.txt

View File

@@ -0,0 +1,2 @@
python -m acme.standalone -p 1234
curl -k https://localhost:1234

View File

@@ -0,0 +1 @@
../../../acme/testdata/rsa2048_cert.pem

View File

@@ -0,0 +1 @@
../../../acme/testdata/rsa2048_key.pem

View File

@@ -1,19 +1,45 @@
from distutils.version import StrictVersion
import sys
from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.20.0.dev0'
version = '1.6.0.dev0'
# Please update tox.ini when modifying dependency version requirements
install_requires = [
'cryptography>=2.1.4',
'josepy>=1.9.0',
'PyOpenSSL>=17.3.0',
# load_pem_private/public_key (>=0.6)
# rsa_recover_prime_factors (>=0.8)
'cryptography>=1.2.3',
# formerly known as acme.jose:
# 1.1.0+ is required to avoid the warnings described at
# https://github.com/certbot/josepy/issues/13.
'josepy>=1.1.0',
# Connection.set_tlsext_host_name (>=0.13)
'PyOpenSSL>=0.13.1',
'pyrfc3339',
'pytz',
'requests>=2.14.2',
'requests[security]>=2.6.0', # security extras added in 2.4.1
'requests-toolbelt>=0.3.0',
'setuptools>=39.0.1',
'setuptools',
'six>=1.9.0', # needed for python_2_unicode_compatible
]
setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
'of setuptools. Version 36.2+ of setuptools is required.')
elif sys.version_info < (3,3):
install_requires.append('mock')
dev_extras = [
'pytest',
'pytest-xdist',
'tox',
]
docs_extras = [
@@ -21,10 +47,21 @@ docs_extras = [
'sphinx_rtd_theme',
]
test_extras = [
'pytest',
'pytest-xdist',
]
class PyTest(TestCommand):
user_options = []
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
name='acme',
@@ -32,19 +69,21 @@ setup(
description='ACME protocol implementation in Python',
url='https://github.com/letsencrypt/letsencrypt',
author="Certbot Project",
author_email='certbot-dev@eff.org',
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=3.6',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],
@@ -53,7 +92,10 @@ setup(
include_package_data=True,
install_requires=install_requires,
extras_require={
'dev': dev_extras,
'docs': docs_extras,
'test': test_extras,
},
test_suite='acme',
tests_require=["pytest"],
cmdclass={"test": PyTest},
)

View File

@@ -1,11 +1,14 @@
"""Tests for acme.challenges."""
import urllib.parse as urllib_parse
import unittest
from unittest import mock
import josepy as jose
import OpenSSL
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
import requests
from six.moves.urllib import parse as urllib_parse
from acme import errors
@@ -292,7 +295,7 @@ class TLSALPN01ResponseTest(unittest.TestCase):
def test_gen_verify_cert_gen_key(self):
cert, key = self.response.gen_cert(self.domain)
self.assertIsInstance(key, OpenSSL.crypto.PKey)
self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
self.assertTrue(self.response.verify_cert(self.domain, cert))
def test_verify_bad_cert(self):
@@ -431,7 +434,7 @@ class DNSTest(unittest.TestCase):
mock_gen.return_value = mock.sentinel.validation
response = self.msg.gen_response(KEY)
from acme.challenges import DNSResponse
self.assertIsInstance(response, DNSResponse)
self.assertTrue(isinstance(response, DNSResponse))
self.assertEqual(response.validation, mock.sentinel.validation)
def test_validation_domain_name(self):

View File

@@ -2,16 +2,17 @@
# pylint: disable=too-many-lines
import copy
import datetime
import http.client as http_client
import ipaddress
import json
import unittest
from typing import Dict
from unittest import mock
import josepy as jose
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
import OpenSSL
import requests
from six.moves import http_client # pylint: disable=import-error
from acme import challenges
from acme import errors
@@ -24,7 +25,6 @@ import test_util
CERT_DER = test_util.load_vector('cert.der')
CERT_SAN_PEM = test_util.load_vector('cert-san.pem')
CSR_SAN_PEM = test_util.load_vector('csr-san.pem')
CSR_MIXED_PEM = test_util.load_vector('csr-mixed.pem')
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
KEY2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
@@ -64,7 +64,7 @@ class ClientTestBase(unittest.TestCase):
self.contact = ('mailto:cert-admin@example.com', 'tel:+12025551212')
reg = messages.Registration(
contact=self.contact, key=KEY.public_key())
the_arg: Dict = dict(reg)
the_arg = dict(reg) # type: Dict
self.new_reg = messages.NewRegistration(**the_arg)
self.regr = messages.RegistrationResource(
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
@@ -92,7 +92,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
"""Tests for acme.client.BackwardsCompatibleClientV2."""
def setUp(self):
super().setUp()
super(BackwardsCompatibleClientV2Test, self).setUp()
# contains a loaded cert
self.certr = messages.CertificateResource(
body=messages_test.CERT)
@@ -263,7 +263,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
with mock.patch('acme.client.ClientV2') as mock_client:
client = self._init()
client.finalize_order(mock_orderr, mock_deadline)
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline, False)
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline)
def test_revoke(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
@@ -321,7 +321,7 @@ class ClientTest(ClientTestBase):
"""Tests for acme.client.Client."""
def setUp(self):
super().setUp()
super(ClientTest, self).setUp()
self.directory = DIRECTORY_V1
@@ -606,8 +606,8 @@ class ClientTest(ClientTestBase):
# make sure that max_attempts is per-authorization, rather
# than global
max_attempts=max(len(authzrs[0].retries), len(authzrs[1].retries)))
self.assertIs(cert[0], csr)
self.assertIs(cert[1], updated_authzrs)
self.assertTrue(cert[0] is csr)
self.assertTrue(cert[1] is updated_authzrs)
self.assertEqual(updated_authzrs[0].uri, 'a...')
self.assertEqual(updated_authzrs[1].uri, 'b.')
self.assertEqual(updated_authzrs[0].times, [
@@ -643,7 +643,7 @@ class ClientTest(ClientTestBase):
authzr = self.client.deactivate_authorization(self.authzr)
self.assertEqual(authzb, authzr.body)
self.assertEqual(self.client.net.post.call_count, 1)
self.assertIn(self.authzr.uri, self.net.post.call_args_list[0][0])
self.assertTrue(self.authzr.uri in self.net.post.call_args_list[0][0])
def test_check_cert(self):
self.response.headers['Location'] = self.certr.uri
@@ -702,7 +702,7 @@ class ClientTest(ClientTestBase):
def test_revocation_payload(self):
obj = messages.Revocation(certificate=self.certr.body, reason=self.rsn)
self.assertIn('reason', obj.to_partial_json().keys())
self.assertTrue('reason' in obj.to_partial_json().keys())
self.assertEqual(self.rsn, obj.to_partial_json()['reason'])
def test_revoke_bad_status_raises_error(self):
@@ -718,7 +718,7 @@ class ClientV2Test(ClientTestBase):
"""Tests for acme.client.ClientV2."""
def setUp(self):
super().setUp()
super(ClientV2Test, self).setUp()
self.directory = DIRECTORY_V2
@@ -742,7 +742,7 @@ class ClientV2Test(ClientTestBase):
self.orderr = messages.OrderResource(
body=self.order,
uri='https://www.letsencrypt-demo.org/acme/acct/1/order/1',
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_MIXED_PEM)
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_SAN_PEM)
def test_new_account(self):
self.response.status_code = http_client.CREATED
@@ -772,7 +772,7 @@ class ClientV2Test(ClientTestBase):
with mock.patch('acme.client.ClientV2._post_as_get') as mock_post_as_get:
mock_post_as_get.side_effect = (authz_response, authz_response2)
self.assertEqual(self.client.new_order(CSR_MIXED_PEM), self.orderr)
self.assertEqual(self.client.new_order(CSR_SAN_PEM), self.orderr)
@mock.patch('acme.client.datetime')
def test_poll_and_finalize(self, mock_datetime):
@@ -842,32 +842,6 @@ class ClientV2Test(ClientTestBase):
deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
self.assertRaises(errors.TimeoutError, self.client.finalize_order, self.orderr, deadline)
def test_finalize_order_alt_chains(self):
updated_order = self.order.update(
certificate='https://www.letsencrypt-demo.org/acme/cert/',
)
updated_orderr = self.orderr.update(body=updated_order,
fullchain_pem=CERT_SAN_PEM,
alternative_fullchains_pem=[CERT_SAN_PEM,
CERT_SAN_PEM])
self.response.json.return_value = updated_order.to_json()
self.response.text = CERT_SAN_PEM
self.response.headers['Link'] ='<https://example.com/acme/cert/1>;rel="alternate", ' + \
'<https://example.com/dir>;rel="index", ' + \
'<https://example.com/acme/cert/2>;title="foo";rel="alternate"'
deadline = datetime.datetime(9999, 9, 9)
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
self.net.post.assert_any_call('https://example.com/acme/cert/1',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
self.net.post.assert_any_call('https://example.com/acme/cert/2',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
self.assertEqual(resp, updated_orderr)
del self.response.headers['Link']
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
self.assertEqual(resp, updated_orderr.update(alternative_fullchains_pem=[]))
def test_revoke(self):
self.client.revoke(messages_test.CERT, self.rsn)
self.net.post.assert_called_once_with(
@@ -879,9 +853,9 @@ class ClientV2Test(ClientTestBase):
self.response.headers['Location'] = self.regr.uri
self.response.json.return_value = self.regr.body.to_json()
self.assertEqual(self.regr, self.client.update_registration(self.regr))
self.assertIsNotNone(self.client.net.account)
self.assertNotEqual(self.client.net.account, None)
self.assertEqual(self.client.net.post.call_count, 2)
self.assertIn(DIRECTORY_V2.newAccount, self.net.post.call_args_list[0][0])
self.assertTrue(DIRECTORY_V2.newAccount in self.net.post.call_args_list[0][0])
self.response.json.return_value = self.regr.body.update(
contact=()).to_json()
@@ -945,7 +919,7 @@ class ClientNetworkTest(unittest.TestCase):
self.response.links = {}
def test_init(self):
self.assertIs(self.net.verify_ssl, self.verify_ssl)
self.assertTrue(self.net.verify_ssl is self.verify_ssl)
def test_wrap_in_jws(self):
# pylint: disable=protected-access
@@ -1187,7 +1161,7 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
def send_request(*args, **kwargs):
# pylint: disable=unused-argument,missing-docstring
self.assertNotIn("new_nonce_url", kwargs)
self.assertFalse("new_nonce_url" in kwargs)
method = args[0]
uri = args[1]
if method == 'HEAD' and uri != "new_nonce_uri":
@@ -1332,7 +1306,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
from acme.client import ClientNetwork
net = ClientNetwork(key=None, alg=None, source_address=self.source_address)
for adapter in net.session.adapters.values():
self.assertIn(self.source_address, adapter.source_address)
self.assertTrue(self.source_address in adapter.source_address)
def test_behavior_assumption(self):
"""This is a test that guardrails the HTTPAdapter behavior so that if the default for
@@ -1342,7 +1316,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
# test should fail if the default adapter type is changed by requests
net = ClientNetwork(key=None, alg=None)
session = requests.Session()
for scheme in session.adapters:
for scheme in session.adapters.keys():
client_network_adapter = net.session.adapters.get(scheme)
default_adapter = session.adapters.get(scheme)
self.assertEqual(client_network_adapter.__class__, default_adapter.__class__)

View File

@@ -1,15 +1,14 @@
"""Tests for acme.crypto_util."""
import itertools
import ipaddress
import socket
import socketserver
import threading
import time
import unittest
from typing import List
import josepy as jose
import OpenSSL
import six
from six.moves import socketserver # type: ignore # pylint: disable=import-error
from acme import errors
import test_util
@@ -28,6 +27,8 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
class _TestServer(socketserver.TCPServer):
# six.moves.* | pylint: disable=attribute-defined-outside-init,no-init
def server_bind(self): # pylint: disable=missing-docstring
self.socket = SSLSocket(socket.socket(),
certs)
@@ -61,6 +62,7 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
self.assertRaises(errors.Error, self._probe, b'bar')
def test_probe_connection_error(self):
# pylint has a hard time with six
self.server.server_close()
original_timeout = socket.getdefaulttimeout()
try:
@@ -109,6 +111,7 @@ class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san."""
@classmethod
def _call(cls, loader, name):
# pylint: disable=protected-access
@@ -118,9 +121,9 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
@classmethod
def _get_idn_names(cls):
"""Returns expected names from '{cert,csr}-idnsans.pem'."""
chars = [chr(i) for i in itertools.chain(range(0x3c3, 0x400),
range(0x641, 0x6fc),
range(0x1820, 0x1877))]
chars = [six.unichr(i) for i in itertools.chain(range(0x3c3, 0x400),
range(0x641, 0x6fc),
range(0x1820, 0x1877))]
return [''.join(chars[i: i + 45]) + '.invalid'
for i in range(0, len(chars), 45)]
@@ -174,73 +177,24 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
['chicago-cubs.venafi.example', 'cubs.venafi.example'])
class PyOpenSSLCertOrReqSANIPTest(unittest.TestCase):
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san_ip."""
@classmethod
def _call(cls, loader, name):
# pylint: disable=protected-access
from acme.crypto_util import _pyopenssl_cert_or_req_san_ip
return _pyopenssl_cert_or_req_san_ip(loader(name))
def _call_cert(self, name):
return self._call(test_util.load_cert, name)
def _call_csr(self, name):
return self._call(test_util.load_csr, name)
def test_cert_no_sans(self):
self.assertEqual(self._call_cert('cert.pem'), [])
def test_csr_no_sans(self):
self.assertEqual(self._call_csr('csr-nosans.pem'), [])
def test_cert_domain_sans(self):
self.assertEqual(self._call_cert('cert-san.pem'), [])
def test_csr_domain_sans(self):
self.assertEqual(self._call_csr('csr-san.pem'), [])
def test_cert_ip_two_sans(self):
self.assertEqual(self._call_cert('cert-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])
def test_csr_ip_two_sans(self):
self.assertEqual(self._call_csr('csr-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])
def test_csr_ipv6_sans(self):
self.assertEqual(self._call_csr('csr-ipv6sans.pem'),
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])
def test_cert_ipv6_sans(self):
self.assertEqual(self._call_cert('cert-ipv6sans.pem'),
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])
class GenSsCertTest(unittest.TestCase):
"""Test for gen_ss_cert (generation of self-signed cert)."""
class RandomSnTest(unittest.TestCase):
"""Test for random certificate serial numbers."""
def setUp(self):
self.cert_count = 5
self.serial_num: List[int] = []
self.serial_num = [] # type: List[int]
self.key = OpenSSL.crypto.PKey()
self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
def test_sn_collisions(self):
from acme.crypto_util import gen_ss_cert
for _ in range(self.cert_count):
cert = gen_ss_cert(self.key, ['dummy'], force_san=True,
ips=[ipaddress.ip_address("10.10.10.10")])
cert = gen_ss_cert(self.key, ['dummy'], force_san=True)
self.serial_num.append(cert.get_serial_number())
self.assertGreaterEqual(len(set(self.serial_num)), self.cert_count)
def test_no_name(self):
from acme.crypto_util import gen_ss_cert
with self.assertRaises(AssertionError):
gen_ss_cert(self.key, ips=[ipaddress.ip_address("1.1.1.1")])
gen_ss_cert(self.key)
self.assertTrue(len(set(self.serial_num)) > 1)
class MakeCSRTest(unittest.TestCase):
"""Test for standalone functions."""
@@ -255,8 +209,8 @@ class MakeCSRTest(unittest.TestCase):
def test_make_csr(self):
csr_pem = self._call_with_key(["a.example", "b.example"])
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--', csr_pem)
self.assertIn(b'--END CERTIFICATE REQUEST--', csr_pem)
self.assertTrue(b'--BEGIN CERTIFICATE REQUEST--' in csr_pem)
self.assertTrue(b'--END CERTIFICATE REQUEST--' in csr_pem)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
@@ -272,27 +226,6 @@ class MakeCSRTest(unittest.TestCase):
).get_data(),
)
def test_make_csr_ip(self):
csr_pem = self._call_with_key(["a.example"], False, [ipaddress.ip_address('127.0.0.1'), ipaddress.ip_address('::1')])
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--' , csr_pem)
self.assertIn(b'--END CERTIFICATE REQUEST--' , csr_pem)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
# have a get_extensions() method, so we skip this test if the method
# isn't available.
if hasattr(csr, 'get_extensions'):
self.assertEqual(len(csr.get_extensions()), 1)
self.assertEqual(csr.get_extensions()[0].get_data(),
OpenSSL.crypto.X509Extension(
b'subjectAltName',
critical=False,
value=b'DNS:a.example, IP:127.0.0.1, IP:::1',
).get_data(),
)
# IP SANs actually need to be octet strings,
# but somewhere downstream this is thankfully handled for us
def test_make_csr_must_staple(self):
csr_pem = self._call_with_key(["a.example"], must_staple=True)
csr = OpenSSL.crypto.load_certificate_request(
@@ -311,9 +244,6 @@ class MakeCSRTest(unittest.TestCase):
self.assertEqual(len(must_staple_exts), 1,
"Expected exactly one Must Staple extension")
def test_make_csr_without_hostname(self):
self.assertRaises(ValueError, self._call_with_key)
class DumpPyopensslChainTest(unittest.TestCase):
"""Test for dump_pyopenssl_chain."""

View File

@@ -1,6 +1,10 @@
"""Tests for acme.errors."""
import unittest
from unittest import mock
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
class BadNonceTest(unittest.TestCase):
@@ -24,8 +28,8 @@ class MissingNonceTest(unittest.TestCase):
self.error = MissingNonce(self.response)
def test_str(self):
self.assertIn("FOO", str(self.error))
self.assertIn("{}", str(self.error))
self.assertTrue("FOO" in str(self.error))
self.assertTrue("{}" in str(self.error))
class PollErrorTest(unittest.TestCase):

View File

@@ -48,7 +48,7 @@ class JWSTest(unittest.TestCase):
self.assertEqual(jws.signature.combined.nonce, self.nonce)
self.assertEqual(jws.signature.combined.url, self.url)
self.assertEqual(jws.signature.combined.kid, self.kid)
self.assertIsNone(jws.signature.combined.jwk)
self.assertEqual(jws.signature.combined.jwk, None)
# TODO: check that nonce is in protected header
self.assertEqual(jws, JWS.from_json(jws.to_json()))
@@ -58,7 +58,7 @@ class JWSTest(unittest.TestCase):
jws = JWS.sign(payload=b'foo', key=self.privkey,
alg=jose.RS256, nonce=self.nonce,
url=self.url)
self.assertIsNone(jws.signature.combined.kid)
self.assertEqual(jws.signature.combined.kid, None)
self.assertEqual(jws.signature.combined.jwk, self.pubkey)

View File

@@ -1,8 +1,11 @@
"""Tests for acme.magic_typing."""
import sys
import unittest
import warnings
from unittest import mock
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
class MagicTypingTest(unittest.TestCase):
@@ -10,21 +13,32 @@ class MagicTypingTest(unittest.TestCase):
def test_import_success(self):
try:
import typing as temp_typing
except ImportError: # pragma: no cover
temp_typing = None # pragma: no cover
except ImportError: # pragma: no cover
temp_typing = None # pragma: no cover
typing_class_mock = mock.MagicMock()
text_mock = mock.MagicMock()
typing_class_mock.Text = text_mock
sys.modules['typing'] = typing_class_mock
if 'acme.magic_typing' in sys.modules:
del sys.modules['acme.magic_typing'] # pragma: no cover
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
from acme.magic_typing import Text
del sys.modules['acme.magic_typing'] # pragma: no cover
from acme.magic_typing import Text
self.assertEqual(Text, text_mock)
del sys.modules['acme.magic_typing']
sys.modules['typing'] = temp_typing
def test_import_failure(self):
try:
import typing as temp_typing
except ImportError: # pragma: no cover
temp_typing = None # pragma: no cover
sys.modules['typing'] = None
if 'acme.magic_typing' in sys.modules:
del sys.modules['acme.magic_typing'] # pragma: no cover
from acme.magic_typing import Text
self.assertTrue(Text is None)
del sys.modules['acme.magic_typing']
sys.modules['typing'] = temp_typing
if __name__ == '__main__':
unittest.main() # pragma: no cover

View File

@@ -1,9 +1,11 @@
"""Tests for acme.messages."""
from typing import Dict
import unittest
from unittest import mock
import josepy as jose
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
from acme import challenges
import test_util
@@ -41,13 +43,13 @@ class ErrorTest(unittest.TestCase):
def test_description(self):
self.assertEqual('The request message was malformed', self.error.description)
self.assertIsNone(self.error_custom.description)
self.assertTrue(self.error_custom.description is None)
def test_code(self):
from acme.messages import Error
self.assertEqual('malformed', self.error.code)
self.assertIsNone(self.error_custom.code)
self.assertIsNone(Error().code)
self.assertEqual(None, self.error_custom.code)
self.assertEqual(None, Error().code)
def test_is_acme_error(self):
from acme.messages import is_acme_error, Error
@@ -82,7 +84,7 @@ class ConstantTest(unittest.TestCase):
from acme.messages import _Constant
class MockConstant(_Constant): # pylint: disable=missing-docstring
POSSIBLE_NAMES: Dict = {}
POSSIBLE_NAMES = {} # type: Dict
self.MockConstant = MockConstant # pylint: disable=invalid-name
self.const_a = MockConstant('a')
@@ -106,11 +108,11 @@ class ConstantTest(unittest.TestCase):
def test_equality(self):
const_a_prime = self.MockConstant('a')
self.assertNotEqual(self.const_a, self.const_b)
self.assertEqual(self.const_a, const_a_prime)
self.assertFalse(self.const_a == self.const_b)
self.assertTrue(self.const_a == const_a_prime)
self.assertNotEqual(self.const_a, self.const_b)
self.assertEqual(self.const_a, const_a_prime)
self.assertTrue(self.const_a != self.const_b)
self.assertFalse(self.const_a != const_a_prime)
class DirectoryTest(unittest.TestCase):
@@ -252,19 +254,6 @@ class RegistrationTest(unittest.TestCase):
from acme.messages import Registration
hash(Registration.from_json(self.jobj_from))
def test_default_not_transmitted(self):
from acme.messages import NewRegistration
empty_new_reg = NewRegistration()
new_reg_with_contact = NewRegistration(contact=())
self.assertEqual(empty_new_reg.contact, ())
self.assertEqual(new_reg_with_contact.contact, ())
self.assertNotIn('contact', empty_new_reg.to_partial_json())
self.assertNotIn('contact', empty_new_reg.fields_to_partial_json())
self.assertIn('contact', new_reg_with_contact.to_partial_json())
self.assertIn('contact', new_reg_with_contact.fields_to_partial_json())
class UpdateRegistrationTest(unittest.TestCase):
"""Tests for acme.messages.UpdateRegistration."""
@@ -406,7 +395,7 @@ class AuthorizationResourceTest(unittest.TestCase):
authzr = AuthorizationResource(
uri=mock.sentinel.uri,
body=mock.sentinel.body)
self.assertIsInstance(authzr, jose.JSONDeSerializable)
self.assertTrue(isinstance(authzr, jose.JSONDeSerializable))
class CertificateRequestTest(unittest.TestCase):
@@ -417,7 +406,7 @@ class CertificateRequestTest(unittest.TestCase):
self.req = CertificateRequest(csr=CSR)
def test_json_de_serializable(self):
self.assertIsInstance(self.req, jose.JSONDeSerializable)
self.assertTrue(isinstance(self.req, jose.JSONDeSerializable))
from acme.messages import CertificateRequest
self.assertEqual(
self.req, CertificateRequest.from_json(self.req.to_json()))
@@ -433,7 +422,7 @@ class CertificateResourceTest(unittest.TestCase):
cert_chain_uri=mock.sentinel.cert_chain_uri)
def test_json_de_serializable(self):
self.assertIsInstance(self.certr, jose.JSONDeSerializable)
self.assertTrue(isinstance(self.certr, jose.JSONDeSerializable))
from acme.messages import CertificateResource
self.assertEqual(
self.certr, CertificateResource.from_json(self.certr.to_json()))

View File

@@ -1,14 +1,16 @@
"""Tests for acme.standalone."""
import http.client as http_client
import socket
import socketserver
import threading
import unittest
from typing import Set
from unittest import mock
import josepy as jose
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
import requests
from six.moves import http_client # pylint: disable=import-error
from six.moves import socketserver # type: ignore # pylint: disable=import-error
from acme import challenges
from acme import crypto_util
@@ -42,7 +44,7 @@ class HTTP01ServerTest(unittest.TestCase):
def setUp(self):
self.account_key = jose.JWK.load(
test_util.load_vector('rsa1024_key.pem'))
self.resources: Set = set()
self.resources = set() # type: Set
from acme.standalone import HTTP01Server
self.server = HTTP01Server(('', 0), resources=self.resources)
@@ -190,18 +192,12 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
@mock.patch("socket.socket.bind")
def test_fail_to_bind(self, mock_bind):
from errno import EADDRINUSE
mock_bind.side_effect = socket.error
from acme.standalone import BaseDualNetworkedServers
mock_bind.side_effect = socket.error(EADDRINUSE, "Fake addr in use error")
with self.assertRaises(socket.error) as em:
BaseDualNetworkedServers(
BaseDualNetworkedServersTest.SingleProtocolServer,
('', 0), socketserver.BaseRequestHandler)
self.assertEqual(em.exception.errno, EADDRINUSE)
self.assertRaises(socket.error, BaseDualNetworkedServers,
BaseDualNetworkedServersTest.SingleProtocolServer,
('', 0),
socketserver.BaseRequestHandler)
def test_ports_equal(self):
from acme.standalone import BaseDualNetworkedServers
@@ -225,7 +221,7 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
def setUp(self):
self.account_key = jose.JWK.load(
test_util.load_vector('rsa1024_key.pem'))
self.resources: Set = set()
self.resources = set() # type: Set
from acme.standalone import HTTP01DualNetworkedServers
self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources)

View File

@@ -1,21 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDizCCAnOgAwIBAgIIPNBLQXwhoUkwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxNzNiMjYwHhcNMjAwNTI5MTkxODA5
WhcNMjUwNTI5MTkxODA5WjAWMRQwEgYDVQQDEwsxOTIuMC4yLjE0NTCCASIwDQYJ
KoZIhvcNAQEBBQADggEPADCCAQoCggEBALyChb+NDA26GF1AfC0nzEdfOTchKw0h
q41xEjonvg5UXgZf/aH/ntvugIkYP0MaFifNAjebOVVsemEVEtyWcUKTfBHKZGbZ
ukTDwFIjfTccCfo6U/B2H7ZLzJIywl8DcUw9DypadeQBm8PS0VVR2ncy73dvaqym
crhAwlASyXU0mhLqRDMMxfg5Bn/FWpcsIcDpLmPn8Q/FvdRc2t5ryBNw/aWOlwqT
Oy16nbfLj2T0zG1A3aPuD+eT/JFUe/o3K7R+FAx7wt+RziQO46wLVVF1SueZUrIU
zqN04Gl8Kt1WM2SniZ0gq/rORUNcPtT0NAEsEslTQfA+Trq6j2peqyMCAwEAAaOB
yjCBxzAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUF
BwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHj1mwZzP//nMIH2i58NRUl/arHn
MB8GA1UdIwQYMBaAFF5DVAKabvIUvKFHGouscA2Qdpe6MDEGCCsGAQUFBwEBBCUw
IzAhBggrBgEFBQcwAYYVaHR0cDovLzEyNy4wLjAuMTo0MDAyMBUGA1UdEQQOMAyH
BMAAApGHBMsAcQEwDQYJKoZIhvcNAQELBQADggEBAHjSgDg76/UCIMSYddyhj18r
LdNKjA7p8ovnErSkebFT4lIZ9f3Sma9moNr0w64M33NamuFyHe/KTdk90mvoW8Uu
26aDekiRIeeMakzbAtDKn67tt2tbedKIYRATcSYVwsV46uZKbM621dZKIjjxOWpo
IY6rZYrku8LYhoXJXOqRduV3cTRVuTm5bBa9FfVNtt6N1T5JOtKKDEhuSaF4RSug
PDy3hQIiHrVvhPfVrXU3j6owz/8UCS5549inES9ONTFrvM9o0H1R/MsmGNXR5hF5
iJqHKC7n8LZujhVnoFIpHu2Dsiefbfr+yRYJS4I+ezy6Nq/Ok8rc8zp0eoX+uyY=
-----END CERTIFICATE-----

View File

@@ -1,22 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDmzCCAoOgAwIBAgIIFdxeZP+v2rgwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSA0M2M5NTcwHhcNMjAwNTMwMDQwNzMw
WhcNMjUwNTMwMDQwNzMwWjAOMQwwCgYDVQQDEwM6OjEwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQC7VidVduJvqKtrSH0fw6PjE0cqL4Kfzo7klexWUkHG
KVAa0fRVZFZ462jxKOt417V2U4WJQ6WHHO9PJ+3gW62d/MhCw8FRtUQS4nYFjqB6
32+RFU21VRN7cWoQEqSwnEPbh/v/zv/KS5JhQ+swWUo79AOLm1kjnZWCKtcqh1Lc
Ug5Tkpot6luoxTKp52MkchvXDpj0q2B/XpLJ8/pw5cqjv7mH12EDOK2HXllA+WwX
ZpstcEhaA4FqtaHOW/OHnwTX5MUbINXE5YYHVEDR6moVM31/W/3pe9NDUMTDE7Si
lVQnZbXM9NYbzZqlh+WhemDWwnIfGI6rtsfNEiirVEOlAgMBAAGjgeIwgd8wDgYD
VR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNV
HRMBAf8EAjAAMB0GA1UdDgQWBBS8DL+MZfDIy6AKky69Tgry2Vxq5DAfBgNVHSME
GDAWgBRAsFqVenRRKgB1YPzWKzb9bzZ/ozAxBggrBgEFBQcBAQQlMCMwIQYIKwYB
BQUHMAGGFWh0dHA6Ly8xMjcuMC4wLjE6NDAwMjAtBgNVHREEJjAkhxAAAAAAAAAA
AAAAAAAAAAABhxCjvjLzIG7HXQlWDO6YWF7FMA0GCSqGSIb3DQEBCwUAA4IBAQBY
M9UTZ3uaKMQ+He9kWR3p9jh6hTSD0FNi79ZdfkG0lgSzhhduhN7OhzQH2ihUUfa6
rtKTw74fGbszhizCd9UB8YPKlm3si1Xbg6ZUQlA1RtoQo7RUGEa6ZbR68PKGm9Go
hTTFIl/JS8jzxBR8jywZdyqtprUx+nnNUDiNk0hJtFLhw7OJH0AHlAUNqHsfD08m
HXRdaV6q14HXU5g31slBat9H4D6tCU/2uqBURwW0wVdnqh4QeRfAeqiatJS9EmSF
ctbc7n894Idy2Xce7NFoIy5cht3m6Rd42o/LmBsJopBmQcDPZT70/XzRtc2qE0cS
CzBIGQHUJ6BfmBjrCQnp
-----END CERTIFICATE-----

View File

@@ -1,16 +0,0 @@
-----BEGIN CERTIFICATE REQUEST-----
MIICbTCCAVUCAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKT/
CE7Y5EYBvI4p7Frt763upIKHDHO/R5/TWMjG8Jm9qTMui8sbMgyh2Yh+lR/j/5Xd
tQrhgC6wx10MrW2+3JtYS88HP1p6si8zU1dbK34n3NyyklR2RivW0R7dXgnYNy7t
5YcDYLCrbRMIPINV/uHrmzIHWYUDNcZVdAfIM2AHfKYuV6Mepcn///5GR+l4GcAh
Nkf9CW8OdAIuKdbyLCxVr0mUW/vJz1b12uxPsgUdax9sjXgZdT4pfMXADsFd1NeF
atpsXU073inqtHru+2F9ijHTQ75TC+u/rr6eYl3BnBntac0gp/ADtDBii7/Q1JOO
Bhq7xJNqqxIEdiyM7zcCAwEAAaAoMCYGCSqGSIb3DQEJDjEZMBcwFQYDVR0RBA4w
DIcEwAACkYcEywBxATANBgkqhkiG9w0BAQsFAAOCAQEADG5g3zdbSCaXpZhWHkzE
Mek3f442TUE1pB+ITRpthmM4N3zZWETYmbLCIAO624uMrRnbCCMvAoLs/L/9ETg/
XMMFtonQC8u9i9tV8B1ceBh8lpIfa+8b9TMWH3bqnrbWQ+YIl+Yd0gXiCZWJ9vK4
eM1Gddu/2bR6s/k4h/XAWRgEexqk57EHr1z0N+T9OoX939n3mVcNI+u9kfd5VJ0z
VyA3R8WR6T6KlEl5P5pcWe5Kuyhi7xMmLVImXqBtvKq4O1AMfM+gQr/yn9aE8IRq
khP7JrMBLUIub1c/qu2TfvnynNPSM/ZcOX+6PHdHmRkR3nI0Ndpv7Ntv31FTplAm
Dw==
-----END CERTIFICATE REQUEST-----

View File

@@ -1,16 +0,0 @@
-----BEGIN CERTIFICATE REQUEST-----
MIIChTCCAW0CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOIc
UAppcqJfTkSqqOFqGt1v7lIJZPOcF4bcKI3d5cHAGbOuVxbC7uMaDuObwYLzoiED
qnvs1NaEq2phO6KsgGESB7IE2LUjJivO7OnSZjNRpL5si/9egvBiNCn/50lULaWG
gLEuyMfk3awZy2mVAymy7Grhbx069A4TH8TqsHuq2RpKyuDL27e/jUt6yYecb3pu
hWMiWy3segif4tI46pkOW0/I6DpxyYD2OqOvzxm/voS9RMqE2+7YJA327H7bEi3N
lJZEZ1zy7clZ9ga5fBQaetzbg2RyxTrZ7F919NQXSFoXgxb10Eg64wIpz0L3ooCm
GEHehsZZexa3J5ccIvMCAwEAAaBAMD4GCSqGSIb3DQEJDjExMC8wLQYDVR0RBCYw
JIcQAAAAAAAAAAAAAAAAAAAAAYcQo74y8yBux10JVgzumFhexTANBgkqhkiG9w0B
AQsFAAOCAQEALvwVn0A/JPTCiNzcozHFnp5M23C9PXCplWc5u4k34d4XXzpSeFDz
fL4gy7NpYIueme2K2ppw2j3PNQUdR6vQ5a75sriegWYrosL+7Q6Joh51ZyEUZQoD
mNl4M4S4oX85EaChR6NFGBywTfjFarYi32XBTbFE7rK8N8KM+DQkNdwL1MXqaHWz
F1obQKpNXlLedbCBOteV5Eg4zG3565zu/Gw/NhwzzV3mQmgxUcd1sMJxAfHQz4Vl
ImLL+xMcR03nDsH2bgtDbK2tJm7WszSxA9tC+Xp2lRewxrnQloRWPYDz177WGQ5Q
SoGDzTTtA6uWZxG8h7CkNLOGvA8LtU2rNA==
-----END CERTIFICATE REQUEST-----

View File

@@ -1,16 +0,0 @@
-----BEGIN CERTIFICATE REQUEST-----
MIICdjCCAV4CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMXq
v1y8EIcCbaUIzCtOcLkLS0MJ35oS+6DmV5WB1A0cIk6YrjsHIsY2lwMm13BWIvmw
tY+Y6n0rr7eViNx5ZRGHpHEI/TL3Neb+VefTydL5CgvK3dd4ex2kSbTaed3fmpOx
qMajEduwNcZPCcmoEXPkfrCP8w2vKQUkQ+JRPcdX1nTuzticeRP5B7YCmJsmxkEh
Y0tzzZ+NIRDARoYNofefY86h3e5q66gtJxccNchmIM3YQahhg5n3Xoo8hGfM/TIc
R7ncCBCLO6vtqo0QFva/NQODrgOmOsmgvqPkUWQFdZfWM8yIaU826dktx0CPB78t
TudnJ1rBRvGsjHMsZikCAwEAAaAxMC8GCSqGSIb3DQEJDjEiMCAwHgYDVR0RBBcw
FYINYS5leGVtcGxlLmNvbYcEwAACbzANBgkqhkiG9w0BAQsFAAOCAQEAdGMcRCxq
1X09gn1TNdMt64XUv+wdJCKDaJ+AgyIJj7QvVw8H5k7dOnxS4I+a/yo4jE+LDl2/
AuHcBLFEI4ddewdJSMrTNZjuRYuOdr3KP7fL7MffICSBi45vw5EOXg0tnjJCEiKu
6gcJgbLSP5JMMd7Haf33Q/VWsmHofR3VwOMdrnakwAU3Ff5WTuXTNVhL1kT/uLFX
yW1ru6BF4unwNqSR2UeulljpNfRBsiN4zJK11W6n9KT0NkBr9zY5WCM4sW7i8k9V
TeypWGo3jBKzYAGeuxZsB97U77jZ2lrGdBLZKfbcjnTeRVqCvCRrui4El7UGYFmj
7s6OJyWx5DSV8w==
-----END CERTIFICATE REQUEST-----

View File

@@ -9,6 +9,7 @@ import pkg_resources
from certbot import errors
from certbot import util
from certbot.compat import os
logger = logging.getLogger(__name__)
@@ -153,10 +154,13 @@ def parse_defines(apachectl):
return {}
for match in matches:
# Value could also contain = so split only once
parts = match.split('=', 1)
value = parts[1] if len(parts) == 2 else ''
variables[parts[0]] = value
if match.count("=") > 1:
logger.error("Unexpected number of equal signs in "
"runtime config dump.")
raise errors.PluginError(
"Error parsing Apache runtime variables")
parts = match.partition("=")
variables[parts[0]] = parts[2]
return variables
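Both variants of parse_defines above hinge on splitting a define token only at the first '=' so that values containing '=' survive. A tiny self-contained illustration (not Certbot code):

def parse_define(token):
    """'FOO=bar=baz' -> ('FOO', 'bar=baz'); a bare 'FOO' -> ('FOO', '')."""
    # split("=", 1) yields the same value once the missing-'=' case is handled
    name, _, value = token.partition("=")
    return name, value

assert parse_define("USE_TLS") == ("USE_TLS", "")
assert parse_define("FOO=bar") == ("FOO", "bar")
assert parse_define("FOO=bar=baz") == ("FOO", "bar=baz")
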
@@ -217,14 +221,12 @@ def _get_runtime_cfg(command):
"""
try:
proc = subprocess.run(
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
check=False,
env=util.env_no_snap_for_external_calls())
stdout, stderr = proc.stdout, proc.stderr
universal_newlines=True)
stdout, stderr = proc.communicate()
except (OSError, ValueError):
logger.error(

View File

@@ -1,5 +1,4 @@
""" apacheconfig implementation of the ParserNode interfaces """
from typing import Tuple
from certbot_apache._internal import assertions
from certbot_apache._internal import interfaces
@@ -15,14 +14,14 @@ class ApacheParserNode(interfaces.ParserNode):
def __init__(self, **kwargs):
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs) # pylint: disable=unused-variable
super().__init__(**kwargs)
super(ApacheParserNode, self).__init__(**kwargs)
self.ancestor = ancestor
self.filepath = filepath
self.dirty = dirty
self.metadata = metadata
self._raw = self.metadata["ac_ast"]
def save(self, msg): # pragma: no cover
def save(self, msg): # pragma: no cover
pass
def find_ancestors(self, name): # pylint: disable=unused-variable
@@ -39,7 +38,7 @@ class ApacheCommentNode(ApacheParserNode):
def __init__(self, **kwargs):
comment, kwargs = util.commentnode_kwargs(kwargs) # pylint: disable=unused-variable
super().__init__(**kwargs)
super(ApacheCommentNode, self).__init__(**kwargs)
self.comment = comment
def __eq__(self, other): # pragma: no cover
@@ -57,7 +56,7 @@ class ApacheDirectiveNode(ApacheParserNode):
def __init__(self, **kwargs):
name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
super().__init__(**kwargs)
super(ApacheDirectiveNode, self).__init__(**kwargs)
self.name = name
self.parameters = parameters
self.enabled = enabled
@@ -83,8 +82,8 @@ class ApacheBlockNode(ApacheDirectiveNode):
""" apacheconfig implementation of BlockNode interface """
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.children: Tuple[ApacheParserNode, ...] = ()
super(ApacheBlockNode, self).__init__(**kwargs)
self.children = ()
def __eq__(self, other): # pragma: no cover
if isinstance(other, self.__class__):

View File

@@ -3,6 +3,7 @@ import fnmatch
from certbot_apache._internal import interfaces
PASS = "CERTBOT_PASS_ASSERT"
@@ -136,6 +137,6 @@ def assertEqualPathsList(first, second): # pragma: no cover
if any(isPass(path) for path in second):
return
for fpath in first:
assert any(fnmatch.fnmatch(fpath, spath) for spath in second)
assert any([fnmatch.fnmatch(fpath, spath) for spath in second])
for spath in second:
assert any(fnmatch.fnmatch(fpath, spath) for fpath in first)
assert any([fnmatch.fnmatch(fpath, spath) for fpath in first])

View File

@@ -6,7 +6,7 @@ Authors:
Raphael Pinson <raphink@gmail.com>
About: Reference
Online Apache configuration manual: https://httpd.apache.org/docs/trunk/
Online Apache configuration manual: http://httpd.apache.org/docs/trunk/
About: License
This file is licensed under the LGPL v2+.

View File

@@ -64,10 +64,10 @@ Translates over to:
"/files/etc/apache2/apache2.conf/bLoCk[1]",
]
"""
from typing import Set
from acme.magic_typing import Set
from certbot import errors
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import assertions
from certbot_apache._internal import interfaces
@@ -80,7 +80,7 @@ class AugeasParserNode(interfaces.ParserNode):
def __init__(self, **kwargs):
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs) # pylint: disable=unused-variable
super().__init__(**kwargs)
super(AugeasParserNode, self).__init__(**kwargs)
self.ancestor = ancestor
self.filepath = filepath
self.dirty = dirty
@@ -169,7 +169,7 @@ class AugeasCommentNode(AugeasParserNode):
def __init__(self, **kwargs):
comment, kwargs = util.commentnode_kwargs(kwargs) # pylint: disable=unused-variable
super().__init__(**kwargs)
super(AugeasCommentNode, self).__init__(**kwargs)
# self.comment = comment
self.comment = comment
@@ -188,7 +188,7 @@ class AugeasDirectiveNode(AugeasParserNode):
def __init__(self, **kwargs):
name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
super().__init__(**kwargs)
super(AugeasDirectiveNode, self).__init__(**kwargs)
self.name = name
self.enabled = enabled
if parameters:
@@ -245,7 +245,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
""" Augeas implementation of BlockNode interface """
def __init__(self, **kwargs):
super().__init__(**kwargs)
super(AugeasBlockNode, self).__init__(**kwargs)
self.children = ()
def __eq__(self, other):
@@ -355,7 +355,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
ownpath = self.metadata.get("augeaspath")
directives = self.parser.find_dir(name, start=ownpath, exclude=exclude)
already_parsed: Set[str] = set()
already_parsed = set() # type: Set[str]
for directive in directives:
# Remove the /arg part from the Augeas path
directive = directive.partition("/arg")[0]

View File

@@ -1,28 +1,35 @@
"""Apache Configurator."""
# pylint: disable=too-many-lines
from collections import defaultdict
import copy
from distutils.version import LooseVersion
import copy
import fnmatch
import logging
import re
import socket
import time
from typing import DefaultDict
from typing import Dict
from typing import List
from typing import Optional
from typing import Set
from typing import Union
import six
import zope.component
import zope.interface
try:
import apacheconfig
HAS_APACHECONFIG = True
except ImportError: # pragma: no cover
HAS_APACHECONFIG = False
from acme import challenges
from acme.magic_typing import DefaultDict
from acme.magic_typing import Dict
from acme.magic_typing import List
from acme.magic_typing import Set
from acme.magic_typing import Union
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge # pylint: disable=unused-import
from certbot.compat import filesystem
from certbot.compat import os
from certbot.display import util as display_util
from certbot.plugins import common
from certbot.plugins.enhancements import AutoHSTSEnhancement
from certbot.plugins.util import path_surgery
@@ -34,61 +41,10 @@ from certbot_apache._internal import dualparser
from certbot_apache._internal import http_01
from certbot_apache._internal import obj
from certbot_apache._internal import parser
from certbot_apache._internal.dualparser import DualBlockNode
from certbot_apache._internal.obj import VirtualHost
from certbot_apache._internal.parser import ApacheParser
try:
import apacheconfig
HAS_APACHECONFIG = True
except ImportError: # pragma: no cover
HAS_APACHECONFIG = False
logger = logging.getLogger(__name__)
class OsOptions:
"""
Dedicated class to describe the OS specificities (eg. paths, binary names)
that the Apache configurator needs to be aware to operate properly.
"""
def __init__(self,
server_root="/etc/apache2",
vhost_root="/etc/apache2/sites-available",
vhost_files="*",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd: Optional[List[str]] = None,
restart_cmd: Optional[List[str]] = None,
restart_cmd_alt: Optional[List[str]] = None,
conftest_cmd: Optional[List[str]] = None,
enmod: Optional[str] = None,
dismod: Optional[str] = None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2",
apache_bin: Optional[str] = None,
):
self.server_root = server_root
self.vhost_root = vhost_root
self.vhost_files = vhost_files
self.logs_root = logs_root
self.ctl = ctl
self.version_cmd = ['apache2ctl', '-v'] if not version_cmd else version_cmd
self.restart_cmd = ['apache2ctl', 'graceful'] if not restart_cmd else restart_cmd
self.restart_cmd_alt = restart_cmd_alt
self.conftest_cmd = ['apache2ctl', 'configtest'] if not conftest_cmd else conftest_cmd
self.enmod = enmod
self.dismod = dismod
self.le_vhost_ext = le_vhost_ext
self.handle_modules = handle_modules
self.handle_sites = handle_sites
self.challenge_location = challenge_location
self.bin = apache_bin
# TODO: Augeas sections ie. <VirtualHost>, <IfModule> beginning and closing
# tags need to be the same case, otherwise Augeas doesn't recognize them.
# This is not able to be completely remedied by regular expressions because
@@ -117,11 +73,14 @@ class OsOptions:
# TODO: Add directives to sites-enabled... not sites-available.
# sites-available doesn't allow immediate find_dir search even with save()
# and load()
class ApacheConfigurator(common.Installer, interfaces.Authenticator):
@zope.interface.implementer(interfaces.IAuthenticator, interfaces.IInstaller)
@zope.interface.provider(interfaces.IPluginFactory)
class ApacheConfigurator(common.Installer):
"""Apache configurator.
:ivar config: Configuration.
:type config: certbot.configuration.NamespaceConfig
:type config: :class:`~certbot.interfaces.IConfig`
:ivar parser: Handles low level parsing
:type parser: :class:`~certbot_apache._internal.parser`
@@ -141,7 +100,27 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
" change depending on the operating system Certbot is run on.)"
)
OS_DEFAULTS = OsOptions()
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/sites-available",
vhost_files="*",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
restart_cmd=['apache2ctl', 'graceful'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2",
bin=None
)
def option(self, key):
"""Get a value from options"""
return self.options.get(key)
def pick_apache_config(self, warn_on_no_mod_ssl=True):
"""
@@ -171,14 +150,14 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
for o in opts:
# Config options use dashes instead of underscores
if self.conf(o.replace("_", "-")) is not None:
setattr(self.options, o, self.conf(o.replace("_", "-")))
self.options[o] = self.conf(o.replace("_", "-"))
else:
setattr(self.options, o, getattr(self.OS_DEFAULTS, o))
self.options[o] = self.OS_DEFAULTS[o]
# Special cases
self.options.version_cmd[0] = self.options.ctl
self.options.restart_cmd[0] = self.options.ctl
self.options.conftest_cmd[0] = self.options.ctl
self.options["version_cmd"][0] = self.option("ctl")
self.options["restart_cmd"][0] = self.option("ctl")
self.options["conftest_cmd"][0] = self.option("ctl")
@classmethod
def add_parser_arguments(cls, add):
@@ -193,30 +172,30 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
else:
# cls.OS_DEFAULTS can be distribution specific, see override classes
DEFAULTS = cls.OS_DEFAULTS
add("enmod", default=DEFAULTS.enmod,
add("enmod", default=DEFAULTS["enmod"],
help="Path to the Apache 'a2enmod' binary")
add("dismod", default=DEFAULTS.dismod,
add("dismod", default=DEFAULTS["dismod"],
help="Path to the Apache 'a2dismod' binary")
add("le-vhost-ext", default=DEFAULTS.le_vhost_ext,
add("le-vhost-ext", default=DEFAULTS["le_vhost_ext"],
help="SSL vhost configuration extension")
add("server-root", default=DEFAULTS.server_root,
add("server-root", default=DEFAULTS["server_root"],
help="Apache server root directory")
add("vhost-root", default=None,
help="Apache server VirtualHost configuration root")
add("logs-root", default=DEFAULTS.logs_root,
add("logs-root", default=DEFAULTS["logs_root"],
help="Apache server logs directory")
add("challenge-location",
default=DEFAULTS.challenge_location,
default=DEFAULTS["challenge_location"],
help="Directory path for challenge configuration")
add("handle-modules", default=DEFAULTS.handle_modules,
add("handle-modules", default=DEFAULTS["handle_modules"],
help="Let installer handle enabling required modules for you " +
"(Only Ubuntu/Debian currently)")
add("handle-sites", default=DEFAULTS.handle_sites,
add("handle-sites", default=DEFAULTS["handle_sites"],
help="Let installer handle enabling sites for you " +
"(Only Ubuntu/Debian currently)")
add("ctl", default=DEFAULTS.ctl,
add("ctl", default=DEFAULTS["ctl"],
help="Full path to Apache control script")
add("bin", default=DEFAULTS.bin,
add("bin", default=DEFAULTS["bin"],
help="Full path to apache2/httpd binary")
def __init__(self, *args, **kwargs):
@@ -229,33 +208,33 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
version = kwargs.pop("version", None)
use_parsernode = kwargs.pop("use_parsernode", False)
openssl_version = kwargs.pop("openssl_version", None)
super().__init__(*args, **kwargs)
super(ApacheConfigurator, self).__init__(*args, **kwargs)
# Add name_server association dict
self.assoc: Dict[str, obj.VirtualHost] = {}
self.assoc = {} # type: Dict[str, obj.VirtualHost]
# Outstanding challenges
self._chall_out: Set[KeyAuthorizationAnnotatedChallenge] = set()
self._chall_out = set() # type: Set[KeyAuthorizationAnnotatedChallenge]
# List of vhosts configured per wildcard domain on this run.
# used by deploy_cert() and enhance()
self._wildcard_vhosts: Dict[str, List[obj.VirtualHost]] = {}
self._wildcard_vhosts = {} # type: Dict[str, List[obj.VirtualHost]]
# Maps enhancements to vhosts we've enabled the enhancement for
self._enhanced_vhosts: DefaultDict[str, Set[obj.VirtualHost]] = defaultdict(set)
self._enhanced_vhosts = defaultdict(set) # type: DefaultDict[str, Set[obj.VirtualHost]]
# Temporary state for AutoHSTS enhancement
self._autohsts: Dict[str, Dict[str, Union[int, float]]] = {}
self._autohsts = {} # type: Dict[str, Dict[str, Union[int, float]]]
# Reverter save notes
self.save_notes = ""
# Should we use ParserNode implementation instead of the old behavior
self.USE_PARSERNODE = use_parsernode
# Saves the list of file paths that were parsed initially, and
# not added to parser tree by self.conf("vhost-root") for example.
self.parsed_paths: List[str] = []
self.parsed_paths = [] # type: List[str]
# These will be set in the prepare function
self._prepared = False
self.parser: ApacheParser
self.parser_root: Optional[DualBlockNode] = None
self.parser = None
self.parser_root = None
self.version = version
self._openssl_version = openssl_version
self.vhosts: List[VirtualHost]
self.vhosts = None
self.options = copy.deepcopy(self.OS_DEFAULTS)
self._enhance_func = {"redirect": self._enable_redirect,
"ensure-http-header": self._set_http_header,
@@ -305,8 +284,8 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
ssl_module_location = self.parser.standard_path_from_server_root(ssl_module_location)
else:
# Possibility B: ssl_module is statically linked into Apache
if self.options.bin:
ssl_module_location = self.options.bin
if self.option("bin"):
ssl_module_location = self.option("bin")
else:
logger.warning("ssl_module is statically linked but --apache-bin is "
"missing; not disabling session tickets.")
@@ -336,7 +315,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
self._prepare_options()
# Verify Apache is installed
self._verify_exe_availability(self.options.ctl)
self._verify_exe_availability(self.option("ctl"))
# Make sure configuration is valid
self.config_test()
@@ -349,9 +328,6 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
if self.version < (2, 2):
raise errors.NotSupportedError(
"Apache Version {0} not supported.".format(str(self.version)))
elif self.version < (2, 4):
logger.warning('Support for Apache 2.2 is deprecated and will be removed in a '
'future release.')
# Recover from previous crash before Augeas initialization to have the
# correct parse tree from the get go.
@@ -364,9 +340,8 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
"augeaspath": self.parser.get_root_augpath(),
"ac_ast": None}
if self.USE_PARSERNODE:
parser_root = self.get_parsernode_root(pn_meta)
self.parser_root = parser_root
self.parsed_paths = parser_root.parsed_paths()
self.parser_root = self.get_parsernode_root(pn_meta)
self.parsed_paths = self.parser_root.parsed_paths()
# Check for errors in parsing files with Augeas
self.parser.check_parsing_errors("httpd.aug")
@@ -376,20 +351,20 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
# We may try to enable mod_ssl later. If so, we shouldn't warn if we can't find it now.
# This is currently only true for debian/ubuntu.
warn_on_no_mod_ssl = not self.options.handle_modules
warn_on_no_mod_ssl = not self.option("handle_modules")
self.install_ssl_options_conf(self.mod_ssl_conf,
self.updated_mod_ssl_conf_digest,
warn_on_no_mod_ssl)
# Prevent two Apache plugins from modifying a config at once
try:
util.lock_dir_until_exit(self.options.server_root)
util.lock_dir_until_exit(self.option("server_root"))
except (OSError, errors.LockError):
logger.debug("Encountered error:", exc_info=True)
raise errors.PluginError(
"Unable to create a lock file in {0}. Are you running"
" Certbot with sufficient privileges to modify your"
" Apache configuration?".format(self.options.server_root))
" Apache configuration?".format(self.option("server_root")))
self._prepared = True
def save(self, title=None, temporary=False):
@@ -425,10 +400,10 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
:raises .errors.PluginError: If unable to recover the configuration
"""
super().recovery_routine()
super(ApacheConfigurator, self).recovery_routine()
# Reload configuration after these changes take effect if needed
# ie. ApacheParser has been initialized.
if hasattr(self, "parser"):
if self.parser:
# TODO: wrap into non-implementation specific parser interface
self.parser.aug.load()
@@ -450,7 +425,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
the function is unable to correctly revert the configuration
"""
super().rollback_checkpoints(rollback)
super(ApacheConfigurator, self).rollback_checkpoints(rollback)
self.parser.aug.load()
def _verify_exe_availability(self, exe):
@@ -464,7 +439,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
"""Initializes the ApacheParser"""
# If user provided vhost_root value in command line, use it
return parser.ApacheParser(
self.options.server_root, self.conf("vhost-root"),
self.option("server_root"), self.conf("vhost-root"),
self.version, configurator=self)
def get_parsernode_root(self, metadata):
@@ -472,9 +447,9 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
if HAS_APACHECONFIG:
apache_vars = {}
apache_vars["defines"] = apache_util.parse_defines(self.options.ctl)
apache_vars["includes"] = apache_util.parse_includes(self.options.ctl)
apache_vars["modules"] = apache_util.parse_modules(self.options.ctl)
apache_vars["defines"] = apache_util.parse_defines(self.option("ctl"))
apache_vars["includes"] = apache_util.parse_includes(self.option("ctl"))
apache_vars["modules"] = apache_util.parse_modules(self.option("ctl"))
metadata["apache_vars"] = apache_vars
with open(self.parser.loc["root"]) as f:
@@ -489,6 +464,21 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
metadata=metadata
)
def _wildcard_domain(self, domain):
"""
Checks if domain is a wildcard domain
:param str domain: Domain to check
:returns: If the domain is wildcard domain
:rtype: bool
"""
if isinstance(domain, six.text_type):
wildcard_marker = u"*."
else:
wildcard_marker = b"*."
return domain.startswith(wildcard_marker)
def deploy_cert(self, domain, cert_path, key_path,
chain_path=None, fullchain_path=None):
"""Deploys certificate to specified virtual host.
@@ -510,8 +500,6 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
vhosts = self.choose_vhosts(domain)
for vhost in vhosts:
self._deploy_cert(vhost, cert_path, key_path, chain_path, fullchain_path)
display_util.notify("Successfully deployed certificate for {} to {}"
.format(domain, vhost.filep))
def choose_vhosts(self, domain, create_if_no_ssl=True):
"""
@@ -525,7 +513,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
:rtype: `list` of :class:`~certbot_apache._internal.obj.VirtualHost`
"""
if util.is_wildcard_domain(domain):
if self._wildcard_domain(domain):
if domain in self._wildcard_vhosts:
# Vhosts for a wildcard domain were already selected
return self._wildcard_vhosts[domain]
@@ -550,19 +538,6 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
return list(matched)
def _raise_no_suitable_vhost_error(self, target_name: str):
"""
Notifies the user that Certbot could not find a vhost to secure
and raises an error.
:param str target_name: The server name that could not be mapped
:raises errors.PluginError: Raised unconditionally
"""
raise errors.PluginError(
"Certbot could not find a VirtualHost for {0} in the Apache "
"configuration. Please create a VirtualHost with a ServerName "
"matching {0} and try again.".format(target_name)
)
def _in_wildcard_scope(self, name, domain):
"""
Helper method for _vhosts_for_wildcard() that makes sure that the domain
@@ -600,7 +575,12 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
dialog_output = display_ops.select_vhost_multiple(list(dialog_input))
if not dialog_output:
self._raise_no_suitable_vhost_error(domain)
logger.error(
"No vhost exists with servername or alias for domain %s. "
"No vhost was selected. Please specify ServerName or ServerAlias "
"in the Apache config.",
domain)
raise errors.PluginError("No vhost selected")
# Make sure we create SSL vhosts for the ones that are HTTP only
# if requested.
@@ -724,7 +704,12 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
# Select a vhost from a list
vhost = display_ops.select_vhost(target_name, self.vhosts)
if vhost is None:
self._raise_no_suitable_vhost_error(target_name)
logger.error(
"No vhost exists with servername or alias of %s. "
"No vhost was selected. Please specify ServerName or ServerAlias "
"in the Apache config.",
target_name)
raise errors.PluginError("No vhost selected")
if temp:
return vhost
if not vhost.ssl:
@@ -860,7 +845,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
:rtype: set
"""
all_names: Set[str] = set()
all_names = set() # type: Set[str]
vhost_macro = []
@@ -878,7 +863,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
all_names.add(name)
if vhost_macro:
display_util.notification(
zope.component.getUtility(interfaces.IDisplay).notification(
"Apache mod_macro seems to be in use in file(s):\n{0}"
"\n\nUnfortunately mod_macro is not yet supported".format(
"\n ".join(vhost_macro)), force_interactive=True)
@@ -1024,8 +1009,8 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
"""
# Search base config, and all included paths for VirtualHosts
file_paths: Dict[str, str] = {}
internal_paths: DefaultDict[str, Set[str]] = defaultdict(set)
file_paths = {} # type: Dict[str, str]
internal_paths = defaultdict(set) # type: DefaultDict[str, Set[str]]
vhs = []
# Make a list of parser paths because the parser_paths
# dictionary may be modified during the loop.
@@ -1076,9 +1061,6 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
:rtype: list
"""
if not self.parser_root:
raise errors.Error("This ApacheConfigurator instance is not" # pragma: no cover
" configured to use a node parser.")
vhs = []
vhosts = self.parser_root.find_blocks("VirtualHost", exclude=False)
for vhblock in vhosts:
@@ -1331,7 +1313,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
:param boolean temp: If the change is temporary
"""
if self.options.handle_modules:
if self.option("handle_modules"):
if self.version >= (2, 4) and ("socache_shmcb_module" not in
self.parser.modules):
self.enable_mod("socache_shmcb", temp=temp)
@@ -1351,7 +1333,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
Duplicates vhost and adds default ssl options
New vhost will reside as (nonssl_vhost.path) +
``self.options.le_vhost_ext``
``self.option("le_vhost_ext")``
.. note:: This function saves the configuration
@@ -1450,15 +1432,15 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
"""
if self.conf("vhost-root") and os.path.exists(self.conf("vhost-root")):
fp = os.path.join(filesystem.realpath(self.options.vhost_root),
fp = os.path.join(filesystem.realpath(self.option("vhost_root")),
os.path.basename(non_ssl_vh_fp))
else:
# Use non-ssl filepath
fp = filesystem.realpath(non_ssl_vh_fp)
if fp.endswith(".conf"):
return fp[:-(len(".conf"))] + self.options.le_vhost_ext
return fp + self.options.le_vhost_ext
return fp[:-(len(".conf"))] + self.option("le_vhost_ext")
return fp + self.option("le_vhost_ext")
def _sift_rewrite_rule(self, line):
"""Decides whether a line should be copied to a SSL vhost.
@@ -1480,7 +1462,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
if not line.lower().lstrip().startswith("rewriterule"):
return False
# According to: https://httpd.apache.org/docs/2.4/rewrite/flags.html
# According to: http://httpd.apache.org/docs/2.4/rewrite/flags.html
# The syntax of a RewriteRule is:
# RewriteRule pattern target [Flag1,Flag2,Flag3]
# i.e. target is required, so it must exist.
@@ -1532,11 +1514,12 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
raise errors.PluginError("Unable to write/read in make_vhost_ssl")
if sift:
display_util.notify(
f"Some rewrite rules copied from {vhost.filep} were disabled in the "
f"vhost for your HTTPS site located at {ssl_fp} because they have "
"the potential to create redirection loops."
)
reporter = zope.component.getUtility(interfaces.IReporter)
reporter.add_message(
"Some rewrite rules copied from {0} were disabled in the "
"vhost for your HTTPS site located at {1} because they have "
"the potential to create redirection loops.".format(
vhost.filep, ssl_fp), reporter.MEDIUM_PRIORITY)
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(ssl_fp)), "0")
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(vhost.filep)), "0")
@@ -1865,13 +1848,13 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
if options:
msg_enhancement += ": " + options
msg = msg_tmpl.format(domain, msg_enhancement)
logger.error(msg)
logger.warning(msg)
raise errors.PluginError(msg)
try:
for vhost in vhosts:
func(vhost, options)
except errors.PluginError:
logger.error("Failed %s for %s", enhancement, domain)
logger.warning("Failed %s for %s", enhancement, domain)
raise
def _autohsts_increase(self, vhost, id_str, nextstep):
@@ -2186,7 +2169,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
# There can be other RewriteRule directive lines in vhost config.
# rewrite_args_dict keys are directive ids and the corresponding value
# for each is a list of arguments to that directive.
rewrite_args_dict: DefaultDict[str, List[str]] = defaultdict(list)
rewrite_args_dict = defaultdict(list) # type: DefaultDict[str, List[str]]
pat = r'(.*directive\[\d+\]).*'
for match in rewrite_path:
m = re.match(pat, match)
@@ -2280,7 +2263,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
if ssl_vhost.aliases:
serveralias = "ServerAlias " + " ".join(ssl_vhost.aliases)
rewrite_rule_args: List[str] = []
rewrite_rule_args = [] # type: List[str]
if self.get_version() >= (2, 3, 9):
rewrite_rule_args = constants.REWRITE_HTTPS_ARGS_WITH_END
else:
@@ -2301,7 +2284,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
addr in self._get_proposed_addrs(ssl_vhost)),
servername, serveralias,
" ".join(rewrite_rule_args),
self.options.logs_root))
self.option("logs_root")))
def _write_out_redirect(self, ssl_vhost, text):
# This is the default name
@@ -2313,7 +2296,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
if len(ssl_vhost.name) < (255 - (len(redirect_filename) + 1)):
redirect_filename = "le-redirect-%s.conf" % ssl_vhost.name
redirect_filepath = os.path.join(self.options.vhost_root,
redirect_filepath = os.path.join(self.option("vhost_root"),
redirect_filename)
# Register the new file that will be created
@@ -2395,7 +2378,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
vhost.enabled = True
return
def enable_mod(self, mod_name, temp=False): # pylint: disable=unused-argument
def enable_mod(self, mod_name, temp=False):
"""Enables module in Apache.
Both enables and reloads Apache so module is active.
@@ -2433,18 +2416,19 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
"""
try:
util.run_script(self.options.restart_cmd)
util.run_script(self.option("restart_cmd"))
except errors.SubprocessError as err:
logger.warning("Unable to restart apache using %s",
self.options.restart_cmd)
alt_restart = self.options.restart_cmd_alt
logger.info("Unable to restart apache using %s",
self.option("restart_cmd"))
alt_restart = self.option("restart_cmd_alt")
if alt_restart:
logger.debug("Trying alternative restart command: %s",
alt_restart)
# There is an alternative restart command available
# This usually is "restart" verb while original is "graceful"
try:
util.run_script(self.options.restart_cmd_alt)
util.run_script(self.option(
"restart_cmd_alt"))
return
except errors.SubprocessError as secerr:
error = str(secerr)
@@ -2459,7 +2443,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
"""
try:
util.run_script(self.options.conftest_cmd)
util.run_script(self.option("conftest_cmd"))
except errors.SubprocessError as err:
raise errors.MisconfigurationError(str(err))
@@ -2475,11 +2459,11 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
"""
try:
stdout, _ = util.run_script(self.options.version_cmd)
stdout, _ = util.run_script(self.option("version_cmd"))
except errors.SubprocessError:
raise errors.PluginError(
"Unable to run %s -v" %
self.options.version_cmd)
self.option("version_cmd"))
regex = re.compile(r"Apache/([0-9\.]*)", re.IGNORECASE)
matches = regex.findall(stdout)
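The version probe above relies on a permissive regex over `apachectl -v` style output; a quick illustrative check (the sample string is typical output, not taken from the test suite):

import re

sample = "Server version: Apache/2.4.41 (Ubuntu)"
regex = re.compile(r"Apache/([0-9\.]*)", re.IGNORECASE)
assert regex.findall(sample) == ["2.4.41"]
assert tuple(int(p) for p in regex.findall(sample)[0].split(".")) >= (2, 4)
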
@@ -2499,11 +2483,6 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
version=".".join(str(i) for i in self.version))
)
def auth_hint(self, failed_achalls): # pragma: no cover
return ("The Certificate Authority failed to verify the temporary Apache configuration "
"changes made by Certbot. Ensure that the listed domains point to this Apache "
"server and that it is accessible from the internet.")
###########################################################################
# Challenges Section
###########################################################################
@@ -2597,7 +2576,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
msg_tmpl = ("Certbot was not able to find SSL VirtualHost for a "
"domain {0} for enabling AutoHSTS enhancement.")
msg = msg_tmpl.format(d)
logger.error(msg)
logger.warning(msg)
raise errors.PluginError(msg)
for vh in vhosts:
try:
@@ -2683,7 +2662,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
except errors.PluginError:
msg = ("Could not find VirtualHost with ID {0}, disabling "
"AutoHSTS for this VirtualHost").format(id_str)
logger.error(msg)
logger.warning(msg)
# Remove the orphaned AutoHSTS entry from pluginstorage
self._autohsts.pop(id_str)
continue
@@ -2723,7 +2702,7 @@ class ApacheConfigurator(common.Installer, interfaces.Authenticator):
except errors.PluginError:
msg = ("VirtualHost with id {} was not found, unable to "
"make HSTS max-age permanent.").format(id_str)
logger.error(msg)
logger.warning(msg)
self._autohsts.pop(id_str)
continue
if self._autohsts_vhost_in_lineage(vhost, lineage):

View File

@@ -4,13 +4,11 @@ import pkg_resources
from certbot.compat import os
MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
"""Name of the mod_ssl config file as saved
in `certbot.configuration.NamespaceConfig.config_dir`."""
"""Name of the mod_ssl config file as saved in `IConfig.config_dir`."""
UPDATED_MOD_SSL_CONF_DIGEST = ".updated-options-ssl-apache-conf-digest.txt"
"""Name of the hash of the updated or informed mod_ssl_conf as saved
in `certbot.configuration.NamespaceConfig.config_dir`."""
"""Name of the hash of the updated or informed mod_ssl_conf as saved in `IConfig.config_dir`."""
# NEVER REMOVE A SINGLE HASH FROM THIS LIST UNLESS YOU KNOW EXACTLY WHAT YOU ARE DOING!
ALL_SSL_OPTIONS_HASHES = [

View File

@@ -1,9 +1,12 @@
"""Contains UI methods for Apache operations."""
import logging
import zope.component
from certbot import errors
from certbot import interfaces
from certbot.compat import os
from certbot.display import util as display_util
import certbot.display.util as display_util
logger = logging.getLogger(__name__)
@@ -23,7 +26,7 @@ def select_vhost_multiple(vhosts):
# Remove the extra newline from the last entry
if tags_list:
tags_list[-1] = tags_list[-1][:-1]
code, names = display_util.checklist(
code, names = zope.component.getUtility(interfaces.IDisplay).checklist(
"Which VirtualHosts would you like to install the wildcard certificate for?",
tags=tags_list, force_interactive=True)
if code == display_util.OK:
@@ -31,7 +34,6 @@ def select_vhost_multiple(vhosts):
return return_vhosts
return []
def _reversemap_vhosts(names, vhosts):
"""Helper function for select_vhost_multiple for mapping string
representations back to actual vhost objects"""
@@ -43,7 +45,6 @@ def _reversemap_vhosts(names, vhosts):
return_vhosts.append(vhost)
return return_vhosts
def select_vhost(domain, vhosts):
"""Select an appropriate Apache Vhost.
@@ -61,7 +62,6 @@ def select_vhost(domain, vhosts):
return vhosts[tag]
return None
def _vhost_menu(domain, vhosts):
"""Select an appropriate Apache Vhost.
@@ -107,7 +107,7 @@ def _vhost_menu(domain, vhosts):
)
try:
code, tag = display_util.menu(
code, tag = zope.component.getUtility(interfaces.IDisplay).menu(
"We were unable to find a vhost with a ServerName "
"or Address of {0}.{1}Which virtual host would you "
"like to choose?".format(domain, os.linesep),
@@ -119,7 +119,7 @@ def _vhost_menu(domain, vhosts):
"guidance in non-interactive mode. Certbot may need "
"vhosts to be explicitly labelled with ServerName or "
"ServerAlias directives.".format(domain))
logger.error(msg)
logger.warning(msg)
raise errors.MissingCommandlineFlag(msg)
return code, tag

View File

@@ -1,10 +1,10 @@
""" Dual ParserNode implementation """
from certbot_apache._internal import apacheparser
from certbot_apache._internal import assertions
from certbot_apache._internal import augeasparser
from certbot_apache._internal import apacheparser
class DualNodeBase:
class DualNodeBase(object):
""" Dual parser interface for in development testing. This is used as the
base class for dual parser interface classes. This class handles runtime
attribute value assertions."""

View File

@@ -10,7 +10,6 @@ from certbot_apache._internal import override_debian
from certbot_apache._internal import override_fedora
from certbot_apache._internal import override_gentoo
from certbot_apache._internal import override_suse
from certbot_apache._internal import override_void
OVERRIDE_CLASSES = {
"arch": override_arch.ArchConfigurator,
@@ -36,7 +35,6 @@ OVERRIDE_CLASSES = {
"sles": override_suse.OpenSUSEConfigurator,
"scientific": override_centos.CentOSConfigurator,
"scientific linux": override_centos.CentOSConfigurator,
"void": override_void.VoidConfigurator,
}

View File

@@ -1,9 +1,9 @@
"""A class that performs HTTP-01 challenges for Apache"""
import errno
import logging
from typing import List
from typing import Set
import errno
from acme.magic_typing import List
from acme.magic_typing import Set
from certbot import errors
from certbot.compat import filesystem
from certbot.compat import os
@@ -47,7 +47,7 @@ class ApacheHttp01(common.ChallengePerformer):
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
super(ApacheHttp01, self).__init__(*args, **kwargs)
self.challenge_conf_pre = os.path.join(
self.configurator.conf("challenge-location"),
"le_http_01_challenge_pre.conf")
@@ -57,7 +57,7 @@ class ApacheHttp01(common.ChallengePerformer):
self.challenge_dir = os.path.join(
self.configurator.config.work_dir,
"http_challenges")
self.moded_vhosts: Set[VirtualHost] = set()
self.moded_vhosts = set() # type: Set[VirtualHost]
def perform(self):
"""Perform all HTTP-01 challenges."""
@@ -93,12 +93,12 @@ class ApacheHttp01(common.ChallengePerformer):
self.configurator.enable_mod(mod, temp=True)
def _mod_config(self):
selected_vhosts: List[VirtualHost] = []
selected_vhosts = [] # type: List[VirtualHost]
http_port = str(self.configurator.config.http01_port)
# Search for VirtualHosts matching by name
for chall in self.achalls:
selected_vhosts += self._matching_vhosts(chall.domain)
# Search for matching VirtualHosts
for vh in self._matching_vhosts(chall.domain):
selected_vhosts.append(vh)
# Ensure that we have one or more VirtualHosts that we can continue
# with. (one that listens to port configured with --http-01-port)
@@ -107,13 +107,9 @@ class ApacheHttp01(common.ChallengePerformer):
if any(a.is_wildcard() or a.get_port() == http_port for a in vhost.addrs):
found = True
# If there's at least one eligible VirtualHost, also add all unnamed VirtualHosts
# because they might match at runtime (#8890)
if found:
selected_vhosts += self._unnamed_vhosts()
# Otherwise, add every Virtualhost which listens on the right port
else:
selected_vhosts += self._relevant_vhosts()
if not found:
for vh in self._relevant_vhosts():
selected_vhosts.append(vh)
# Add the challenge configuration
for vh in selected_vhosts:
@@ -171,13 +167,9 @@ class ApacheHttp01(common.ChallengePerformer):
return relevant_vhosts
def _unnamed_vhosts(self) -> List[VirtualHost]:
"""Return all VirtualHost objects with no ServerName"""
return [vh for vh in self.configurator.vhosts if vh.name is None]
def _set_up_challenges(self):
if not os.path.isdir(self.challenge_dir):
old_umask = filesystem.umask(0o022)
old_umask = os.umask(0o022)
try:
filesystem.makedirs(self.challenge_dir, 0o755)
except OSError as exception:
@@ -185,7 +177,7 @@ class ApacheHttp01(common.ChallengePerformer):
raise errors.PluginError(
"Couldn't create root for http-01 challenge")
finally:
filesystem.umask(old_umask)
os.umask(old_umask)
responses = []
for achall in self.achalls:

View File

@@ -100,9 +100,12 @@ For this reason the internal representation of data should not ignore the case.
"""
import abc
import six
class ParserNode(object, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class ParserNode(object):
"""
ParserNode is the basic building block of the tree of such nodes,
representing the structure of the configuration. It is largely meant to keep
@@ -201,7 +204,9 @@ class ParserNode(object, metaclass=abc.ABCMeta):
"""
class CommentNode(ParserNode, metaclass=abc.ABCMeta):
# Linter rule exclusion done because of https://github.com/PyCQA/pylint/issues/179
@six.add_metaclass(abc.ABCMeta) # pylint: disable=abstract-method
class CommentNode(ParserNode):
"""
CommentNode class is used for representation of comments within the parsed
configuration structure. Because of the nature of comments, it is not able
@@ -238,13 +243,14 @@ class CommentNode(ParserNode, metaclass=abc.ABCMeta):
created or changed after the last save. Default: False.
:type dirty: bool
"""
super().__init__(ancestor=kwargs['ancestor'],
super(CommentNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {})) # pragma: no cover
class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class DirectiveNode(ParserNode):
"""
DirectiveNode class represents a configuration directive within the configuration.
It can have zero or more parameters attached to it. Because of the nature of
@@ -302,7 +308,7 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
:type enabled: bool
"""
super().__init__(ancestor=kwargs['ancestor'],
super(DirectiveNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {})) # pragma: no cover
@@ -312,14 +318,15 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
"""
Sets the sequence of parameters for this ParserNode object without
whitespaces. While the whitespaces for parameters are discarded when using
this method, the whitespacing preceding the ParserNode itself should be
this method, the whitespacing preceeding the ParserNode itself should be
kept intact.
:param list parameters: sequence of parameters
"""
class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class BlockNode(DirectiveNode):
"""
BlockNode class represents a block of nested configuration directives, comments
and other blocks as its children. A BlockNode can have zero or more parameters
@@ -364,7 +371,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
def add_child_block(self, name, parameters=None, position=None):
"""
Adds a new BlockNode child node with provided values and marks the callee
BlockNode dirty. This is used to add new children to the AST. The preceding
BlockNode dirty. This is used to add new children to the AST. The preceeding
whitespaces should not be added based on the ancestor or siblings for the
newly created object. This is to match the current behavior of the legacy
parser implementation.
@@ -385,7 +392,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
Adds a new DirectiveNode child node with provided values and marks the
callee BlockNode dirty. This is used to add new children to the AST. The
preceding whitespaces should not be added based on the ancestor or siblings
preceeding whitespaces should not be added based on the ancestor or siblings
for the newly created object. This is to match the current behavior of the
legacy parser implementation.
@@ -406,7 +413,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
Adds a new CommentNode child node with provided value and marks the
callee BlockNode dirty. This is used to add new children to the AST. The
preceding whitespaces should not be added based on the ancestor or siblings
preceeding whitespaces should not be added based on the ancestor or siblings
for the newly created object. This is to match the current behavior of the
legacy parser implementation.
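The interfaces.py changes above show both spellings of attaching abc.ABCMeta to the ParserNode hierarchy: the Python 3 metaclass keyword and six.add_metaclass. A minimal sketch of the equivalence (the six variant is left as a comment so the sketch has no six dependency):

import abc

class ModernNode(metaclass=abc.ABCMeta):
    """Python 3 spelling of an abstract base."""
    @abc.abstractmethod
    def save(self, msg):
        ...

# Equivalent spelling for code that also had to run on Python 2:
#
# @six.add_metaclass(abc.ABCMeta)
# class LegacyNode(object):
#     @abc.abstractmethod
#     def save(self, msg):
#         ...
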

View File

@@ -1,7 +1,7 @@
"""Module contains classes used by the Apache Configurator."""
import re
from typing import Set
from acme.magic_typing import Set
from certbot.plugins import common
@@ -20,13 +20,16 @@ class Addr(common.Addr):
self.is_wildcard() and other.is_wildcard()))
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "certbot_apache._internal.obj.Addr(" + repr(self.tup) + ")"
def __hash__(self): # pylint: disable=useless-super-delegation
# Python 3 requires explicit overridden for __hash__ if __eq__ or
# __cmp__ is overridden. See https://bugs.python.org/issue2235
return super().__hash__()
return super(Addr, self).__hash__()
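The comment above points at a Python 3 gotcha: defining __eq__ implicitly sets __hash__ to None, so hashability has to be restored explicitly. A minimal standalone illustration, unrelated to Addr itself:

class OnlyEq:
    def __eq__(self, other):
        return isinstance(other, OnlyEq)

class EqAndHash(OnlyEq):
    __hash__ = object.__hash__  # restore hashability, as Addr does via super().__hash__()

try:
    hash(OnlyEq())
except TypeError:
    pass  # defining __eq__ alone made the class unhashable

hash(EqAndHash())  # hashable again
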
def _addr_less_specific(self, addr):
"""Returns if addr.get_addr() is more specific than self.get_addr()."""
@@ -95,7 +98,7 @@ class Addr(common.Addr):
return self.get_addr_obj(port)
class VirtualHost:
class VirtualHost(object):
"""Represents an Apache Virtualhost.
:ivar str filep: file path of VH
@@ -137,7 +140,7 @@ class VirtualHost:
def get_names(self):
"""Return a set of all names."""
all_names: Set[str] = set()
all_names = set() # type: Set[str]
all_names.update(self.aliases)
# Strip out any scheme:// and <port> field from servername
if self.name is not None:
@@ -188,6 +191,9 @@ class VirtualHost:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.filep, self.path,
tuple(self.addrs), tuple(self.get_names()),
@@ -245,7 +251,7 @@ class VirtualHost:
# already_found acts to keep everything very conservative.
# Don't allow multiple ip:ports in same set.
already_found: Set[str] = set()
already_found = set() # type: Set[str]
for addr in vhost.addrs:
for local_addr in self.addrs:

View File

@@ -1,12 +1,15 @@
""" Distribution specific override class for Arch Linux """
import zope.interface
from certbot import interfaces
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
@zope.interface.provider(interfaces.IPluginFactory)
class ArchConfigurator(configurator.ApacheConfigurator):
"""Arch Linux specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/httpd",
vhost_root="/etc/httpd/conf",
vhost_files="*.conf",
@@ -15,5 +18,11 @@ class ArchConfigurator(configurator.ApacheConfigurator):
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf",
bin=None,
)

View File

@@ -1,23 +1,25 @@
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
import logging
from typing import cast
from typing import List
import zope.interface
from acme.magic_typing import List
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.errors import MisconfigurationError
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
from certbot_apache._internal.configurator import OsOptions
logger = logging.getLogger(__name__)
@zope.interface.provider(interfaces.IPluginFactory)
class CentOSConfigurator(configurator.ApacheConfigurator):
"""CentOS specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/httpd",
vhost_root="/etc/httpd/conf.d",
vhost_files="*.conf",
@@ -27,7 +29,13 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
restart_cmd=['apachectl', 'graceful'],
restart_cmd_alt=['apachectl', 'restart'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf.d",
bin=None,
)
def config_test(self):
@@ -42,7 +50,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
fedora = os_info[0].lower() == "fedora"
try:
super().config_test()
super(CentOSConfigurator, self).config_test()
except errors.MisconfigurationError:
if fedora:
self._try_restart_fedora()
@@ -51,7 +59,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
def _try_restart_fedora(self):
"""
Tries to restart httpd using systemctl to generate the self signed key pair.
Tries to restart httpd using systemctl to generate the self signed keypair.
"""
try:
@@ -60,22 +68,20 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
raise errors.MisconfigurationError(str(err))
# Finish with actual config check to see if systemctl restart helped
super().config_test()
super(CentOSConfigurator, self).config_test()
def _prepare_options(self):
"""
Override the options dictionary initialization in order to support
alternative restart cmd used in CentOS.
"""
super()._prepare_options()
if not self.options.restart_cmd_alt: # pragma: no cover
raise ValueError("OS option restart_cmd_alt must be set for CentOS.")
self.options.restart_cmd_alt[0] = self.options.ctl
super(CentOSConfigurator, self)._prepare_options()
self.options["restart_cmd_alt"][0] = self.option("ctl")
def get_parser(self):
"""Initializes the ApacheParser"""
return CentOSParser(
self.options.server_root, self.options.vhost_root,
self.option("server_root"), self.option("vhost_root"),
self.version, configurator=self)
def _deploy_cert(self, *args, **kwargs): # pylint: disable=arguments-differ
@@ -84,7 +90,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
that was created by Certbot
"""
super()._deploy_cert(*args, **kwargs)
super(CentOSConfigurator, self)._deploy_cert(*args, **kwargs)
if self.version < (2, 4, 0):
self._deploy_loadmodule_ssl_if_needed()
@@ -96,9 +102,9 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)
correct_ifmods: List[str] = []
loadmod_args: List[str] = []
loadmod_paths: List[str] = []
correct_ifmods = [] # type: List[str]
loadmod_args = [] # type: List[str]
loadmod_paths = [] # type: List[str]
for m in loadmods:
noarg_path = m.rpartition("/")[0]
path_args = self.parser.get_all_args(noarg_path)
@@ -112,9 +118,8 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
else:
loadmod_args = path_args
centos_parser: CentOSParser = cast(CentOSParser, self.parser)
if centos_parser.not_modssl_ifmodule(noarg_path):
if centos_parser.loc["default"] in noarg_path:
if self.parser.not_modssl_ifmodule(noarg_path): # pylint: disable=no-member
if self.parser.loc["default"] in noarg_path:
# LoadModule already in the main configuration file
if ("ifmodule/" in noarg_path.lower() or
"ifmodule[1]" in noarg_path.lower()):
@@ -162,19 +167,19 @@ class CentOSParser(parser.ApacheParser):
def __init__(self, *args, **kwargs):
# CentOS specific configuration file for Apache
self.sysconfig_filep = "/etc/sysconfig/httpd"
super().__init__(*args, **kwargs)
super(CentOSParser, self).__init__(*args, **kwargs)
def update_runtime_variables(self):
""" Override for update_runtime_variables for custom parsing """
# Opportunistic, works if SELinux not enforced
super().update_runtime_variables()
super(CentOSParser, self).update_runtime_variables()
self.parse_sysconfig_var()
def parse_sysconfig_var(self):
""" Parses Apache CLI options from CentOS configuration file """
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
for k, v in defines.items():
self.variables[k] = v
for k in defines:
self.variables[k] = defines[k]
def not_modssl_ifmodule(self, path):
"""Checks if the provided Augeas path has argument !mod_ssl"""

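parse_sysconfig_var above merges -D defines from the OPTIONS variable in /etc/sysconfig/httpd into the parser's variables dict; the two sides of the hunk differ only in how they iterate the mapping returned by apache_util.parse_define_file. That helper is real but not shown here, so the snippet below is an illustrative, self-contained re-implementation that assumes a simple OPTIONS="..." line format:

# Illustrative only; not apache_util.parse_define_file.
import re
from typing import Dict


def parse_options_defines(text: str, varname: str = "OPTIONS") -> Dict[str, str]:
    """Return {name: value} for every -Dname[=value] token on the varname= line."""
    defines: Dict[str, str] = {}
    match = re.search(r'^\s*%s=["\']?(.*?)["\']?\s*$' % re.escape(varname),
                      text, re.MULTILINE)
    if not match:
        return defines
    for token in match.group(1).split():
        if token.startswith("-D"):
            name, _, value = token[2:].partition("=")
            defines[name] = value
    return defines


sysconfig = 'OPTIONS="-DDOWN_FOR_MAINTENANCE -DMOCK_VALUE=TRUE"\n'
assert parse_options_defines(sysconfig) == {
    "DOWN_FOR_MAINTENANCE": "", "MOCK_VALUE": "TRUE"}
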
View File

@@ -1,17 +1,28 @@
""" Distribution specific override class for macOS """
import zope.interface
from certbot import interfaces
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
@zope.interface.provider(interfaces.IPluginFactory)
class DarwinConfigurator(configurator.ApacheConfigurator):
"""macOS specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/other",
vhost_files="*.conf",
logs_root="/var/log/apache2",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2/other",
bin=None,
)

View File

@@ -1,25 +1,39 @@
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
import logging
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.compat import filesystem
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
logger = logging.getLogger(__name__)
@zope.interface.provider(interfaces.IPluginFactory)
class DebianConfigurator(configurator.ApacheConfigurator):
"""Debian specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/sites-available",
vhost_files="*",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
restart_cmd=['apache2ctl', 'graceful'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod="a2enmod",
dismod="a2dismod",
le_vhost_ext="-le-ssl.conf",
handle_modules=True,
handle_sites=True,
challenge_location="/etc/apache2",
bin=None,
)
def enable_site(self, vhost):
@@ -44,7 +58,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
if not os.path.isdir(os.path.dirname(enabled_path)):
# For some reason, sites-enabled / sites-available do not exist
# Call the parent method
return super().enable_site(vhost)
return super(DebianConfigurator, self).enable_site(vhost)
self.reverter.register_file_creation(False, enabled_path)
try:
os.symlink(vhost.filep, enabled_path)
@@ -54,7 +68,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
# Already in shape
vhost.enabled = True
return None
logger.error(
logger.warning(
"Could not symlink %s to %s, got error: %s", enabled_path,
vhost.filep, err.strerror)
errstring = ("Encountered error while trying to enable a " +
@@ -118,11 +132,11 @@ class DebianConfigurator(configurator.ApacheConfigurator):
# Generate reversal command.
# Try to be safe here... check that we can probably reverse before
# applying enmod command
if not util.exe_exists(self.options.dismod):
if not util.exe_exists(self.option("dismod")):
raise errors.MisconfigurationError(
"Unable to find a2dismod, please make sure a2enmod and "
"a2dismod are configured correctly for certbot.")
self.reverter.register_undo_command(
temp, [self.options.dismod, "-f", mod_name])
util.run_script([self.options.enmod, mod_name])
temp, [self.option("dismod"), "-f", mod_name])
util.run_script([self.option("enmod"), mod_name])
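
The Debian override's enable_site works by symlinking the vhost file from sites-available into sites-enabled and treating an already-correct link as success, while enable_mod registers an a2dismod undo command before running a2enmod. A minimal, certbot-independent sketch of the symlink half of that pattern (hypothetical helper, POSIX-only):

import errno
import os
import tempfile


def enable_site(available_path, enabled_dir):
    """Link a sites-available config into sites-enabled; an existing correct link is fine."""
    enabled_path = os.path.join(enabled_dir, os.path.basename(available_path))
    try:
        os.symlink(available_path, enabled_path)
    except OSError as err:
        already_ok = (err.errno == errno.EEXIST and
                      os.path.realpath(enabled_path) == os.path.realpath(available_path))
        if not already_ok:
            raise
    return enabled_path


avail_dir, enabled_dir = tempfile.mkdtemp(), tempfile.mkdtemp()
conf = os.path.join(avail_dir, "example-le-ssl.conf")
open(conf, "w").close()
enable_site(conf, enabled_dir)
enable_site(conf, enabled_dir)  # second call is a no-op, not an error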

View File

@@ -1,16 +1,19 @@
""" Distribution specific override class for Fedora 29+ """
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
from certbot_apache._internal.configurator import OsOptions
@zope.interface.provider(interfaces.IPluginFactory)
class FedoraConfigurator(configurator.ApacheConfigurator):
"""Fedora 29+ specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/httpd",
vhost_root="/etc/httpd/conf.d",
vhost_files="*.conf",
@@ -20,7 +23,13 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
restart_cmd=['apachectl', 'graceful'],
restart_cmd_alt=['apachectl', 'restart'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf.d",
bin=None,
)
def config_test(self):
@@ -31,19 +40,19 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
during the first (re)start of httpd.
"""
try:
super().config_test()
super(FedoraConfigurator, self).config_test()
except errors.MisconfigurationError:
self._try_restart_fedora()
def get_parser(self):
"""Initializes the ApacheParser"""
return FedoraParser(
self.options.server_root, self.options.vhost_root,
self.option("server_root"), self.option("vhost_root"),
self.version, configurator=self)
def _try_restart_fedora(self):
"""
Tries to restart httpd using systemctl to generate the self signed key pair.
Tries to restart httpd using systemctl to generate the self signed keypair.
"""
try:
util.run_script(['systemctl', 'restart', 'httpd'])
@@ -51,7 +60,7 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
raise errors.MisconfigurationError(str(err))
# Finish with actual config check to see if systemctl restart helped
super().config_test()
super(FedoraConfigurator, self).config_test()
def _prepare_options(self):
"""
@@ -59,12 +68,10 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
instead of httpd and so take advantages of this new bash script in newer versions
of Fedora to restart httpd.
"""
super()._prepare_options()
self.options.restart_cmd[0] = 'apachectl'
if not self.options.restart_cmd_alt: # pragma: no cover
raise ValueError("OS option restart_cmd_alt must be set for Fedora.")
self.options.restart_cmd_alt[0] = 'apachectl'
self.options.conftest_cmd[0] = 'apachectl'
super(FedoraConfigurator, self)._prepare_options()
self.options["restart_cmd"][0] = 'apachectl'
self.options["restart_cmd_alt"][0] = 'apachectl'
self.options["conftest_cmd"][0] = 'apachectl'
class FedoraParser(parser.ApacheParser):
@@ -72,16 +79,16 @@ class FedoraParser(parser.ApacheParser):
def __init__(self, *args, **kwargs):
# Fedora 29+ specific configuration file for Apache
self.sysconfig_filep = "/etc/sysconfig/httpd"
super().__init__(*args, **kwargs)
super(FedoraParser, self).__init__(*args, **kwargs)
def update_runtime_variables(self):
""" Override for update_runtime_variables for custom parsing """
# Opportunistic, works if SELinux not enforced
super().update_runtime_variables()
super(FedoraParser, self).update_runtime_variables()
self._parse_sysconfig_var()
def _parse_sysconfig_var(self):
""" Parses Apache CLI options from Fedora configuration file """
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
for k, v in defines.items():
self.variables[k] = v
for k in defines:
self.variables[k] = defines[k]
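
Both the CentOS and Fedora overrides follow the same recovery flow: run the configuration test, and if it raises MisconfigurationError, restart httpd once via systemctl (which generates the self-signed key pair on the first start) and run the test again. An illustrative standalone version of that flow; the command lists mirror the ones in the hunks above, everything else is a sketch:

import subprocess


class MisconfigurationError(Exception):
    """Stand-in for certbot.errors.MisconfigurationError."""


def config_test(conftest_cmd):
    result = subprocess.run(conftest_cmd, capture_output=True, text=True, check=False)
    if result.returncode != 0:
        raise MisconfigurationError(result.stderr.strip())


def config_test_with_restart(conftest_cmd, restart_cmd):
    try:
        config_test(conftest_cmd)
    except MisconfigurationError:
        # One restart attempt, then re-check whether the configuration is now valid.
        subprocess.run(restart_cmd, check=True)
        config_test(conftest_cmd)


# Example invocation (requires Apache and systemd, so left commented out):
# config_test_with_restart(['apachectl', 'configtest'],
#                          ['systemctl', 'restart', 'httpd'])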

View File

@@ -1,19 +1,33 @@
""" Distribution specific override class for Gentoo Linux """
import zope.interface
from certbot import interfaces
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
from certbot_apache._internal.configurator import OsOptions
@zope.interface.provider(interfaces.IPluginFactory)
class GentooConfigurator(configurator.ApacheConfigurator):
"""Gentoo specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/vhosts.d",
vhost_files="*.conf",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
restart_cmd=['apache2ctl', 'graceful'],
restart_cmd_alt=['apache2ctl', 'restart'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2/vhosts.d",
bin=None,
)
def _prepare_options(self):
@@ -21,15 +35,13 @@ class GentooConfigurator(configurator.ApacheConfigurator):
Override the options dictionary initialization in order to support
alternative restart cmd used in Gentoo.
"""
super()._prepare_options()
if not self.options.restart_cmd_alt: # pragma: no cover
raise ValueError("OS option restart_cmd_alt must be set for Gentoo.")
self.options.restart_cmd_alt[0] = self.options.ctl
super(GentooConfigurator, self)._prepare_options()
self.options["restart_cmd_alt"][0] = self.option("ctl")
def get_parser(self):
"""Initializes the ApacheParser"""
return GentooParser(
self.options.server_root, self.options.vhost_root,
self.option("server_root"), self.option("vhost_root"),
self.version, configurator=self)
@@ -38,7 +50,7 @@ class GentooParser(parser.ApacheParser):
def __init__(self, *args, **kwargs):
# Gentoo specific configuration file for Apache2
self.apacheconfig_filep = "/etc/conf.d/apache2"
super().__init__(*args, **kwargs)
super(GentooParser, self).__init__(*args, **kwargs)
def update_runtime_variables(self):
""" Override for update_runtime_variables for custom parsing """
@@ -49,12 +61,12 @@ class GentooParser(parser.ApacheParser):
""" Parses Apache CLI options from Gentoo configuration file """
defines = apache_util.parse_define_file(self.apacheconfig_filep,
"APACHE2_OPTS")
for k, v in defines.items():
self.variables[k] = v
for k in defines:
self.variables[k] = defines[k]
def update_modules(self):
"""Get loaded modules from httpd process, and add them to DOM"""
mod_cmd = [self.configurator.options.ctl, "modules"]
mod_cmd = [self.configurator.option("ctl"), "modules"]
matches = apache_util.parse_from_subprocess(mod_cmd, r"(.*)_module")
for mod in matches:
self.add_mod(mod.strip())
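
update_modules above shells out to the Apache control binary ("<ctl> modules") and pulls module names out of the output with the regex "(.*)_module"; parse_from_subprocess is the real helper named in the diff. The snippet below illustrates just the parsing step, with a slightly tightened regex, and is not certbot code:

import re
from typing import List


def modules_from_output(output: str) -> List[str]:
    """Extract names like 'ssl' from lines such as ' ssl_module (shared)'."""
    return [name.strip() for name in re.findall(r"(\S*)_module", output)]


sample = """Loaded Modules:
 core_module (static)
 ssl_module (shared)
 rewrite_module (shared)
"""
assert modules_from_output(sample) == ["core", "ssl", "rewrite"]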

View File

@@ -1,19 +1,28 @@
""" Distribution specific override class for OpenSUSE """
import zope.interface
from certbot import interfaces
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
@zope.interface.provider(interfaces.IPluginFactory)
class OpenSUSEConfigurator(configurator.ApacheConfigurator):
"""OpenSUSE specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/vhosts.d",
vhost_files="*.conf",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
restart_cmd=['apache2ctl', 'graceful'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod="a2enmod",
dismod="a2dismod",
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2/vhosts.d",
bin=None,
)

View File

@@ -1,19 +0,0 @@
""" Distribution specific override class for Void Linux """
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
class VoidConfigurator(configurator.ApacheConfigurator):
"""Void Linux specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
server_root="/etc/apache",
vhost_root="/etc/apache/extra",
vhost_files="*.conf",
logs_root="/var/log/httpd",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
challenge_location="/etc/apache/extra",
)

View File

@@ -3,24 +3,21 @@ import copy
import fnmatch
import logging
import re
from typing import Dict
from typing import List
from typing import Optional
import sys
import six
from acme.magic_typing import Dict
from acme.magic_typing import List
from certbot import errors
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import constants
try:
from augeas import Augeas
except ImportError: # pragma: no cover
Augeas = None # type: ignore
logger = logging.getLogger(__name__)
class ApacheParser:
class ApacheParser(object):
"""Class handles the fine details of parsing the Apache Configuration.
.. todo:: Make parsing general... remove sites-available etc...
@@ -45,7 +42,8 @@ class ApacheParser:
self.configurator = configurator
# Initialize augeas
self.aug = init_augeas()
self.aug = None
self.init_augeas()
if not self.check_aug_version():
raise errors.NotSupportedError(
@@ -53,9 +51,9 @@ class ApacheParser:
"version 1.2.0 or higher, please make sure you have you have "
"those installed.")
self.modules: Dict[str, Optional[str]] = {}
self.parser_paths: Dict[str, List[str]] = {}
self.variables: Dict[str, str] = {}
self.modules = {} # type: Dict[str, str]
self.parser_paths = {} # type: Dict[str, List[str]]
self.variables = {} # type: Dict[str, str]
# Find configuration root and make sure augeas can parse it.
self.root = os.path.abspath(root)
@@ -81,13 +79,30 @@ class ApacheParser:
# Must also attempt to parse additional virtual host root
if vhostroot:
self.parse_file(os.path.abspath(vhostroot) + "/" +
self.configurator.options.vhost_files)
self.configurator.option("vhost_files"))
# check to see if there were unparsed define statements
if version < (2, 4):
if self.find_dir("Define", exclude=False):
raise errors.PluginError("Error parsing runtime variables")
def init_augeas(self):
""" Initialize the actual Augeas instance """
try:
import augeas
except ImportError: # pragma: no cover
raise errors.NoInstallationError("Problem in Augeas installation")
self.aug = augeas.Augeas(
# specify a directory to load our preferred lens from
loadpath=constants.AUGEAS_LENS_DIR,
# Do not save backup (we do it ourselves), do not load
# anything by default
flags=(augeas.Augeas.NONE |
augeas.Augeas.NO_MODL_AUTOLOAD |
augeas.Augeas.ENABLE_SPAN))
def check_parsing_errors(self, lens):
"""Verify Augeas can parse all of the lens files.
@@ -251,7 +266,7 @@ class ApacheParser:
the iteration issue. Else... parse and enable mods at same time.
"""
mods: Dict[str, str] = {}
mods = {} # type: Dict[str, str]
matches = self.find_dir("LoadModule")
iterator = iter(matches)
# Make sure prev_size != cur_size for do: while: iteration
@@ -260,7 +275,7 @@ class ApacheParser:
while len(mods) != prev_size:
prev_size = len(mods)
for match_name, match_filename in zip(
for match_name, match_filename in six.moves.zip(
iterator, iterator):
mod_name = self.get_arg(match_name)
mod_filename = self.get_arg(match_filename)
@@ -282,7 +297,7 @@ class ApacheParser:
def update_defines(self):
"""Updates the dictionary of known variables in the configuration"""
self.variables = apache_util.parse_defines(self.configurator.options.ctl)
self.variables = apache_util.parse_defines(self.configurator.option("ctl"))
def update_includes(self):
"""Get includes from httpd process, and add them to DOM if needed"""
@@ -292,7 +307,7 @@ class ApacheParser:
# configuration files
_ = self.find_dir("Include")
matches = apache_util.parse_includes(self.configurator.options.ctl)
matches = apache_util.parse_includes(self.configurator.option("ctl"))
if matches:
for i in matches:
if not self.parsed_in_current(i):
@@ -301,7 +316,7 @@ class ApacheParser:
def update_modules(self):
"""Get loaded modules from httpd process, and add them to DOM"""
matches = apache_util.parse_modules(self.configurator.options.ctl)
matches = apache_util.parse_modules(self.configurator.option("ctl"))
for mod in matches:
self.add_mod(mod.strip())
@@ -440,11 +455,7 @@ class ApacheParser:
:type args: list or str
"""
first_dir = aug_conf_path + "/directive[1]"
if self.aug.get(first_dir):
self.aug.insert(first_dir, "directive", True)
else:
self.aug.set(first_dir, "directive")
self.aug.insert(first_dir, "directive", True)
self.aug.set(first_dir, dirname)
if isinstance(args, list):
for i, value in enumerate(args, 1):
@@ -542,7 +553,7 @@ class ApacheParser:
else:
arg_suffix = "/*[self::arg=~regexp('%s')]" % case_i(arg)
ordered_matches: List[str] = []
ordered_matches = [] # type: List[str]
# TODO: Wildcards should be included in alphabetical order
# https://httpd.apache.org/docs/2.4/mod/core.html#include
@@ -720,6 +731,7 @@ class ApacheParser:
privileged users.
https://apr.apache.org/docs/apr/2.0/apr__fnmatch_8h_source.html
http://apache2.sourcearchive.com/documentation/2.2.16-6/apr__fnmatch_8h_source.html
:param str clean_fn_match: Apache style filename match, like globs
@@ -727,6 +739,9 @@ class ApacheParser:
:rtype: str
"""
if sys.version_info < (3, 6):
# This strips off the final \Z(?ms)
return fnmatch.translate(clean_fn_match)[:-7] # pragma: no cover
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
@@ -784,7 +799,7 @@ class ApacheParser:
def _parsed_by_parser_paths(self, filep, paths):
"""Helper function that searches through provided paths and returns
True if file path is found in the set"""
for directory in paths:
for directory in paths.keys():
for filename in paths[directory]:
if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
return True
@@ -941,19 +956,3 @@ def get_aug_path(file_path):
"""
return "/files%s" % file_path
def init_augeas() -> Augeas:
""" Initialize the actual Augeas instance """
if not Augeas: # pragma: no cover
raise errors.NoInstallationError("Problem in Augeas installation")
return Augeas(
# specify a directory to load our preferred lens from
loadpath=constants.AUGEAS_LENS_DIR,
# Do not save backup (we do it ourselves), do not load
# anything by default
flags=(Augeas.NONE |
Augeas.NO_MODL_AUTOLOAD |
Augeas.ENABLE_SPAN))
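
The hunk above converts an Apache-style filename match into a regular expression by slicing the output of fnmatch.translate(), because the standard library wraps the generated pattern differently before and after Python 3.6 and only the bare pattern body is wanted. A quick self-contained illustration of the same slicing, mirroring the diff's logic with an example glob:

import fnmatch
import re
import sys


def fnmatch_to_re(clean_fn_match):
    """Strip fnmatch.translate()'s version-dependent wrapper, as the parser method does."""
    if sys.version_info < (3, 6):
        # Older interpreters append a trailing \Z(?ms)
        return fnmatch.translate(clean_fn_match)[:-7]
    # Python 3.6+ wraps the body as (?s:...)\Z
    return fnmatch.translate(clean_fn_match)[4:-3]


body = fnmatch_to_re("*.conf")                     # e.g. '.*\\.conf'
assert re.fullmatch(body, "000-default.conf")
assert not re.fullmatch(body, "000-default.conf.bak")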

View File

@@ -0,0 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.1.0

View File

@@ -1,31 +1,61 @@
from distutils.version import StrictVersion
import sys
from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.20.0.dev0'
version = '1.6.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
# We specify the minimum acme and certbot version as the current plugin
# version for simplicity. See
# https://github.com/certbot/certbot/issues/8761 for more info.
f'acme>={version}',
f'certbot>={version}',
'acme>=0.29.0',
'certbot>=1.1.0',
'python-augeas',
'setuptools>=39.0.1',
'setuptools',
'zope.component',
'zope.interface',
]
setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
'of setuptools. Version 36.2+ of setuptools is required.')
elif sys.version_info < (3,3):
install_requires.append('mock')
dev_extras = [
'apacheconfig>=0.3.2',
]
class PyTest(TestCommand):
user_options = []
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
name='certbot-apache',
version=version,
description="Apache plugin for Certbot",
url='https://github.com/letsencrypt/letsencrypt',
author="Certbot Project",
author_email='certbot-dev@eff.org',
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=3.6',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -33,11 +63,13 @@ setup(
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
@@ -57,4 +89,7 @@ setup(
'apache = certbot_apache._internal.entrypoint:ENTRYPOINT',
],
},
test_suite='certbot_apache',
tests_require=["pytest"],
cmdclass={"test": PyTest},
)
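
One side of this setup.py appends the requirement string 'mock ; python_version < "3.3"', but only after checking that the installed setuptools (36.2+) understands environment markers; otherwise it falls back to a plain sys.version_info check. As a side note on what such a marker evaluates to; this uses the third-party packaging distribution, which is an assumption on my part and is not named anywhere in this diff:

from packaging.markers import Marker  # assumed available; not part of this diff

marker = Marker('python_version < "3.3"')
print(marker.evaluate())                           # False on any modern interpreter
print(marker.evaluate({"python_version": "2.7"}))  # True: mock would be installed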

View File

@@ -52,7 +52,7 @@ function Cleanup() {
# if our environment asks us to enable modules, do our best!
if [ "$1" = --debian-modules ] ; then
sudo apt-get install -y apache2
sudo apt-get install -y libapache2-mod-wsgi-py3
sudo apt-get install -y libapache2-mod-wsgi
sudo apt-get install -y libapache2-mod-macro
for mod in ssl rewrite macro wsgi deflate userdir version mime setenvif ; do

View File

@@ -1,6 +1,4 @@
"""Tests for AugeasParserNode classes"""
from typing import List
try:
import mock
except ImportError: # pragma: no cover
@@ -29,7 +27,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
"""Test AugeasParserNode using available test configurations"""
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(AugeasParserNodeTest, self).setUp()
with mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.get_parsernode_root") as mock_parsernode:
mock_parsernode.side_effect = _get_augeasnode_mock(
@@ -109,7 +107,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
def test_set_parameters(self):
servernames = self.config.parser_root.find_directives("servername")
names: List[str] = []
names = [] # type: List[str]
for servername in servernames:
names += servername.parameters
self.assertFalse("going_to_set_this" in names)

View File

@@ -7,6 +7,7 @@ try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
import six # pylint: disable=unused-import # six is used in mock.patch()
from certbot import errors
from certbot_apache._internal import constants
@@ -18,7 +19,7 @@ class AutoHSTSTest(util.ApacheTest):
# pylint: disable=protected-access
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(AutoHSTSTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
@@ -146,7 +147,7 @@ class AutoHSTSTest(util.ApacheTest):
@mock.patch("certbot_apache._internal.display_ops.select_vhost")
def test_autohsts_no_ssl_vhost(self, mock_select):
mock_select.return_value = self.vh_truth[0]
with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.assertRaises(errors.PluginError,
self.config.enable_autohsts,
mock.MagicMock(), "invalid.example.com")
@@ -179,7 +180,7 @@ class AutoHSTSTest(util.ApacheTest):
self.config._autohsts_fetch_state()
self.config._autohsts["orphan_id"] = {"laststep": 999, "timestamp": 0}
self.config._autohsts_save_state()
with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.config.deploy_autohsts(mock.MagicMock())
self.assertTrue(mock_log.called)
self.assertTrue(

View File

@@ -1,6 +1,5 @@
"""Test for certbot_apache._internal.configurator for CentOS 6 overrides"""
import unittest
from unittest import mock
from certbot.compat import os
from certbot.errors import MisconfigurationError
@@ -37,9 +36,9 @@ class CentOS6Tests(util.ApacheTest):
test_dir = "centos6_apache/apache"
config_root = "centos6_apache/apache/httpd"
vhost_root = "centos6_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(CentOS6Tests, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
@@ -66,8 +65,7 @@ class CentOS6Tests(util.ApacheTest):
raise Exception("Missed: %s" % vhost) # pragma: no cover
self.assertEqual(found, 2)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_default(self, unused_mock_notify):
def test_loadmod_default(self):
ssl_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
self.assertEqual(len(ssl_loadmods), 1)
@@ -97,8 +95,7 @@ class CentOS6Tests(util.ApacheTest):
ifmod_args = self.config.parser.get_all_args(lm[:-17])
self.assertTrue("!mod_ssl.c" in ifmod_args)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_multiple(self, unused_mock_notify):
def test_loadmod_multiple(self):
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
# Adds another LoadModule to main httpd.conf in addition to ssl.conf
self.config.parser.add_dir(self.config.parser.loc["default"], "LoadModule",
@@ -118,8 +115,7 @@ class CentOS6Tests(util.ApacheTest):
for mod in post_loadmods:
self.assertTrue(self.config.parser.not_modssl_ifmodule(mod)) #pylint: disable=no-member
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_rootconf_exists(self, unused_mock_notify):
def test_loadmod_rootconf_exists(self):
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
rootconf_ifmod = self.config.parser.get_ifmod(
parser.get_aug_path(self.config.parser.loc["default"]),
@@ -146,8 +142,7 @@ class CentOS6Tests(util.ApacheTest):
self.config.parser.get_all_args(mods[0][:-7]),
sslmod_args)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_neg_loadmod_already_on_path(self, unused_mock_notify):
def test_neg_loadmod_already_on_path(self):
loadmod_args = ["ssl_module", "modules/mod_ssl.so"]
ifmod = self.config.parser.get_ifmod(
self.vh_truth[1].path, "!mod_ssl.c", beginning=True)
@@ -190,8 +185,7 @@ class CentOS6Tests(util.ApacheTest):
# Make sure that none was changed
self.assertEqual(pre_matches, post_matches)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_not_found(self, unused_mock_notify):
def test_loadmod_not_found(self):
# Remove all existing LoadModule ssl_module... directives
orig_loadmods = self.config.parser.find_dir("LoadModule",
"ssl_module",

View File

@@ -41,9 +41,9 @@ class FedoraRestartTest(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="fedora_old")
@@ -96,9 +96,9 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(MultipleVhostsTestCentOS, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
@@ -140,7 +140,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertTrue("TEST2" in self.config.parser.variables)
self.assertTrue("TEST2" in self.config.parser.variables.keys())
self.assertTrue("mod_another.c" in self.config.parser.modules)
def test_get_virtual_hosts(self):
@@ -172,11 +172,11 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
mock_osi.return_value = ("centos", "7")
self.config.parser.update_runtime_variables()
self.assertTrue("mock_define" in self.config.parser.variables)
self.assertTrue("mock_define_too" in self.config.parser.variables)
self.assertTrue("mock_value" in self.config.parser.variables)
self.assertTrue("mock_define" in self.config.parser.variables.keys())
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
self.assertTrue("mock_value" in self.config.parser.variables.keys())
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
@mock.patch("certbot_apache._internal.configurator.util.run_script")

View File

@@ -11,7 +11,7 @@ class ComplexParserTest(util.ParserTest):
"""Apache Parser Test."""
def setUp(self): # pylint: disable=arguments-differ
super().setUp(
super(ComplexParserTest, self).setUp(
"complex_parsing", "complex_parsing")
self.setup_variables()

View File

@@ -16,7 +16,7 @@ class ConfiguratorReverterTest(util.ApacheTest):
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(ConfiguratorReverterTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)

View File

@@ -10,6 +10,7 @@ try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
import six # pylint: disable=unused-import # six is used in mock.patch()
from acme import challenges
from certbot import achallenges
@@ -30,7 +31,7 @@ class MultipleVhostsTest(util.ApacheTest):
"""Test two standard well-configured HTTP vhosts."""
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(MultipleVhostsTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
@@ -103,9 +104,9 @@ class MultipleVhostsTest(util.ApacheTest):
"handle_modules", "handle_sites", "ctl"]
exp = {}
for k in ApacheConfigurator.OS_DEFAULTS.__dict__.keys():
for k in ApacheConfigurator.OS_DEFAULTS:
if k in parserargs:
exp[k.replace("_", "-")] = getattr(ApacheConfigurator.OS_DEFAULTS, k)
exp[k.replace("_", "-")] = ApacheConfigurator.OS_DEFAULTS[k]
# Special cases
exp["vhost-root"] = None
@@ -128,15 +129,16 @@ class MultipleVhostsTest(util.ApacheTest):
def test_all_configurators_defaults_defined(self):
from certbot_apache._internal.entrypoint import OVERRIDE_CLASSES
from certbot_apache._internal.configurator import ApacheConfigurator
parameters = set(ApacheConfigurator.OS_DEFAULTS.__dict__.keys())
parameters = set(ApacheConfigurator.OS_DEFAULTS.keys())
for cls in OVERRIDE_CLASSES.values():
self.assertTrue(parameters.issubset(set(cls.OS_DEFAULTS.__dict__.keys())))
self.assertTrue(parameters.issubset(set(cls.OS_DEFAULTS.keys())))
def test_constant(self):
self.assertTrue("debian_apache_2_4/multiple_vhosts/apache" in
self.config.options.server_root)
self.config.option("server_root"))
self.assertEqual(self.config.option("nonexistent"), None)
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_get_all_names(self, mock_getutility):
mock_utility = mock_getutility()
mock_utility.notification = mock.MagicMock(return_value=True)
@@ -145,7 +147,7 @@ class MultipleVhostsTest(util.ApacheTest):
"nonsym.link", "vhost.in.rootconf", "www.certbot.demo",
"duplicate.example.com"})
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
@mock.patch("certbot_apache._internal.configurator.socket.gethostbyaddr")
def test_get_all_names_addrs(self, mock_gethost, mock_getutility):
mock_gethost.side_effect = [("google.com", "", ""), socket.error]
@@ -337,8 +339,7 @@ class MultipleVhostsTest(util.ApacheTest):
vhosts = self.config._non_default_vhosts(self.config.vhosts)
self.assertEqual(len(vhosts), 10)
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
def test_deploy_cert_enable_new_vhost(self, unused_mock_notify):
def test_deploy_cert_enable_new_vhost(self):
# Create
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
self.config.parser.modules["ssl_module"] = None
@@ -376,8 +377,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.fail("Include shouldn't be added, as patched find_dir 'finds' existing one") \
# pragma: no cover
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
def test_deploy_cert(self, unused_mock_notify):
def test_deploy_cert(self):
self.config.parser.modules["ssl_module"] = None
self.config.parser.modules["mod_ssl.c"] = None
self.config.parser.modules["socache_shmcb_module"] = None
@@ -726,7 +726,7 @@ class MultipleVhostsTest(util.ApacheTest):
# This calls open
self.config.reverter.register_file_creation = mock.Mock()
mock_open.side_effect = IOError
with mock.patch("builtins.open", mock_open):
with mock.patch("six.moves.builtins.open", mock_open):
self.assertRaises(
errors.PluginError,
self.config.make_vhost_ssl, self.vh_truth[0])
@@ -893,7 +893,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.enhance, "certbot.demo", "unknown_enhancement")
def test_enhance_no_ssl_vhost(self):
with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.assertRaises(errors.PluginError, self.config.enhance,
"certbot.demo", "redirect")
# Check that correct logger.warning was printed
@@ -1292,8 +1292,7 @@ class MultipleVhostsTest(util.ApacheTest):
os.path.basename(inc_path) in self.config.parser.existing_paths[
os.path.dirname(inc_path)])
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
def test_deploy_cert_not_parsed_path(self, unused_mock_notify):
def test_deploy_cert_not_parsed_path(self):
# Make sure that we add include to root config for vhosts when
# handle-sites is false
self.config.parser.modules["ssl_module"] = None
@@ -1338,6 +1337,13 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.enable_mod,
"whatever")
def test_wildcard_domain(self):
# pylint: disable=protected-access
cases = {u"*.example.org": True, b"*.x.example.org": True,
u"a.example.org": False, b"a.x.example.org": False}
for key in cases:
self.assertEqual(self.config._wildcard_domain(key), cases[key])
def test_choose_vhosts_wildcard(self):
# pylint: disable=protected-access
mock_path = "certbot_apache._internal.display_ops.select_vhost_multiple"
@@ -1351,10 +1357,10 @@ class MultipleVhostsTest(util.ApacheTest):
# And the actual returned values
self.assertEqual(len(vhs), 1)
self.assertEqual(vhs[0].name, "certbot.demo")
self.assertTrue(vhs[0].name == "certbot.demo")
self.assertTrue(vhs[0].ssl)
self.assertNotEqual(vhs[0], self.vh_truth[3])
self.assertFalse(vhs[0] == self.vh_truth[3])
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.make_vhost_ssl")
def test_choose_vhosts_wildcard_no_ssl(self, mock_makessl):
@@ -1389,8 +1395,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertEqual(vhs[0], self.vh_truth[7])
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
def test_deploy_cert_wildcard(self, unused_mock_notify):
def test_deploy_cert_wildcard(self):
# pylint: disable=protected-access
mock_choose_vhosts = mock.MagicMock()
mock_choose_vhosts.return_value = [self.vh_truth[7]]
@@ -1466,10 +1471,10 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.parser.aug.match = mock_match
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 2)
self.assertEqual(vhs[0], self.vh_truth[1])
self.assertTrue(vhs[0] == self.vh_truth[1])
# mock_vhost should have replaced the vh_truth[0], because its filepath
# isn't a symlink
self.assertEqual(vhs[1], mock_vhost)
self.assertTrue(vhs[1] == mock_vhost)
class AugeasVhostsTest(util.ApacheTest):
@@ -1480,9 +1485,9 @@ class AugeasVhostsTest(util.ApacheTest):
td = "debian_apache_2_4/augeas_vhosts"
cr = "debian_apache_2_4/augeas_vhosts/apache2"
vr = "debian_apache_2_4/augeas_vhosts/apache2/sites-available"
super().setUp(test_dir=td,
config_root=cr,
vhost_root=vr)
super(AugeasVhostsTest, self).setUp(test_dir=td,
config_root=cr,
vhost_root=vr)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir,
@@ -1559,9 +1564,9 @@ class MultiVhostsTest(util.ApacheTest):
td = "debian_apache_2_4/multi_vhosts"
cr = "debian_apache_2_4/multi_vhosts/apache2"
vr = "debian_apache_2_4/multi_vhosts/apache2/sites-available"
super().setUp(test_dir=td,
config_root=cr,
vhost_root=vr)
super(MultiVhostsTest, self).setUp(test_dir=td,
config_root=cr,
vhost_root=vr)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path,
@@ -1610,8 +1615,8 @@ class MultiVhostsTest(util.ApacheTest):
self.assertEqual(self.config._get_new_vh_path(without_index, both),
with_index_2[0])
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_make_vhost_ssl_with_existing_rewrite_rule(self, mock_notify):
@certbot_util.patch_get_utility()
def test_make_vhost_ssl_with_existing_rewrite_rule(self, mock_get_utility):
self.config.parser.modules["rewrite_module"] = None
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[4])
@@ -1627,11 +1632,11 @@ class MultiVhostsTest(util.ApacheTest):
"\"http://new.example.com/docs/$1\" [R,L]")
self.assertTrue(commented_rewrite_rule in conf_text)
self.assertTrue(uncommented_rewrite_rule in conf_text)
self.assertEqual(mock_notify.call_count, 1)
self.assertIn("Some rewrite rules", mock_notify.call_args[0][0])
mock_get_utility().add_message.assert_called_once_with(mock.ANY,
mock.ANY)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_make_vhost_ssl_with_existing_rewrite_conds(self, mock_notify):
@certbot_util.patch_get_utility()
def test_make_vhost_ssl_with_existing_rewrite_conds(self, mock_get_utility):
self.config.parser.modules["rewrite_module"] = None
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[3])
@@ -1656,15 +1661,15 @@ class MultiVhostsTest(util.ApacheTest):
self.assertTrue(commented_cond1 in conf_line_set)
self.assertTrue(commented_cond2 in conf_line_set)
self.assertTrue(commented_rewrite_rule in conf_line_set)
self.assertEqual(mock_notify.call_count, 1)
self.assertIn("Some rewrite rules", mock_notify.call_args[0][0])
mock_get_utility().add_message.assert_called_once_with(mock.ANY,
mock.ANY)
class InstallSslOptionsConfTest(util.ApacheTest):
"""Test that the options-ssl-nginx.conf file is installed and updated properly."""
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(InstallSslOptionsConfTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
@@ -1777,7 +1782,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
# ssl_module statically linked
self.config._openssl_version = None
self.config.parser.modules['ssl_module'] = None
self.config.options.bin = '/fake/path/to/httpd'
self.config.options['bin'] = '/fake/path/to/httpd'
with mock.patch("certbot_apache._internal.configurator."
"ApacheConfigurator._open_module_file") as mock_omf:
mock_omf.return_value = some_string_contents
@@ -1813,7 +1818,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
# When ssl_module is statically linked but --apache-bin not provided
self.config._openssl_version = None
self.config.options.bin = None
self.config.options['bin'] = None
self.config.parser.modules['ssl_module'] = None
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.assertEqual(self.config.openssl_version(), None)
@@ -1836,7 +1841,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
def test_open_module_file(self):
mock_open = mock.mock_open(read_data="testing 12 3")
with mock.patch("builtins.open", mock_open):
with mock.patch("six.moves.builtins.open", mock_open):
self.assertEqual(self.config._open_module_file("/nonsense/"), "testing 12 3")
if __name__ == "__main__":

View File

@@ -9,7 +9,6 @@ except ImportError: # pragma: no cover
from certbot import errors
from certbot.compat import os
from certbot.tests import util as certbot_util
from certbot_apache._internal import apache_util
from certbot_apache._internal import obj
import util
@@ -21,7 +20,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
_multiprocess_can_split_ = True
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(MultipleVhostsTestDebian, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="debian")
@@ -50,11 +49,10 @@ class MultipleVhostsTestDebian(util.ApacheTest):
@mock.patch("certbot.util.run_script")
@mock.patch("certbot.util.exe_exists")
@mock.patch("certbot_apache._internal.apache_util.subprocess.run")
def test_enable_mod(self, mock_run, mock_exe_exists, mock_run_script):
mock_run.return_value.stdout = "Define: DUMP_RUN_CFG"
mock_run.return_value.stderr = ""
mock_run.return_value.returncode = 0
@mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
def test_enable_mod(self, mock_popen, mock_exe_exists, mock_run_script):
mock_popen().communicate.return_value = ("Define: DUMP_RUN_CFG", "")
mock_popen().returncode = 0
mock_exe_exists.return_value = True
self.config.enable_mod("ssl")
@@ -69,18 +67,17 @@ class MultipleVhostsTestDebian(util.ApacheTest):
self.config.parser.modules["ssl_module"] = None
self.config.parser.modules["mod_ssl.c"] = None
self.assertFalse(ssl_vhost.enabled)
with certbot_util.patch_display_util():
self.config.deploy_cert(
"encryption-example.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.assertTrue(ssl_vhost.enabled)
# Make sure that we don't error out if symlink already exists
ssl_vhost.enabled = False
self.assertFalse(ssl_vhost.enabled)
self.config.deploy_cert(
"encryption-example.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.assertTrue(ssl_vhost.enabled)
self.config.deploy_cert(
"encryption-example.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.assertTrue(ssl_vhost.enabled)
# Make sure that we don't error out if symlink already exists
ssl_vhost.enabled = False
self.assertFalse(ssl_vhost.enabled)
self.config.deploy_cert(
"encryption-example.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.assertTrue(ssl_vhost.enabled)
def test_enable_site_failure(self):
self.config.parser.root = "/tmp/nonexistent"
@@ -103,10 +100,9 @@ class MultipleVhostsTestDebian(util.ApacheTest):
# Get the default 443 vhost
self.config.assoc["random.demo"] = self.vh_truth[1]
with certbot_util.patch_display_util():
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.save()
# Verify ssl_module was enabled.
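
The test_enable_mod hunk above flips between two ways of stubbing the subprocess layer: setting attributes on subprocess.run()'s return value versus configuring subprocess.Popen().communicate(). A tiny self-contained illustration of the run()-style stub; the apachectl command line here is only an example and is not taken from certbot:

import subprocess
from unittest import mock


def dump_run_cfg():
    proc = subprocess.run(["apachectl", "-t", "-D", "DUMP_RUN_CFG"],
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                          universal_newlines=True, check=False)
    return proc.stdout


with mock.patch("subprocess.run") as mock_run:
    mock_run.return_value.stdout = "Define: DUMP_RUN_CFG"
    mock_run.return_value.returncode = 0
    assert dump_run_cfg() == "Define: DUMP_RUN_CFG"  # no real process is spawned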

View File

@@ -3,8 +3,8 @@ import unittest
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
from certbot import errors
from certbot.display import util as display_util
@@ -25,7 +25,7 @@ class SelectVhostMultiTest(unittest.TestCase):
def test_select_no_input(self):
self.assertFalse(select_vhost_multiple([]))
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_select_correct(self, mock_util):
mock_util().checklist.return_value = (
display_util.OK, [self.vhosts[3].display_repr(),
@@ -37,13 +37,12 @@ class SelectVhostMultiTest(unittest.TestCase):
self.assertTrue(self.vhosts[3] in vhs)
self.assertFalse(self.vhosts[1] in vhs)
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_select_cancel(self, mock_util):
mock_util().checklist.return_value = (display_util.CANCEL, "whatever")
vhs = select_vhost_multiple([self.vhosts[2], self.vhosts[3]])
self.assertFalse(vhs)
class SelectVhostTest(unittest.TestCase):
"""Tests for certbot_apache._internal.display_ops.select_vhost."""
@@ -57,12 +56,12 @@ class SelectVhostTest(unittest.TestCase):
from certbot_apache._internal.display_ops import select_vhost
return select_vhost("example.com", vhosts)
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_successful_choice(self, mock_util):
mock_util().menu.return_value = (display_util.OK, 3)
self.assertEqual(self.vhosts[3], self._call(self.vhosts))
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_noninteractive(self, mock_util):
mock_util().menu.side_effect = errors.MissingCommandlineFlag("no vhost default")
try:
@@ -70,7 +69,7 @@ class SelectVhostTest(unittest.TestCase):
except errors.MissingCommandlineFlag as e:
self.assertTrue("vhost ambiguity" in str(e))
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_more_info_cancel(self, mock_util):
mock_util().menu.side_effect = [
(display_util.CANCEL, -1),
@@ -82,15 +81,16 @@ class SelectVhostTest(unittest.TestCase):
self.assertEqual(self._call([]), None)
@mock.patch("certbot_apache._internal.display_ops.display_util")
@certbot_util.patch_get_utility()
@mock.patch("certbot_apache._internal.display_ops.logger")
def test_small_display(self, mock_logger, mock_display_util):
def test_small_display(self, mock_logger, mock_util, mock_display_util):
mock_display_util.WIDTH = 20
mock_display_util.menu.return_value = (display_util.OK, 0)
mock_util().menu.return_value = (display_util.OK, 0)
self._call(self.vhosts)
self.assertEqual(mock_logger.debug.call_count, 1)
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_multiple_names(self, mock_util):
mock_util().menu.return_value = (display_util.OK, 5)

View File

@@ -412,9 +412,9 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
ancestor=self.block,
filepath="/path/to/whatever",
metadata=self.metadata)
self.assertNotEqual(self.block, ne_block)
self.assertNotEqual(self.directive, ne_directive)
self.assertNotEqual(self.comment, ne_comment)
self.assertFalse(self.block == ne_block)
self.assertFalse(self.directive == ne_directive)
self.assertFalse(self.comment == ne_comment)
def test_parsed_paths(self):
mock_p = mock.MagicMock(return_value=['/path/file.conf',

View File

@@ -41,7 +41,7 @@ class EntryPointTest(unittest.TestCase):
with mock.patch("certbot.util.get_os_info") as mock_info:
mock_info.return_value = ("nonexistent", "irrelevant")
with mock.patch("certbot.util.get_systemd_os_like") as mock_like:
mock_like.return_value = ["unknown"]
mock_like.return_value = ["unknonwn"]
self.assertEqual(entrypoint.get_configurator(),
configurator.ApacheConfigurator)

View File

@@ -46,9 +46,9 @@ class FedoraRestartTest(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="fedora")
@@ -90,9 +90,9 @@ class MultipleVhostsTestFedora(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(MultipleVhostsTestFedora, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
@@ -134,7 +134,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertTrue("TEST2" in self.config.parser.variables)
self.assertTrue("TEST2" in self.config.parser.variables.keys())
self.assertTrue("mod_another.c" in self.config.parser.modules)
@mock.patch("certbot_apache._internal.configurator.util.run_script")
@@ -172,11 +172,11 @@ class MultipleVhostsTestFedora(util.ApacheTest):
mock_osi.return_value = ("fedora", "29")
self.config.parser.update_runtime_variables()
self.assertTrue("mock_define" in self.config.parser.variables)
self.assertTrue("mock_define_too" in self.config.parser.variables)
self.assertTrue("mock_value" in self.config.parser.variables)
self.assertTrue("mock_define" in self.config.parser.variables.keys())
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
self.assertTrue("mock_value" in self.config.parser.variables.keys())
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
@mock.patch("certbot_apache._internal.configurator.util.run_script")

View File

@@ -50,9 +50,9 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
test_dir = "gentoo_apache/apache"
config_root = "gentoo_apache/apache/apache2"
vhost_root = "gentoo_apache/apache/apache2/vhosts.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(MultipleVhostsTestGentoo, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
# pylint: disable=line-too-long
with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_runtime_variables"):
@@ -91,7 +91,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_modules"):
self.config.parser.update_runtime_variables()
for define in defines:
self.assertTrue(define in self.config.parser.variables)
self.assertTrue(define in self.config.parser.variables.keys())
@mock.patch("certbot_apache._internal.apache_util.parse_from_subprocess")
def test_no_binary_configdump(self, mock_subprocess):

Some files were not shown because too many files have changed in this diff