Compare commits
6 Commits
v1.29.0
...
test-apach
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f9749f64d8 | ||
|
|
c0633ec3ac | ||
|
|
c9edb49267 | ||
|
|
9cc15da90f | ||
|
|
69bb92d698 | ||
|
|
bafa9e2eb0 |
@@ -69,12 +69,12 @@ Access can be defined for all or only selected repositories, which is nice.
|
||||
```
|
||||
|
||||
- Redirected to Azure DevOps, select the account created in _Having an Azure DevOps account_ section.
|
||||
- Select the organization, and click "Create a new project" (let's name it the same than the targeted github repo)
|
||||
- Select the organization, and click "Create a new project" (let's name it the same than the targetted github repo)
|
||||
- The Visibility is public, to profit from 10 parallel jobs
|
||||
|
||||
```
|
||||
!!! ACCESS !!!
|
||||
Azure Pipelines needs access to the GitHub account (in term of being able to check it is valid), and the Resources shared between the GitHub account and Azure Pipelines.
|
||||
Azure Pipelines needs access to the GitHub account (in term of beeing able to check it is valid), and the Resources shared between the GitHub account and Azure Pipelines.
|
||||
```
|
||||
|
||||
_Done. We can move to pipelines configuration._
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
# Advanced pipeline for running our full test suite on demand.
|
||||
trigger:
|
||||
# When changing these triggers, please ensure the documentation under
|
||||
# "Running tests in CI" is still correct.
|
||||
- test-*
|
||||
pr: none
|
||||
|
||||
variables:
|
||||
# We don't publish our Docker images in this pipeline, but when building them
|
||||
# for testing, let's use the nightly tag.
|
||||
dockerTag: nightly
|
||||
snapBuildTimeout: 5400
|
||||
|
||||
stages:
|
||||
- template: templates/stages/test-and-package-stage.yml
|
||||
20
.azure-pipelines/advanced.yml
Normal file
20
.azure-pipelines/advanced.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
# Advanced pipeline for isolated checks and release purpose
|
||||
trigger:
|
||||
- test-*
|
||||
- '*.x'
|
||||
pr:
|
||||
- test-*
|
||||
# This pipeline is also nightly run on master
|
||||
schedules:
|
||||
- cron: "0 4 * * *"
|
||||
displayName: Nightly build
|
||||
branches:
|
||||
include:
|
||||
- master
|
||||
always: true
|
||||
|
||||
jobs:
|
||||
# Any addition here should be reflected in the release pipeline.
|
||||
# It is advised to declare all jobs here as templates to improve maintainability.
|
||||
- template: templates/tests-suite.yml
|
||||
- template: templates/installer-tests.yml
|
||||
@@ -1,8 +1,12 @@
|
||||
trigger: none
|
||||
trigger:
|
||||
# apache-parser-v2 is a temporary branch for doing work related to
|
||||
# rewriting the parser in the Apache plugin.
|
||||
- apache-parser-v2
|
||||
- master
|
||||
pr:
|
||||
- apache-parser-v2
|
||||
- master
|
||||
- '*.x'
|
||||
|
||||
jobs:
|
||||
- template: templates/jobs/standard-tests-jobs.yml
|
||||
|
||||
- template: templates/tests-suite.yml
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
# Nightly pipeline running each day for master.
|
||||
trigger: none
|
||||
pr: none
|
||||
schedules:
|
||||
- cron: "30 4 * * *"
|
||||
displayName: Nightly build
|
||||
branches:
|
||||
include:
|
||||
- master
|
||||
always: true
|
||||
|
||||
variables:
|
||||
dockerTag: nightly
|
||||
snapBuildTimeout: 19800
|
||||
|
||||
stages:
|
||||
- template: templates/stages/test-and-package-stage.yml
|
||||
- template: templates/stages/deploy-stage.yml
|
||||
- template: templates/stages/notify-failure-stage.yml
|
||||
@@ -1,19 +1,13 @@
|
||||
# Release pipeline to run our full test suite, build artifacts, and deploy them
|
||||
# for GitHub release tags.
|
||||
# Release pipeline to build and deploy Certbot for Windows for GitHub release tags
|
||||
trigger:
|
||||
tags:
|
||||
include:
|
||||
- v*
|
||||
pr: none
|
||||
|
||||
variables:
|
||||
dockerTag: ${{variables['Build.SourceBranchName']}}
|
||||
snapBuildTimeout: 5400
|
||||
|
||||
stages:
|
||||
- template: templates/stages/test-and-package-stage.yml
|
||||
- template: templates/stages/changelog-stage.yml
|
||||
- template: templates/stages/deploy-stage.yml
|
||||
parameters:
|
||||
snapReleaseChannel: beta
|
||||
- template: templates/stages/notify-failure-stage.yml
|
||||
jobs:
|
||||
# Any addition here should be reflected in the advanced pipeline.
|
||||
# It is advised to declare all jobs here as templates to improve maintainability.
|
||||
- template: templates/tests-suite.yml
|
||||
- template: templates/installer-tests.yml
|
||||
- template: templates/changelog.yml
|
||||
|
||||
14
.azure-pipelines/templates/changelog.yml
Normal file
14
.azure-pipelines/templates/changelog.yml
Normal file
@@ -0,0 +1,14 @@
|
||||
jobs:
|
||||
- job: changelog
|
||||
pool:
|
||||
vmImage: vs2017-win2016
|
||||
steps:
|
||||
- bash: |
|
||||
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
|
||||
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
|
||||
displayName: Prepare changelog
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
artifact: changelog
|
||||
displayName: Publish changelog
|
||||
54
.azure-pipelines/templates/installer-tests.yml
Normal file
54
.azure-pipelines/templates/installer-tests.yml
Normal file
@@ -0,0 +1,54 @@
|
||||
jobs:
|
||||
- job: installer_build
|
||||
pool:
|
||||
vmImage: vs2017-win2016
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.7
|
||||
architecture: x86
|
||||
addToPath: true
|
||||
- script: python windows-installer/construct.py
|
||||
displayName: Build Certbot installer
|
||||
- task: CopyFiles@2
|
||||
inputs:
|
||||
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
|
||||
contents: '*.exe'
|
||||
targetFolder: $(Build.ArtifactStagingDirectory)
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
artifact: windows-installer
|
||||
displayName: Publish Windows installer
|
||||
- job: installer_run
|
||||
dependsOn: installer_build
|
||||
strategy:
|
||||
matrix:
|
||||
win2019:
|
||||
imageName: windows-2019
|
||||
win2016:
|
||||
imageName: vs2017-win2016
|
||||
win2012r2:
|
||||
imageName: vs2015-win2012r2
|
||||
pool:
|
||||
vmImage: $(imageName)
|
||||
steps:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: windows-installer
|
||||
path: $(Build.SourcesDirectory)/bin
|
||||
displayName: Retrieve Windows installer
|
||||
- script: $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe /S
|
||||
displayName: Install Certbot
|
||||
- powershell: Invoke-WebRequest https://www.python.org/ftp/python/3.8.0/python-3.8.0-amd64-webinstall.exe -OutFile C:\py3-setup.exe
|
||||
displayName: Get Python
|
||||
- script: C:\py3-setup.exe /quiet PrependPath=1 InstallAllUsers=1 Include_launcher=1 InstallLauncherAllUsers=1 Include_test=0 Include_doc=0 Include_dev=1 Include_debug=0 Include_tcltk=0 TargetDir=C:\py3
|
||||
displayName: Install Python
|
||||
- script: |
|
||||
py -3 -m venv venv
|
||||
venv\Scripts\python tools\pip_install.py -e certbot-ci
|
||||
displayName: Prepare Certbot-CI
|
||||
- script: |
|
||||
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
||||
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
|
||||
displayName: Run integration tests
|
||||
@@ -1,64 +0,0 @@
|
||||
jobs:
|
||||
- job: extended_test
|
||||
variables:
|
||||
- name: IMAGE_NAME
|
||||
value: ubuntu-18.04
|
||||
- name: PYTHON_VERSION
|
||||
value: 3.10
|
||||
- group: certbot-common
|
||||
strategy:
|
||||
matrix:
|
||||
linux-py38:
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: py38
|
||||
linux-py39:
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: py39
|
||||
linux-py37-nopin:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: py37
|
||||
CERTBOT_NO_PIN: 1
|
||||
linux-external-mock:
|
||||
TOXENV: external-mock
|
||||
linux-boulder-v2-integration-certbot-oldest:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: integration-certbot-oldest
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v2-integration-nginx-oldest:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: integration-nginx-oldest
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v2-py37-integration:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v2-py38-integration:
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v2-py39-integration:
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v2-py310-integration:
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
nginx-compat:
|
||||
TOXENV: nginx_compat
|
||||
linux-integration-rfc2136:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: integration-dns-rfc2136
|
||||
docker-dev:
|
||||
TOXENV: docker_dev
|
||||
le-modification:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: modification
|
||||
farmtest-apache2:
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: test-farm-apache2
|
||||
pool:
|
||||
vmImage: $(IMAGE_NAME)
|
||||
steps:
|
||||
- template: ../steps/tox-steps.yml
|
||||
@@ -1,222 +0,0 @@
|
||||
jobs:
|
||||
- job: docker_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
DOCKER_ARCH: amd64
|
||||
# Do not run the heavy non-amd64 builds for test branches
|
||||
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
||||
arm32v6:
|
||||
DOCKER_ARCH: arm32v6
|
||||
arm64v8:
|
||||
DOCKER_ARCH: arm64v8
|
||||
# The default timeout of 60 minutes is a little low for compiling
|
||||
# cryptography on ARM architectures.
|
||||
timeoutInMinutes: 180
|
||||
steps:
|
||||
- bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
|
||||
displayName: Build the Docker images
|
||||
# We don't filter for the Docker Hub organization to continue to allow
|
||||
# easy testing of these scripts on forks.
|
||||
- bash: |
|
||||
set -e
|
||||
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
|
||||
docker save --output images.tar $DOCKER_IMAGES
|
||||
displayName: Save the Docker images
|
||||
# If the name of the tar file or artifact changes, the deploy stage will
|
||||
# also need to be updated.
|
||||
- bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
|
||||
displayName: Prepare Docker artifact
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
artifact: docker_$(DOCKER_ARCH)
|
||||
displayName: Store Docker artifact
|
||||
- job: docker_run
|
||||
dependsOn: docker_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
steps:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: docker_amd64
|
||||
path: $(Build.SourcesDirectory)
|
||||
displayName: Retrieve Docker images
|
||||
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
|
||||
displayName: Load Docker images
|
||||
- bash: |
|
||||
set -ex
|
||||
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
|
||||
for DOCKER_IMAGE in ${DOCKER_IMAGES}
|
||||
do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
|
||||
done
|
||||
displayName: Run integration tests for Docker images
|
||||
- job: installer_build
|
||||
pool:
|
||||
vmImage: windows-2019
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.9
|
||||
architecture: x64
|
||||
addToPath: true
|
||||
- script: |
|
||||
python -m venv venv
|
||||
venv\Scripts\python tools\pipstrap.py
|
||||
venv\Scripts\python tools\pip_install.py -e windows-installer
|
||||
displayName: Prepare Windows installer build environment
|
||||
- script: |
|
||||
venv\Scripts\construct-windows-installer
|
||||
displayName: Build Certbot installer
|
||||
- task: CopyFiles@2
|
||||
inputs:
|
||||
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
|
||||
contents: '*.exe'
|
||||
targetFolder: $(Build.ArtifactStagingDirectory)
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
|
||||
artifact: windows-installer
|
||||
displayName: Publish Windows installer
|
||||
- job: installer_run
|
||||
dependsOn: installer_build
|
||||
strategy:
|
||||
matrix:
|
||||
win2019:
|
||||
imageName: windows-2019
|
||||
pool:
|
||||
vmImage: $(imageName)
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.9
|
||||
addToPath: true
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: windows-installer
|
||||
path: $(Build.SourcesDirectory)/bin
|
||||
displayName: Retrieve Windows installer
|
||||
- script: |
|
||||
python -m venv venv
|
||||
venv\Scripts\python tools\pipstrap.py
|
||||
venv\Scripts\python tools\pip_install.py -e certbot-ci
|
||||
env:
|
||||
PIP_NO_BUILD_ISOLATION: no
|
||||
displayName: Prepare Certbot-CI
|
||||
- script: |
|
||||
set PATH=%ProgramFiles%\Certbot\bin;%PATH%
|
||||
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win_amd64.exe
|
||||
displayName: Run windows installer integration tests
|
||||
- script: |
|
||||
set PATH=%ProgramFiles%\Certbot\bin;%PATH%
|
||||
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
|
||||
displayName: Run certbot integration tests
|
||||
- job: snaps_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
SNAP_ARCH: amd64
|
||||
# Do not run the heavy non-amd64 builds for test branches
|
||||
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
||||
armhf:
|
||||
SNAP_ARCH: armhf
|
||||
arm64:
|
||||
SNAP_ARCH: arm64
|
||||
timeoutInMinutes: 0
|
||||
steps:
|
||||
- script: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends snapd
|
||||
sudo snap install --classic snapcraft
|
||||
displayName: Install dependencies
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.8
|
||||
addToPath: true
|
||||
- task: DownloadSecureFile@1
|
||||
name: credentials
|
||||
inputs:
|
||||
secureFile: launchpad-credentials
|
||||
- script: |
|
||||
set -e
|
||||
git config --global user.email "$(Build.RequestedForEmail)"
|
||||
git config --global user.name "$(Build.RequestedFor)"
|
||||
mkdir -p ~/.local/share/snapcraft/provider/launchpad
|
||||
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
|
||||
python3 tools/snap/build_remote.py ALL --archs ${SNAP_ARCH} --timeout $(snapBuildTimeout)
|
||||
displayName: Build snaps
|
||||
- script: |
|
||||
set -e
|
||||
mv *.snap $(Build.ArtifactStagingDirectory)
|
||||
mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
|
||||
displayName: Prepare artifacts
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
artifact: snaps_$(SNAP_ARCH)
|
||||
displayName: Store snaps artifacts
|
||||
- job: snap_run
|
||||
dependsOn: snaps_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.8
|
||||
addToPath: true
|
||||
- script: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends nginx-light snapd
|
||||
python3 -m venv venv
|
||||
venv/bin/python tools/pipstrap.py
|
||||
venv/bin/python tools/pip_install.py -U tox
|
||||
displayName: Install dependencies
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: snaps_amd64
|
||||
path: $(Build.SourcesDirectory)/snap
|
||||
displayName: Retrieve Certbot snaps
|
||||
- script: |
|
||||
set -e
|
||||
sudo snap install --dangerous --classic snap/certbot_*.snap
|
||||
displayName: Install Certbot snap
|
||||
- script: |
|
||||
set -e
|
||||
venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
|
||||
displayName: Run tox
|
||||
- job: snap_dns_run
|
||||
dependsOn: snaps_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
steps:
|
||||
- script: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends snapd
|
||||
displayName: Install dependencies
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.8
|
||||
addToPath: true
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: snaps_amd64
|
||||
path: $(Build.SourcesDirectory)/snap
|
||||
displayName: Retrieve Certbot snaps
|
||||
- script: |
|
||||
set -e
|
||||
python3 -m venv venv
|
||||
venv/bin/python tools/pipstrap.py
|
||||
venv/bin/python tools/pip_install.py -e certbot-ci
|
||||
displayName: Prepare Certbot-CI
|
||||
- script: |
|
||||
set -e
|
||||
sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
|
||||
displayName: Test DNS plugins snaps
|
||||
@@ -1,73 +0,0 @@
|
||||
jobs:
|
||||
- job: test
|
||||
variables:
|
||||
PYTHON_VERSION: 3.10
|
||||
strategy:
|
||||
matrix:
|
||||
macos-py37-cover:
|
||||
IMAGE_NAME: macOS-10.15
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: py37-cover
|
||||
macos-py310-cover:
|
||||
IMAGE_NAME: macOS-10.15
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: py310-cover
|
||||
windows-py37:
|
||||
IMAGE_NAME: windows-2019
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: py37-win
|
||||
windows-py39-cover:
|
||||
IMAGE_NAME: windows-2019
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: py39-cover-win
|
||||
windows-integration-certbot:
|
||||
IMAGE_NAME: windows-2019
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: integration-certbot
|
||||
linux-oldest-tests-1:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
|
||||
linux-oldest-tests-2:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: '{dns,nginx}-oldest'
|
||||
linux-py37:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: py37
|
||||
linux-py310-cover:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: py310-cover
|
||||
linux-py310-lint:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: lint-posix
|
||||
linux-py310-mypy:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: mypy-posix
|
||||
linux-integration:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: integration
|
||||
ACME_SERVER: pebble
|
||||
apache-compat:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: apache_compat
|
||||
apacheconftest:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: apacheconftest-with-pebble
|
||||
nginxroundtrip:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: nginxroundtrip
|
||||
pool:
|
||||
vmImage: $(IMAGE_NAME)
|
||||
steps:
|
||||
- template: ../steps/tox-steps.yml
|
||||
- job: test_sphinx_builds
|
||||
pool:
|
||||
vmImage: ubuntu-20.04
|
||||
steps:
|
||||
- template: ../steps/sphinx-steps.yml
|
||||
@@ -1,19 +0,0 @@
|
||||
stages:
|
||||
- stage: Changelog
|
||||
jobs:
|
||||
- job: prepare
|
||||
pool:
|
||||
vmImage: windows-2019
|
||||
steps:
|
||||
# If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
|
||||
- bash: |
|
||||
set -e
|
||||
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
|
||||
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
|
||||
displayName: Prepare changelog
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
|
||||
artifact: changelog
|
||||
displayName: Publish changelog
|
||||
@@ -1,107 +0,0 @@
|
||||
parameters:
|
||||
- name: snapReleaseChannel
|
||||
type: string
|
||||
default: edge
|
||||
values:
|
||||
- edge
|
||||
- beta
|
||||
|
||||
stages:
|
||||
- stage: Deploy
|
||||
jobs:
|
||||
# This job relies on credentials used to publish the Certbot snaps. This
|
||||
# credential file was created by running:
|
||||
#
|
||||
# snapcraft logout
|
||||
# snapcraft export-login --channels=beta,edge snapcraft.cfg
|
||||
# (provide the shared snapcraft credentials when prompted)
|
||||
#
|
||||
# Then the file was added as a secure file in Azure pipelines
|
||||
# with the name snapcraft.cfg by following the instructions at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
|
||||
# including authorizing the file for use in the "nightly" and "release"
|
||||
# pipelines as described at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#q-how-do-i-authorize-a-secure-file-for-use-in-a-specific-pipeline.
|
||||
#
|
||||
# This file has a maximum lifetime of one year and the current
|
||||
# file will expire on 2023-06-17 which is also tracked by
|
||||
# https://github.com/certbot/certbot/issues/7931. The file will
|
||||
# need to be updated before then to prevent automated deploys
|
||||
# from breaking.
|
||||
#
|
||||
# Revoking these credentials can be done by changing the password of the
|
||||
# account used to generate the credentials. See
|
||||
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
|
||||
# more info.
|
||||
- job: publish_snap
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
variables:
|
||||
- group: certbot-common
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
SNAP_ARCH: amd64
|
||||
arm32v6:
|
||||
SNAP_ARCH: armhf
|
||||
arm64v8:
|
||||
SNAP_ARCH: arm64
|
||||
steps:
|
||||
- bash: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends snapd
|
||||
sudo snap install --classic snapcraft
|
||||
displayName: Install dependencies
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: snaps_$(SNAP_ARCH)
|
||||
path: $(Build.SourcesDirectory)/snap
|
||||
displayName: Retrieve Certbot snaps
|
||||
- task: DownloadSecureFile@1
|
||||
name: snapcraftCfg
|
||||
inputs:
|
||||
secureFile: snapcraft.cfg
|
||||
- bash: |
|
||||
set -e
|
||||
export SNAPCRAFT_STORE_CREDENTIALS=$(cat $(snapcraftCfg.secureFilePath))
|
||||
for SNAP_FILE in snap/*.snap; do
|
||||
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
|
||||
done
|
||||
displayName: Publish to Snap store
|
||||
- job: publish_docker
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
DOCKER_ARCH: amd64
|
||||
arm32v6:
|
||||
DOCKER_ARCH: arm32v6
|
||||
arm64v8:
|
||||
DOCKER_ARCH: arm64v8
|
||||
steps:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: docker_$(DOCKER_ARCH)
|
||||
path: $(Build.SourcesDirectory)
|
||||
displayName: Retrieve Docker images
|
||||
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
|
||||
displayName: Load Docker images
|
||||
- task: Docker@2
|
||||
inputs:
|
||||
command: login
|
||||
# The credentials used here are for the shared certbotbot account
|
||||
# on Docker Hub. The credentials are stored in a service account
|
||||
# which was created by following the instructions at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
|
||||
# The name given to this service account must match the value
|
||||
# given to containerRegistry below. "Grant access to all
|
||||
# pipelines" should also be checked. To revoke these
|
||||
# credentials, we can change the password on the certbotbot
|
||||
# Docker Hub account or remove the account from the
|
||||
# Certbot organization on Docker Hub.
|
||||
containerRegistry: docker-hub
|
||||
displayName: Login to Docker Hub
|
||||
- bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
|
||||
displayName: Deploy the Docker images
|
||||
@@ -1,19 +0,0 @@
|
||||
stages:
|
||||
- stage: On_Failure
|
||||
jobs:
|
||||
- job: notify_mattermost
|
||||
variables:
|
||||
- group: certbot-common
|
||||
pool:
|
||||
vmImage: ubuntu-20.04
|
||||
steps:
|
||||
- bash: |
|
||||
set -e
|
||||
MESSAGE="\
|
||||
---\n\
|
||||
##### Azure Pipeline
|
||||
*Repo* $(Build.Repository.ID) - *Pipeline* $(Build.DefinitionName) #$(Build.BuildNumber) - *Branch/PR* $(Build.SourceBranchName)\n\
|
||||
:warning: __Pipeline has failed__: [Link to the build](https://dev.azure.com/$(Build.Repository.ID)/_build/results?buildId=$(Build.BuildId)&view=results)\n\n\
|
||||
---"
|
||||
curl -i -X POST --data-urlencode "payload={\"text\":\"${MESSAGE}\"}" "$(MATTERMOST_URL)"
|
||||
condition: failed()
|
||||
@@ -1,6 +0,0 @@
|
||||
stages:
|
||||
- stage: TestAndPackage
|
||||
jobs:
|
||||
- template: ../jobs/standard-tests-jobs.yml
|
||||
- template: ../jobs/extended-tests-jobs.yml
|
||||
- template: ../jobs/packaging-jobs.yml
|
||||
@@ -1,24 +0,0 @@
|
||||
steps:
|
||||
- bash: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends libaugeas0
|
||||
FINAL_STATUS=0
|
||||
declare -a FAILED_BUILDS
|
||||
tools/venv.py
|
||||
source venv/bin/activate
|
||||
for doc_path in */docs
|
||||
do
|
||||
echo ""
|
||||
echo "##[group]Building $doc_path"
|
||||
if ! sphinx-build -W --keep-going -b html $doc_path $doc_path/_build/html; then
|
||||
FINAL_STATUS=1
|
||||
FAILED_BUILDS[${#FAILED_BUILDS[@]}]="${doc_path%/docs}"
|
||||
fi
|
||||
echo "##[endgroup]"
|
||||
done
|
||||
if [[ $FINAL_STATUS -ne 0 ]]; then
|
||||
echo "##[error]The following builds failed: ${FAILED_BUILDS[*]}"
|
||||
exit 1
|
||||
fi
|
||||
displayName: Build Sphinx Documentation
|
||||
@@ -1,57 +0,0 @@
|
||||
steps:
|
||||
# We run brew update because we've seen attempts to install an older version
|
||||
# of a package fail. See
|
||||
# https://github.com/actions/virtual-environments/issues/3165.
|
||||
- bash: |
|
||||
set -e
|
||||
brew update
|
||||
brew install augeas
|
||||
condition: startswith(variables['IMAGE_NAME'], 'macOS')
|
||||
displayName: Install MacOS dependencies
|
||||
- bash: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
python-dev \
|
||||
gcc \
|
||||
libaugeas0 \
|
||||
libssl-dev \
|
||||
libffi-dev \
|
||||
ca-certificates \
|
||||
nginx-light \
|
||||
openssl
|
||||
sudo systemctl stop nginx
|
||||
condition: startswith(variables['IMAGE_NAME'], 'ubuntu')
|
||||
displayName: Install Linux dependencies
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: $(PYTHON_VERSION)
|
||||
addToPath: true
|
||||
# tools/pip_install.py is used to pin packages to a known working version
|
||||
# except in tests where the environment variable CERTBOT_NO_PIN is set.
|
||||
# virtualenv is listed here explicitly to make sure it is upgraded when
|
||||
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
|
||||
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
|
||||
# set, pip updates dependencies it thinks are already satisfied to avoid some
|
||||
# problems with its lack of real dependency resolution.
|
||||
- bash: |
|
||||
set -e
|
||||
python tools/pipstrap.py
|
||||
python tools/pip_install.py -I tox virtualenv
|
||||
displayName: Install runtime dependencies
|
||||
- task: DownloadSecureFile@1
|
||||
name: testFarmPem
|
||||
inputs:
|
||||
secureFile: azure-test-farm.pem
|
||||
condition: contains(variables['TOXENV'], 'test-farm')
|
||||
- bash: |
|
||||
set -e
|
||||
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
|
||||
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
|
||||
env
|
||||
python -m tox
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
|
||||
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
|
||||
AWS_EC2_PEM_FILE: $(testFarmPem.secureFilePath)
|
||||
displayName: Run tox
|
||||
38
.azure-pipelines/templates/tests-suite.yml
Normal file
38
.azure-pipelines/templates/tests-suite.yml
Normal file
@@ -0,0 +1,38 @@
|
||||
jobs:
|
||||
- job: test
|
||||
pool:
|
||||
vmImage: vs2017-win2016
|
||||
strategy:
|
||||
matrix:
|
||||
py35:
|
||||
PYTHON_VERSION: 3.5
|
||||
TOXENV: py35
|
||||
py37-cover:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: py37-cover
|
||||
integration-certbot:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: integration-certbot
|
||||
PYTEST_ADDOPTS: --numprocesses 4
|
||||
variables:
|
||||
- group: certbot-common
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: $(PYTHON_VERSION)
|
||||
addToPath: true
|
||||
- script: python tools/pip_install.py -U tox coverage
|
||||
displayName: Install dependencies
|
||||
- script: python -m tox
|
||||
displayName: Run tox
|
||||
# We do not require codecov report upload to succeed. So to avoid to break the pipeline if
|
||||
# something goes wrong, each command is suffixed with a command that hides any non zero exit
|
||||
# codes and echoes an informative message instead.
|
||||
- bash: |
|
||||
curl -s https://codecov.io/bash -o codecov-bash || echo "Failed to download codecov-bash"
|
||||
chmod +x codecov-bash || echo "Failed to apply execute permissions on codecov-bash"
|
||||
./codecov-bash -F windows || echo "Codecov did not collect coverage reports"
|
||||
condition: in(variables['TOXENV'], 'py37-cover', 'integration-certbot')
|
||||
env:
|
||||
CODECOV_TOKEN: $(codecov_token)
|
||||
displayName: Publish coverage
|
||||
18
.codecov.yml
Normal file
18
.codecov.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
coverage:
|
||||
status:
|
||||
project:
|
||||
default: off
|
||||
linux:
|
||||
flags: linux
|
||||
# Fixed target instead of auto set by #7173, can
|
||||
# be removed when flags in Codecov are added back.
|
||||
target: 97.4
|
||||
threshold: 0.1
|
||||
base: auto
|
||||
windows:
|
||||
flags: windows
|
||||
# Fixed target instead of auto set by #7173, can
|
||||
# be removed when flags in Codecov are added back.
|
||||
target: 97.4
|
||||
threshold: 0.1
|
||||
base: auto
|
||||
@@ -8,4 +8,5 @@
|
||||
.git
|
||||
.tox
|
||||
venv
|
||||
venv3
|
||||
docs
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
# https://editorconfig.org/
|
||||
|
||||
root = true
|
||||
|
||||
[*]
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
end_of_line = lf
|
||||
|
||||
[*.py]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
charset = utf-8
|
||||
max_line_length = 100
|
||||
|
||||
[*.yaml]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
12
.envrc
12
.envrc
@@ -1,12 +0,0 @@
|
||||
# This file is just a nicety for developers who use direnv. When you cd under
|
||||
# the Certbot repo, Certbot's virtual environment will be automatically
|
||||
# activated and then deactivated when you cd elsewhere. Developers have to have
|
||||
# direnv set up and run `direnv allow` to allow this file to execute on their
|
||||
# system. You can find more information at https://direnv.net/.
|
||||
. venv/bin/activate
|
||||
# direnv doesn't support modifying PS1 so we unset it to squelch the error
|
||||
# it'll otherwise print about this being done in the activate script. See
|
||||
# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
|
||||
# prompt to change like it normally does, see
|
||||
# https://github.com/direnv/direnv/wiki/Python#restoring-the-ps1.
|
||||
unset PS1
|
||||
1
.github/FUNDING.yml
vendored
1
.github/FUNDING.yml
vendored
@@ -1 +0,0 @@
|
||||
custom: https://supporters.eff.org/donate/support-work-on-certbot
|
||||
25
.gitignore
vendored
25
.gitignore
vendored
@@ -4,16 +4,16 @@
|
||||
build/
|
||||
dist*/
|
||||
/venv*/
|
||||
/kgs/
|
||||
/.tox/
|
||||
/releases*/
|
||||
/log*
|
||||
letsencrypt.log
|
||||
certbot.log
|
||||
poetry.lock
|
||||
letsencrypt-auto-source/letsencrypt-auto.sig.lzma.base64
|
||||
|
||||
# coverage
|
||||
.coverage
|
||||
.coverage.*
|
||||
/htmlcov/
|
||||
|
||||
/.vagrant
|
||||
@@ -26,11 +26,15 @@ tags
|
||||
\#*#
|
||||
.idea
|
||||
.ropeproject
|
||||
.vscode
|
||||
|
||||
# auth --cert-path --chain-path
|
||||
/*.pem
|
||||
|
||||
# letstest
|
||||
tests/letstest/letest-*/
|
||||
tests/letstest/*.pem
|
||||
tests/letstest/venv/
|
||||
|
||||
.venv
|
||||
|
||||
# pytest cache
|
||||
@@ -45,18 +49,3 @@ tags
|
||||
.certbot_test_workspace
|
||||
**/assets/pebble*
|
||||
**/assets/challtestsrv*
|
||||
|
||||
# snap files
|
||||
.snapcraft
|
||||
parts
|
||||
prime
|
||||
stage
|
||||
*.snap
|
||||
snap-constraints.txt
|
||||
qemu-*
|
||||
certbot-dns*/certbot-dns*_amd64*.txt
|
||||
certbot-dns*/certbot-dns*_arm*.txt
|
||||
/certbot_amd64*.txt
|
||||
/certbot_arm*.txt
|
||||
certbot-dns*/snap
|
||||
snapcraft.cfg
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
[settings]
|
||||
skip_glob=venv*
|
||||
skip=letsencrypt-auto-source
|
||||
force_sort_within_sections=True
|
||||
force_single_line=True
|
||||
order_by_type=False
|
||||
|
||||
27
.pylintrc
27
.pylintrc
@@ -8,10 +8,7 @@ jobs=0
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
# CERTBOT COMMENT
|
||||
# This is needed for pylint to import linter_plugin.py since
|
||||
# https://github.com/PyCQA/pylint/pull/3396.
|
||||
init-hook="import pylint.config, os, sys; sys.path.append(os.path.dirname(pylint.config.PYLINTRC))"
|
||||
#init-hook=
|
||||
|
||||
# Profiled execution.
|
||||
profile=no
|
||||
@@ -56,25 +53,7 @@ extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
|
||||
# See https://github.com/PyCQA/pylint/issues/1498.
|
||||
# 3) Same as point 2 for no-value-for-parameter.
|
||||
# See https://github.com/PyCQA/pylint/issues/2820.
|
||||
# 4) raise-missing-from makes it an error to raise an exception from except
|
||||
# block without using explicit exception chaining. While explicit exception
|
||||
# chaining results in a slightly more informative traceback, I don't think
|
||||
# it's beneficial enough for us to change all of our current instances and
|
||||
# give Certbot developers errors about this when they're working on new code
|
||||
# in the future. You can read more about exception chaining and this pylint
|
||||
# check at
|
||||
# https://blog.ram.rachum.com/post/621791438475296768/improving-python-exception-chaining-with.
|
||||
# 5) wrong-import-order generates false positives and a pylint developer
|
||||
# suggests that people using isort should disable this check at
|
||||
# https://github.com/PyCQA/pylint/issues/3817#issuecomment-687892090.
|
||||
# 6) unspecified-encoding generates errors when encoding is not specified in
|
||||
# in a call to the built-in open function. This relates more to a design decision
|
||||
# (unspecified encoding makes the open function use the default encoding of the system)
|
||||
# than a clear flaw on which a check should be enforced. Anyway the project does
|
||||
# not need to enforce encoding on files so we disable this check.
|
||||
# 7) consider-using-f-string is "suggesting" to move to f-string when possible with an error. This
|
||||
# clearly relates to code design and not to potential defects in the code, let's just ignore that.
|
||||
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue,raise-missing-from,wrong-import-order,unspecified-encoding,consider-using-f-string
|
||||
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue
|
||||
|
||||
[REPORTS]
|
||||
|
||||
@@ -275,7 +254,7 @@ ignore-mixin-members=yes
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis
|
||||
ignored-modules=pkg_resources,confargparse,argparse
|
||||
ignored-modules=pkg_resources,confargparse,argparse,six.moves,six.moves.urllib
|
||||
# import errors ignored only in 1.4.4
|
||||
# https://bitbucket.org/logilab/pylint/commits/cd000904c9e2
|
||||
|
||||
|
||||
309
.travis.yml
Normal file
309
.travis.yml
Normal file
@@ -0,0 +1,309 @@
|
||||
language: python
|
||||
dist: xenial
|
||||
|
||||
cache:
|
||||
directories:
|
||||
- $HOME/.cache/pip
|
||||
|
||||
before_script:
|
||||
- 'if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then ulimit -n 1024 ; fi'
|
||||
# On Travis, the fastest parallelization for integration tests has proved to be 4.
|
||||
- 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
|
||||
# Use Travis retry feature for farm tests since they are flaky
|
||||
- 'if [[ "$TOXENV" == "travis-test-farm"* ]]; then export TRAVIS_RETRY=travis_retry; fi'
|
||||
- export TOX_TESTENV_PASSENV=TRAVIS
|
||||
|
||||
# Only build pushes to the master branch, PRs, and branches beginning with
|
||||
# `test-` or of the form `digit(s).digit(s).x`. This reduces the number of
|
||||
# simultaneous Travis runs, which speeds turnaround time on review since there
|
||||
# is a cap of on the number of simultaneous runs.
|
||||
branches:
|
||||
only:
|
||||
# apache-parser-v2 is a temporary branch for doing work related to
|
||||
# rewriting the parser in the Apache plugin.
|
||||
- apache-parser-v2
|
||||
- master
|
||||
- /^\d+\.\d+\.x$/
|
||||
- /^test-.*$/
|
||||
|
||||
# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
|
||||
not-on-master: ¬-on-master
|
||||
if: NOT (type = push AND branch = master)
|
||||
|
||||
# Jobs for the extended test suite are executed for cron jobs and pushes to
|
||||
# non-development branches. See the explanation for apache-parser-v2 above.
|
||||
extended-test-suite: &extended-test-suite
|
||||
if: type = cron OR (type = push AND branch NOT IN (apache-parser-v2, master))
|
||||
|
||||
matrix:
|
||||
include:
|
||||
# Main test suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=pebble TOXENV=integration
|
||||
<<: *not-on-master
|
||||
|
||||
# This job is always executed, including on master
|
||||
- python: "2.7"
|
||||
env: TOXENV=py27-cover FYI="py27 tests + code coverage"
|
||||
|
||||
- python: "3.7"
|
||||
env: TOXENV=lint
|
||||
<<: *not-on-master
|
||||
- python: "3.5"
|
||||
env: TOXENV=mypy
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
env: TOXENV='py27-{acme,apache,apache-v2,certbot,dns,nginx}-oldest'
|
||||
<<: *not-on-master
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34
|
||||
<<: *not-on-master
|
||||
- python: "3.7"
|
||||
env: TOXENV=py37
|
||||
<<: *not-on-master
|
||||
- python: "3.8"
|
||||
env: TOXENV=py38
|
||||
<<: *not-on-master
|
||||
- sudo: required
|
||||
env: TOXENV=apache_compat
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
<<: *not-on-master
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_xenial
|
||||
services: docker
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
env: TOXENV=apacheconftest-with-pebble
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
env: TOXENV=nginxroundtrip
|
||||
<<: *not-on-master
|
||||
|
||||
# Extended test suite on cron jobs and pushes to tested branches other than master
|
||||
- sudo: required
|
||||
env: TOXENV=nginx_compat
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-apache2
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-leauto-upgrades
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
git:
|
||||
depth: false # This is needed to have the history to checkout old versions of certbot-auto.
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-certonly-standalone
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-sdists
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
env: TOXENV=py37 CERTBOT_NO_PIN=1
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration-certbot-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration-certbot-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration-nginx-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration-nginx-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34
|
||||
<<: *extended-test-suite
|
||||
- python: "3.5"
|
||||
env: TOXENV=py35
|
||||
<<: *extended-test-suite
|
||||
- python: "3.6"
|
||||
env: TOXENV=py36
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
env: TOXENV=py37
|
||||
<<: *extended-test-suite
|
||||
- python: "3.8"
|
||||
env: TOXENV=py38
|
||||
<<: *extended-test-suite
|
||||
- python: "3.4"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.4"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.5"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.5"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.6"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.6"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.8"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
<<: *extended-test-suite
|
||||
- python: "3.8"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
<<: *extended-test-suite
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_jessie
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_centos6
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- sudo: required
|
||||
env: TOXENV=docker_dev
|
||||
services: docker
|
||||
addons:
|
||||
apt:
|
||||
packages: # don't install nginx and apache
|
||||
- libaugeas0
|
||||
<<: *extended-test-suite
|
||||
- language: generic
|
||||
env: TOXENV=py27
|
||||
os: osx
|
||||
# Using this osx_image is a workaround for
|
||||
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
|
||||
osx_image: xcode10.2
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
- augeas
|
||||
- python2
|
||||
<<: *extended-test-suite
|
||||
- language: generic
|
||||
env: TOXENV=py3
|
||||
os: osx
|
||||
# Using this osx_image is a workaround for
|
||||
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
|
||||
osx_image: xcode10.2
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
- augeas
|
||||
- python3
|
||||
<<: *extended-test-suite
|
||||
|
||||
# container-based infrastructure
|
||||
sudo: false
|
||||
|
||||
addons:
|
||||
apt:
|
||||
packages: # Keep in sync with letsencrypt-auto-source/pieces/bootstrappers/deb_common.sh and Boulder.
|
||||
- python-dev
|
||||
- gcc
|
||||
- libaugeas0
|
||||
- libssl-dev
|
||||
- libffi-dev
|
||||
- ca-certificates
|
||||
# For certbot-nginx integration testing
|
||||
- nginx-light
|
||||
- openssl
|
||||
|
||||
# tools/pip_install.py is used to pin packages to a known working version
|
||||
# except in tests where the environment variable CERTBOT_NO_PIN is set.
|
||||
# virtualenv is listed here explicitly to make sure it is upgraded when
|
||||
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
|
||||
# version of virtualenv.
|
||||
install: 'tools/pip_install.py -U codecov tox virtualenv'
|
||||
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
|
||||
# script command. It is set only to `travis_retry` during farm tests, in
|
||||
# order to trigger the Travis retry feature, and compensate the inherent
|
||||
# flakiness of these specific tests.
|
||||
script: '$TRAVIS_RETRY tox'
|
||||
|
||||
after_success: '[ "$TOXENV" == "py27-cover" ] && codecov -F linux'
|
||||
|
||||
notifications:
|
||||
email: false
|
||||
irc:
|
||||
channels:
|
||||
# This is set to a secure variable to prevent forks from sending
|
||||
# notifications. This value was created by installing
|
||||
# https://github.com/travis-ci/travis.rb and running
|
||||
# `travis encrypt "chat.freenode.net#certbot-devel"`.
|
||||
- secure: "EWW66E2+KVPZyIPR8ViENZwfcup4Gx3/dlimmAZE0WuLwxDCshBBOd3O8Rf6pBokEoZlXM5eDT6XdyJj8n0DLslgjO62pExdunXpbcMwdY7l1ELxX2/UbnDTE6UnPYa09qVBHNG7156Z6yE0x2lH4M9Ykvp0G0cubjPQHylAwo0="
|
||||
on_cancel: never
|
||||
on_success: never
|
||||
on_failure: always
|
||||
26
AUTHORS.md
26
AUTHORS.md
@@ -1,7 +1,6 @@
|
||||
Authors
|
||||
=======
|
||||
|
||||
* [Aaron Gable](https://github.com/aarongable)
|
||||
* [Aaron Zirbes](https://github.com/aaronzirbes)
|
||||
* Aaron Zuehlke
|
||||
* Ada Lovelace
|
||||
@@ -17,15 +16,11 @@ Authors
|
||||
* [Alex Halderman](https://github.com/jhalderm)
|
||||
* [Alex Jordan](https://github.com/strugee)
|
||||
* [Alex Zorin](https://github.com/alexzorin)
|
||||
* [Alexis Hancock](https://github.com/zoracon)
|
||||
* [Amir Omidi](https://github.com/aaomidi)
|
||||
* [Amjad Mashaal](https://github.com/TheNavigat)
|
||||
* [amplifi](https://github.com/amplifi)
|
||||
* [Andrew Murray](https://github.com/radarhere)
|
||||
* [Andrzej Górski](https://github.com/andrzej3393)
|
||||
* [Anselm Levskaya](https://github.com/levskaya)
|
||||
* [Antoine Jacoutot](https://github.com/ajacoutot)
|
||||
* [April King](https://github.com/april)
|
||||
* [asaph](https://github.com/asaph)
|
||||
* [Axel Beckert](https://github.com/xtaran)
|
||||
* [Bas](https://github.com/Mechazawa)
|
||||
@@ -40,9 +35,7 @@ Authors
|
||||
* [Blake Griffith](https://github.com/cowlicks)
|
||||
* [Brad Warren](https://github.com/bmw)
|
||||
* [Brandon Kraft](https://github.com/kraftbj)
|
||||
* [Brandon Kreisel](https://github.com/BKreisel)
|
||||
* [Brian Heim](https://github.com/brianlheim)
|
||||
* [Cameron Steel](https://github.com/Tugzrida)
|
||||
* [Brandon Kreisel](https://github.com/kraftbj)
|
||||
* [Ceesjan Luiten](https://github.com/quinox)
|
||||
* [Chad Whitacre](https://github.com/whit537)
|
||||
* [Chhatoi Pritam Baral](https://github.com/pritambaral)
|
||||
@@ -64,9 +57,7 @@ Authors
|
||||
* [DanCld](https://github.com/DanCld)
|
||||
* [Daniel Albers](https://github.com/AID)
|
||||
* [Daniel Aleksandersen](https://github.com/da2x)
|
||||
* [Daniel Almasi](https://github.com/almasen)
|
||||
* [Daniel Convissor](https://github.com/convissor)
|
||||
* [Daniel "Drex" Drexler](https://github.com/aeturnum)
|
||||
* [Daniel Huang](https://github.com/dhuang)
|
||||
* [Dave Guarino](https://github.com/daguar)
|
||||
* [David cz](https://github.com/dave-cz)
|
||||
@@ -91,7 +82,6 @@ Authors
|
||||
* [Felix Schwarz](https://github.com/FelixSchwarz)
|
||||
* [Felix Yan](https://github.com/felixonmars)
|
||||
* [Filip Ochnik](https://github.com/filipochnik)
|
||||
* [Florian Klink](https://github.com/flokli)
|
||||
* [Francois Marier](https://github.com/fmarier)
|
||||
* [Frank](https://github.com/Frankkkkk)
|
||||
* [Frederic BLANC](https://github.com/fblanc)
|
||||
@@ -110,15 +100,12 @@ Authors
|
||||
* [Harlan Lieberman-Berg](https://github.com/hlieberman)
|
||||
* [Henri Salo](https://github.com/fgeek)
|
||||
* [Henry Chen](https://github.com/henrychen95)
|
||||
* [Hugo van Kemenade](https://github.com/hugovk)
|
||||
* [Ingolf Becker](https://github.com/watercrossing)
|
||||
* [Ivan Nejgebauer](https://github.com/inejge)
|
||||
* [Jaap Eldering](https://github.com/eldering)
|
||||
* [Jacob Hoffman-Andrews](https://github.com/jsha)
|
||||
* [Jacob Sachs](https://github.com/jsachs)
|
||||
* [Jairo Llopis](https://github.com/Yajo)
|
||||
* [Jakub Warmuz](https://github.com/kuba)
|
||||
* [James Balazs](https://github.com/jamesbalazs)
|
||||
* [James Kasten](https://github.com/jdkasten)
|
||||
* [Jason Grinblat](https://github.com/ptychomancer)
|
||||
* [Jay Faulkner](https://github.com/jayofdoom)
|
||||
@@ -137,12 +124,10 @@ Authors
|
||||
* [Jonathan Herlin](https://github.com/Jonher937)
|
||||
* [Jon Walsh](https://github.com/code-tree)
|
||||
* [Joona Hoikkala](https://github.com/joohoi)
|
||||
* [Josh McCullough](https://github.com/JoshMcCullough)
|
||||
* [Josh Soref](https://github.com/jsoref)
|
||||
* [Joubin Jabbari](https://github.com/joubin)
|
||||
* [Juho Juopperi](https://github.com/jkjuopperi)
|
||||
* [Kane York](https://github.com/riking)
|
||||
* [Katsuyoshi Ozaki](https://github.com/moratori)
|
||||
* [Kenichi Maehashi](https://github.com/kmaehashi)
|
||||
* [Kenneth Skovhede](https://github.com/kenkendk)
|
||||
* [Kevin Burke](https://github.com/kevinburke)
|
||||
@@ -156,13 +141,11 @@ Authors
|
||||
* [Lior Sabag](https://github.com/liorsbg)
|
||||
* [Lipis](https://github.com/lipis)
|
||||
* [lord63](https://github.com/lord63)
|
||||
* [Lorenzo Fundaró](https://github.com/lfundaro)
|
||||
* [Luca Beltrame](https://github.com/lbeltrame)
|
||||
* [Luca Ebach](https://github.com/lucebac)
|
||||
* [Luca Olivetti](https://github.com/olivluca)
|
||||
* [Luke Rogers](https://github.com/lukeroge)
|
||||
* [Maarten](https://github.com/mrtndwrd)
|
||||
* [Mads Jensen](https://github.com/atombrella)
|
||||
* [Maikel Martens](https://github.com/krukas)
|
||||
* [Malte Janduda](https://github.com/MalteJ)
|
||||
* [Mantas Mikulėnas](https://github.com/grawity)
|
||||
@@ -202,7 +185,6 @@ Authors
|
||||
* [osirisinferi](https://github.com/osirisinferi)
|
||||
* Patrick Figel
|
||||
* [Patrick Heppler](https://github.com/PatrickHeppler)
|
||||
* [Paul Buonopane](https://github.com/Zenexer)
|
||||
* [Paul Feitzinger](https://github.com/pfeyz)
|
||||
* [Pavan Gupta](https://github.com/pavgup)
|
||||
* [Pavel Pavlov](https://github.com/ghost355)
|
||||
@@ -215,7 +197,6 @@ Authors
|
||||
* [Pierre Jaury](https://github.com/kaiyou)
|
||||
* [Piotr Kasprzyk](https://github.com/kwadrat)
|
||||
* [Prayag Verma](https://github.com/pra85)
|
||||
* [Rasesh Patel](https://github.com/raspat1)
|
||||
* [Reinaldo de Souza Jr](https://github.com/juniorz)
|
||||
* [Remi Rampin](https://github.com/remram44)
|
||||
* [Rémy HUBSCHER](https://github.com/Natim)
|
||||
@@ -223,7 +204,6 @@ Authors
|
||||
* [Richard Barnes](https://github.com/r-barnes)
|
||||
* [Richard Panek](https://github.com/kernelpanek)
|
||||
* [Robert Buchholz](https://github.com/rbu)
|
||||
* [Robert Dailey](https://github.com/pahrohfit)
|
||||
* [Robert Habermann](https://github.com/frennkie)
|
||||
* [Robert Xiao](https://github.com/nneonneo)
|
||||
* [Roland Shoemaker](https://github.com/rolandshoemaker)
|
||||
@@ -249,11 +229,9 @@ Authors
|
||||
* [Spencer Bliven](https://github.com/sbliven)
|
||||
* [Stacey Sheldon](https://github.com/solidgoldbomb)
|
||||
* [Stavros Korokithakis](https://github.com/skorokithakis)
|
||||
* [Ștefan Talpalaru](https://github.com/stefantalpalaru)
|
||||
* [Stefan Weil](https://github.com/stweil)
|
||||
* [Steve Desmond](https://github.com/stevedesmond-ca)
|
||||
* [sydneyli](https://github.com/sydneyli)
|
||||
* [taixx046](https://github.com/taixx046)
|
||||
* [Tan Jay Jun](https://github.com/jayjun)
|
||||
* [Tapple Gao](https://github.com/tapple)
|
||||
* [Telepenin Nikolay](https://github.com/telepenin)
|
||||
@@ -278,7 +256,6 @@ Authors
|
||||
* [Wilfried Teiken](https://github.com/wteiken)
|
||||
* [Willem Fibbe](https://github.com/fibbers)
|
||||
* [William Budington](https://github.com/Hainish)
|
||||
* [Will Greenberg](https://github.com/wgreenberg)
|
||||
* [Will Newby](https://github.com/willnewby)
|
||||
* [Will Oller](https://github.com/willoller)
|
||||
* [Yan](https://github.com/diracdeltas)
|
||||
@@ -286,6 +263,5 @@ Authors
|
||||
* [Yomna](https://github.com/ynasser)
|
||||
* [Yoni Jah](https://github.com/yonjah)
|
||||
* [YourDaddyIsHere](https://github.com/YourDaddyIsHere)
|
||||
* [Yuseong Cho](https://github.com/g6123)
|
||||
* [Zach Shepherd](https://github.com/zjs)
|
||||
* [陈三](https://github.com/chenxsan)
|
||||
|
||||
@@ -11,7 +11,7 @@ to the Sphinx generated docs is provided below.
|
||||
|
||||
|
||||
[1] https://github.com/blog/1184-contributing-guidelines
|
||||
[2] https://docutils.sourceforge.io/docs/user/rst/quickref.html#hyperlink-targets
|
||||
[2] http://docutils.sourceforge.net/docs/user/rst/quickref.html#hyperlink-targets
|
||||
|
||||
-->
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# This Dockerfile builds an image for development.
|
||||
FROM ubuntu:focal
|
||||
FROM debian:buster
|
||||
|
||||
# Note: this only exposes the port to other docker containers.
|
||||
EXPOSE 80 443
|
||||
@@ -8,14 +8,13 @@ WORKDIR /opt/certbot/src
|
||||
|
||||
COPY . .
|
||||
RUN apt-get update && \
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install apache2 git python3-dev \
|
||||
python3-venv gcc libaugeas0 libssl-dev libffi-dev ca-certificates \
|
||||
openssl nginx-light -y --no-install-recommends && \
|
||||
apt-get install apache2 git python3-dev python3-venv gcc libaugeas0 \
|
||||
libssl-dev libffi-dev ca-certificates openssl nginx-light -y && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/* \
|
||||
/tmp/* \
|
||||
/var/tmp/*
|
||||
|
||||
RUN VENV_NAME="../venv" python3 tools/venv.py
|
||||
RUN VENV_NAME="../venv3" python3 tools/venv3.py
|
||||
|
||||
ENV PATH /opt/certbot/venv/bin:$PATH
|
||||
ENV PATH /opt/certbot/venv3/bin:$PATH
|
||||
|
||||
@@ -7,7 +7,7 @@ questions.
|
||||
## My operating system is (include version):
|
||||
|
||||
|
||||
## I installed Certbot with (snap, OS package manager, pip, certbot-auto, etc):
|
||||
## I installed Certbot with (certbot-auto, OS package manager, pip, etc):
|
||||
|
||||
|
||||
## I ran this command and it produced this output:
|
||||
@@ -4,6 +4,5 @@ include pytest.ini
|
||||
recursive-include docs *
|
||||
recursive-include examples *
|
||||
recursive-include tests *
|
||||
include acme/py.typed
|
||||
global-exclude __pycache__
|
||||
global-exclude *.py[cod]
|
||||
|
||||
@@ -2,10 +2,11 @@
|
||||
|
||||
This module is an implementation of the `ACME protocol`_.
|
||||
|
||||
.. _`ACME protocol`: https://datatracker.ietf.org/doc/html/rfc8555
|
||||
.. _`ACME protocol`: https://ietf-wg-acme.github.io/acme
|
||||
|
||||
"""
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# This code exists to keep backwards compatibility with people using acme.jose
|
||||
# before it became the standalone josepy package.
|
||||
|
||||
@@ -1,58 +1,39 @@
|
||||
"""ACME Identifier Validation Challenges."""
|
||||
import abc
|
||||
import codecs
|
||||
import functools
|
||||
import hashlib
|
||||
import logging
|
||||
import socket
|
||||
from typing import cast
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TypeVar
|
||||
from typing import Union
|
||||
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||
import josepy as jose
|
||||
from OpenSSL import crypto
|
||||
from OpenSSL import SSL
|
||||
import requests
|
||||
import six
|
||||
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import fields
|
||||
from acme.mixins import ResourceMixin
|
||||
from acme.mixins import TypeMixin
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
GenericChallenge = TypeVar('GenericChallenge', bound='Challenge')
|
||||
|
||||
|
||||
class Challenge(jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge."""
|
||||
TYPES: Dict[str, Type['Challenge']] = {}
|
||||
TYPES = {} # type: dict
|
||||
|
||||
@classmethod
|
||||
def from_json(cls: Type[GenericChallenge],
|
||||
jobj: Mapping[str, Any]) -> Union[GenericChallenge, 'UnrecognizedChallenge']:
|
||||
def from_json(cls, jobj):
|
||||
try:
|
||||
return cast(GenericChallenge, super().from_json(jobj))
|
||||
return super(Challenge, cls).from_json(jobj)
|
||||
except jose.UnrecognizedTypeError as error:
|
||||
logger.debug(error)
|
||||
return UnrecognizedChallenge.from_json(jobj)
|
||||
|
||||
|
||||
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
|
||||
class ChallengeResponse(jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge response."""
|
||||
TYPES: Dict[str, Type['ChallengeResponse']] = {}
|
||||
TYPES = {} # type: dict
|
||||
resource_type = 'challenge'
|
||||
resource: str = fields.resource(resource_type)
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class UnrecognizedChallenge(Challenge):
|
||||
@@ -67,17 +48,16 @@ class UnrecognizedChallenge(Challenge):
|
||||
:ivar jobj: Original JSON decoded object.
|
||||
|
||||
"""
|
||||
jobj: Dict[str, Any]
|
||||
|
||||
def __init__(self, jobj: Mapping[str, Any]) -> None:
|
||||
super().__init__()
|
||||
def __init__(self, jobj):
|
||||
super(UnrecognizedChallenge, self).__init__()
|
||||
object.__setattr__(self, "jobj", jobj)
|
||||
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
def to_partial_json(self):
|
||||
return self.jobj # pylint: disable=no-member
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj: Mapping[str, Any]) -> 'UnrecognizedChallenge':
|
||||
def from_json(cls, jobj):
|
||||
return cls(jobj)
|
||||
|
||||
|
||||
@@ -91,13 +71,13 @@ class _TokenChallenge(Challenge):
|
||||
"""Minimum size of the :attr:`token` in bytes."""
|
||||
|
||||
# TODO: acme-spec doesn't specify token as base64-encoded value
|
||||
token: bytes = jose.field(
|
||||
token = jose.Field(
|
||||
"token", encoder=jose.encode_b64jose, decoder=functools.partial(
|
||||
jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))
|
||||
|
||||
# XXX: rename to ~token_good_for_url
|
||||
@property
|
||||
def good_token(self) -> bool: # XXX: @token.decoder
|
||||
def good_token(self): # XXX: @token.decoder
|
||||
"""Is `token` good?
|
||||
|
||||
.. todo:: acme-spec wants "It MUST NOT contain any non-ASCII
|
||||
@@ -114,13 +94,13 @@ class _TokenChallenge(Challenge):
|
||||
class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
"""Response to Challenges based on Key Authorization.
|
||||
|
||||
:param str key_authorization:
|
||||
:param unicode key_authorization:
|
||||
|
||||
"""
|
||||
key_authorization: str = jose.field("keyAuthorization")
|
||||
key_authorization = jose.Field("keyAuthorization")
|
||||
thumbprint_hash_function = hashes.SHA256
|
||||
|
||||
def verify(self, chall: 'KeyAuthorizationChallenge', account_public_key: jose.JWK) -> bool:
|
||||
def verify(self, chall, account_public_key):
|
||||
"""Verify the key authorization.
|
||||
|
||||
:param KeyAuthorization chall: Challenge that corresponds to
|
||||
@@ -132,7 +112,7 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
parts = self.key_authorization.split('.') # pylint: disable=no-member
|
||||
parts = self.key_authorization.split('.')
|
||||
if len(parts) != 2:
|
||||
logger.debug("Key authorization (%r) is not well formed",
|
||||
self.key_authorization)
|
||||
@@ -152,39 +132,37 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
|
||||
return True
|
||||
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
jobj = super().to_partial_json()
|
||||
def to_partial_json(self):
|
||||
jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
|
||||
jobj.pop('keyAuthorization', None)
|
||||
return jobj
|
||||
|
||||
|
||||
# TODO: Make this method a generic of K (bound=KeyAuthorizationChallenge), response_cls of type
|
||||
# Type[K] and use it in response/response_and_validation return types once Python 3.6 support is
|
||||
# dropped (do not support generic ABC classes, see https://github.com/python/typing/issues/449).
|
||||
class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class KeyAuthorizationChallenge(_TokenChallenge):
|
||||
"""Challenge based on Key Authorization.
|
||||
|
||||
:param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
|
||||
that will be used to generate ``response``.
|
||||
that will be used to generate `response`.
|
||||
:param str typ: type of the challenge
|
||||
"""
|
||||
typ: str = NotImplemented
|
||||
response_cls: Type[KeyAuthorizationChallengeResponse] = NotImplemented
|
||||
typ = NotImplemented
|
||||
response_cls = NotImplemented
|
||||
thumbprint_hash_function = (
|
||||
KeyAuthorizationChallengeResponse.thumbprint_hash_function)
|
||||
|
||||
def key_authorization(self, account_key: jose.JWK) -> str:
|
||||
def key_authorization(self, account_key):
|
||||
"""Generate Key Authorization.
|
||||
|
||||
:param JWK account_key:
|
||||
:rtype str:
|
||||
:rtype unicode:
|
||||
|
||||
"""
|
||||
return self.encode("token") + "." + jose.b64encode(
|
||||
account_key.thumbprint(
|
||||
hash_function=self.thumbprint_hash_function)).decode()
|
||||
|
||||
def response(self, account_key: jose.JWK) -> KeyAuthorizationChallengeResponse:
|
||||
def response(self, account_key):
|
||||
"""Generate response to the challenge.
|
||||
|
||||
:param JWK account_key:
|
||||
@@ -197,7 +175,7 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
|
||||
key_authorization=self.key_authorization(account_key))
|
||||
|
||||
@abc.abstractmethod
|
||||
def validation(self, account_key: jose.JWK, **kwargs: Any) -> Any:
|
||||
def validation(self, account_key, **kwargs):
|
||||
"""Generate validation for the challenge.
|
||||
|
||||
Subclasses must implement this method, but they are likely to
|
||||
@@ -211,8 +189,7 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
|
||||
"""
|
||||
raise NotImplementedError() # pragma: no cover
|
||||
|
||||
def response_and_validation(self, account_key: jose.JWK, *args: Any, **kwargs: Any
|
||||
) -> Tuple[KeyAuthorizationChallengeResponse, Any]:
|
||||
def response_and_validation(self, account_key, *args, **kwargs):
|
||||
"""Generate response and validation.
|
||||
|
||||
Convenience function that return results of `response` and
|
||||
@@ -231,14 +208,14 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
|
||||
"""ACME dns-01 challenge response."""
|
||||
typ = "dns-01"
|
||||
|
||||
def simple_verify(self, chall: 'DNS01', domain: str, account_public_key: jose.JWK) -> bool: # pylint: disable=unused-argument
|
||||
def simple_verify(self, chall, domain, account_public_key): # pylint: disable=unused-argument
|
||||
"""Simple verify.
|
||||
|
||||
This method no longer checks DNS records and is a simple wrapper
|
||||
around `KeyAuthorizationChallengeResponse.verify`.
|
||||
|
||||
:param challenges.DNS01 chall: Corresponding challenge.
|
||||
:param str domain: Domain name being verified.
|
||||
:param unicode domain: Domain name being verified.
|
||||
:param JWK account_public_key: Public key for the key pair
|
||||
being authorized.
|
||||
|
||||
@@ -262,24 +239,23 @@ class DNS01(KeyAuthorizationChallenge):
|
||||
LABEL = "_acme-challenge"
|
||||
"""Label clients prepend to the domain name being validated."""
|
||||
|
||||
def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
|
||||
def validation(self, account_key, **unused_kwargs):
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
:rtype: str
|
||||
:rtype: unicode
|
||||
|
||||
"""
|
||||
return jose.b64encode(hashlib.sha256(self.key_authorization(
|
||||
account_key).encode("utf-8")).digest()).decode()
|
||||
|
||||
def validation_domain_name(self, name: str) -> str:
|
||||
def validation_domain_name(self, name):
|
||||
"""Domain name for TXT validation record.
|
||||
|
||||
:param str name: Domain name being validated.
|
||||
:rtype: str
|
||||
:param unicode name: Domain name being validated.
|
||||
|
||||
"""
|
||||
return f"{self.LABEL}.{name}"
|
||||
return "{0}.{1}".format(self.LABEL, name)
|
||||
|
||||
|
||||
@ChallengeResponse.register
|
||||
@@ -298,12 +274,11 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
|
||||
WHITESPACE_CUTSET = "\n\r\t "
|
||||
"""Whitespace characters which should be ignored at the end of the body."""
|
||||
|
||||
def simple_verify(self, chall: 'HTTP01', domain: str, account_public_key: jose.JWK,
|
||||
port: Optional[int] = None) -> bool:
|
||||
def simple_verify(self, chall, domain, account_public_key, port=None):
|
||||
"""Simple verify.
|
||||
|
||||
:param challenges.SimpleHTTP chall: Corresponding challenge.
|
||||
:param str domain: Domain name being verified.
|
||||
:param unicode domain: Domain name being verified.
|
||||
:param JWK account_public_key: Public key for the key pair
|
||||
being authorized.
|
||||
:param int port: Port used in the validation.
|
||||
@@ -328,19 +303,10 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
|
||||
uri = chall.uri(domain)
|
||||
logger.debug("Verifying %s at %s...", chall.typ, uri)
|
||||
try:
|
||||
http_response = requests.get(uri, verify=False)
|
||||
http_response = requests.get(uri)
|
||||
except requests.exceptions.RequestException as error:
|
||||
logger.error("Unable to reach %s: %s", uri, error)
|
||||
return False
|
||||
# By default, http_response.text will try to guess the encoding to use
|
||||
# when decoding the response to Python unicode strings. This guesswork
|
||||
# is error prone. RFC 8555 specifies that HTTP-01 responses should be
|
||||
# key authorizations with possible trailing whitespace. Since key
|
||||
# authorizations must be composed entirely of the base64url alphabet
|
||||
# plus ".", we tell requests that the response should be ASCII. See
|
||||
# https://datatracker.ietf.org/doc/html/rfc8555#section-8.3 for more
|
||||
# info.
|
||||
http_response.encoding = "ascii"
|
||||
logger.debug("Received %s: %s. Headers: %s", http_response,
|
||||
http_response.text, http_response.headers)
|
||||
|
||||
@@ -364,31 +330,31 @@ class HTTP01(KeyAuthorizationChallenge):
|
||||
"""URI root path for the server provisioned resource."""
|
||||
|
||||
@property
|
||||
def path(self) -> str:
|
||||
def path(self):
|
||||
"""Path (starting with '/') for provisioned resource.
|
||||
|
||||
:rtype: str
|
||||
:rtype: string
|
||||
|
||||
"""
|
||||
return '/' + self.URI_ROOT_PATH + '/' + self.encode('token')
|
||||
|
||||
def uri(self, domain: str) -> str:
|
||||
def uri(self, domain):
|
||||
"""Create an URI to the provisioned resource.
|
||||
|
||||
Forms an URI to the HTTPS server provisioned resource
|
||||
(containing :attr:`~SimpleHTTP.token`).
|
||||
|
||||
:param str domain: Domain name being verified.
|
||||
:rtype: str
|
||||
:param unicode domain: Domain name being verified.
|
||||
:rtype: string
|
||||
|
||||
"""
|
||||
return "http://" + domain + self.path
|
||||
|
||||
def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
|
||||
def validation(self, account_key, **unused_kwargs):
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
:rtype: str
|
||||
:rtype: unicode
|
||||
|
||||
"""
|
||||
return self.key_authorization(account_key)
|
||||
@@ -396,169 +362,29 @@ class HTTP01(KeyAuthorizationChallenge):
|
||||
|
||||
@ChallengeResponse.register
|
||||
class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
"""ACME tls-alpn-01 challenge response."""
|
||||
"""ACME TLS-ALPN-01 challenge response.
|
||||
|
||||
This class only allows initiating a TLS-ALPN-01 challenge returned from the
|
||||
CA. Full support for responding to TLS-ALPN-01 challenges by generating and
|
||||
serving the expected response certificate is not currently provided.
|
||||
"""
|
||||
typ = "tls-alpn-01"
|
||||
|
||||
PORT = 443
|
||||
"""Verification port as defined by the protocol.
|
||||
|
||||
You can override it (e.g. for testing) by passing ``port`` to
|
||||
`simple_verify`.
|
||||
@Challenge.register
|
||||
class TLSALPN01(KeyAuthorizationChallenge):
|
||||
"""ACME tls-alpn-01 challenge.
|
||||
|
||||
This class simply allows parsing the TLS-ALPN-01 challenge returned from
|
||||
the CA. Full TLS-ALPN-01 support is not currently provided.
|
||||
|
||||
"""
|
||||
|
||||
ID_PE_ACME_IDENTIFIER_V1 = b"1.3.6.1.5.5.7.1.30.1"
|
||||
ACME_TLS_1_PROTOCOL = "acme-tls/1"
|
||||
|
||||
@property
|
||||
def h(self) -> bytes:
|
||||
"""Hash value stored in challenge certificate"""
|
||||
return hashlib.sha256(self.key_authorization.encode('utf-8')).digest()
|
||||
|
||||
def gen_cert(self, domain: str, key: Optional[crypto.PKey] = None, bits: int = 2048
|
||||
) -> Tuple[crypto.X509, crypto.PKey]:
|
||||
"""Generate tls-alpn-01 certificate.
|
||||
|
||||
:param str domain: Domain verified by the challenge.
|
||||
:param OpenSSL.crypto.PKey key: Optional private key used in
|
||||
certificate generation. If not provided (``None``), then
|
||||
fresh key will be generated.
|
||||
:param int bits: Number of bits for newly generated key.
|
||||
|
||||
:rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
|
||||
|
||||
"""
|
||||
if key is None:
|
||||
key = crypto.PKey()
|
||||
key.generate_key(crypto.TYPE_RSA, bits)
|
||||
|
||||
der_value = b"DER:" + codecs.encode(self.h, 'hex')
|
||||
acme_extension = crypto.X509Extension(self.ID_PE_ACME_IDENTIFIER_V1,
|
||||
critical=True, value=der_value)
|
||||
|
||||
return crypto_util.gen_ss_cert(key, [domain], force_san=True,
|
||||
extensions=[acme_extension]), key
|
||||
|
||||
def probe_cert(self, domain: str, host: Optional[str] = None,
|
||||
port: Optional[int] = None) -> crypto.X509:
|
||||
"""Probe tls-alpn-01 challenge certificate.
|
||||
|
||||
:param str domain: domain being validated, required.
|
||||
:param str host: IP address used to probe the certificate.
|
||||
:param int port: Port used to probe the certificate.
|
||||
|
||||
"""
|
||||
if host is None:
|
||||
host = socket.gethostbyname(domain)
|
||||
logger.debug('%s resolved to %s', domain, host)
|
||||
if port is None:
|
||||
port = self.PORT
|
||||
|
||||
return crypto_util.probe_sni(host=host.encode(), port=port, name=domain.encode(),
|
||||
alpn_protocols=[self.ACME_TLS_1_PROTOCOL])
|
||||
|
||||
def verify_cert(self, domain: str, cert: crypto.X509) -> bool:
|
||||
"""Verify tls-alpn-01 challenge certificate.
|
||||
|
||||
:param str domain: Domain name being validated.
|
||||
:param OpensSSL.crypto.X509 cert: Challenge certificate.
|
||||
|
||||
:returns: Whether the certificate was successfully verified.
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
# pylint: disable=protected-access
|
||||
names = crypto_util._pyopenssl_cert_or_req_all_names(cert)
|
||||
# Type ignore needed due to
|
||||
# https://github.com/pyca/pyopenssl/issues/730.
|
||||
logger.debug('Certificate %s. SANs: %s',
|
||||
cert.digest('sha256'), names)
|
||||
if len(names) != 1 or names[0].lower() != domain.lower():
|
||||
return False
|
||||
|
||||
for i in range(cert.get_extension_count()):
|
||||
ext = cert.get_extension(i)
|
||||
# FIXME: assume this is the ACME extension. Currently there is no
|
||||
# way to get full OID of an unknown extension from pyopenssl.
|
||||
if ext.get_short_name() == b'UNDEF':
|
||||
data = ext.get_data()
|
||||
return data == self.h
|
||||
|
||||
return False
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
def simple_verify(self, chall: 'TLSALPN01', domain: str, account_public_key: jose.JWK,
|
||||
cert: Optional[crypto.X509] = None, host: Optional[str] = None,
|
||||
port: Optional[int] = None) -> bool:
|
||||
"""Simple verify.
|
||||
|
||||
Verify ``validation`` using ``account_public_key``, optionally
|
||||
probe tls-alpn-01 certificate and check using `verify_cert`.
|
||||
|
||||
:param .challenges.TLSALPN01 chall: Corresponding challenge.
|
||||
:param str domain: Domain name being validated.
|
||||
:param JWK account_public_key:
|
||||
:param OpenSSL.crypto.X509 cert: Optional certificate. If not
|
||||
provided (``None``) certificate will be retrieved using
|
||||
`probe_cert`.
|
||||
:param string host: IP address used to probe the certificate.
|
||||
:param int port: Port used to probe the certificate.
|
||||
|
||||
|
||||
:returns: ``True`` if and only if client's control of the domain has been verified.
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if not self.verify(chall, account_public_key):
|
||||
logger.debug("Verification of key authorization in response failed")
|
||||
return False
|
||||
|
||||
if cert is None:
|
||||
try:
|
||||
cert = self.probe_cert(domain=domain, host=host, port=port)
|
||||
except errors.Error as error:
|
||||
logger.debug(str(error), exc_info=True)
|
||||
return False
|
||||
|
||||
return self.verify_cert(domain, cert)
|
||||
|
||||
|
||||
@Challenge.register # pylint: disable=too-many-ancestors
|
||||
class TLSALPN01(KeyAuthorizationChallenge):
|
||||
"""ACME tls-alpn-01 challenge."""
|
||||
typ = "tls-alpn-01"
|
||||
response_cls = TLSALPN01Response
|
||||
typ = response_cls.typ
|
||||
|
||||
def validation(self, account_key: jose.JWK, **kwargs: Any) -> Tuple[crypto.X509, crypto.PKey]:
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
:param str domain: Domain verified by the challenge.
|
||||
:param OpenSSL.crypto.PKey cert_key: Optional private key used
|
||||
in certificate generation. If not provided (``None``), then
|
||||
fresh key will be generated.
|
||||
|
||||
:rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
|
||||
|
||||
"""
|
||||
# TODO: Remove cast when response() is generic.
|
||||
return cast(TLSALPN01Response, self.response(account_key)).gen_cert(
|
||||
key=kwargs.get('cert_key'),
|
||||
domain=cast(str, kwargs.get('domain')))
|
||||
|
||||
@staticmethod
|
||||
def is_supported() -> bool:
|
||||
"""
|
||||
Check if TLS-ALPN-01 challenge is supported on this machine.
|
||||
This implies that a recent version of OpenSSL is installed (>= 1.0.2),
|
||||
or a recent cryptography version shipped with the OpenSSL library is installed.
|
||||
|
||||
:returns: ``True`` if TLS-ALPN-01 is supported on this machine, ``False`` otherwise.
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
return (hasattr(SSL.Connection, "set_alpn_protos")
|
||||
and hasattr(SSL.Context, "set_alpn_select_callback"))
|
||||
def validation(self, account_key, **kwargs):
|
||||
"""Generate validation for the challenge."""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@Challenge.register
|
||||
@@ -569,8 +395,7 @@ class DNS(_TokenChallenge):
|
||||
LABEL = "_acme-challenge"
|
||||
"""Label clients prepend to the domain name being validated."""
|
||||
|
||||
def gen_validation(self, account_key: jose.JWK, alg: jose.JWASignature = jose.RS256,
|
||||
**kwargs: Any) -> jose.JWS:
|
||||
def gen_validation(self, account_key, alg=jose.RS256, **kwargs):
|
||||
"""Generate validation.
|
||||
|
||||
:param .JWK account_key: Private account key.
|
||||
@@ -584,7 +409,7 @@ class DNS(_TokenChallenge):
|
||||
payload=self.json_dumps(sort_keys=True).encode('utf-8'),
|
||||
key=account_key, alg=alg, **kwargs)
|
||||
|
||||
def check_validation(self, validation: jose.JWS, account_public_key: jose.JWK) -> bool:
|
||||
def check_validation(self, validation, account_public_key):
|
||||
"""Check validation.
|
||||
|
||||
:param JWS validation:
|
||||
@@ -601,7 +426,7 @@ class DNS(_TokenChallenge):
|
||||
logger.debug("Checking validation for DNS failed: %s", error)
|
||||
return False
|
||||
|
||||
def gen_response(self, account_key: jose.JWK, **kwargs: Any) -> 'DNSResponse':
|
||||
def gen_response(self, account_key, **kwargs):
|
||||
"""Generate response.
|
||||
|
||||
:param .JWK account_key: Private account key.
|
||||
@@ -610,12 +435,13 @@ class DNS(_TokenChallenge):
|
||||
:rtype: DNSResponse
|
||||
|
||||
"""
|
||||
return DNSResponse(validation=self.gen_validation(account_key, **kwargs))
|
||||
return DNSResponse(validation=self.gen_validation(
|
||||
account_key, **kwargs))
|
||||
|
||||
def validation_domain_name(self, name: str) -> str:
|
||||
def validation_domain_name(self, name):
|
||||
"""Domain name for TXT validation record.
|
||||
|
||||
:param str name: Domain name being validated.
|
||||
:param unicode name: Domain name being validated.
|
||||
|
||||
"""
|
||||
return "{0}.{1}".format(self.LABEL, name)
|
||||
@@ -630,9 +456,9 @@ class DNSResponse(ChallengeResponse):
|
||||
"""
|
||||
typ = "dns"
|
||||
|
||||
validation: jose.JWS = jose.field("validation", decoder=jose.JWS.from_json)
|
||||
validation = jose.Field("validation", decoder=jose.JWS.from_json)
|
||||
|
||||
def check_validation(self, chall: 'DNS', account_public_key: jose.JWK) -> bool:
|
||||
def check_validation(self, chall, account_public_key):
|
||||
"""Check validation.
|
||||
|
||||
:param challenges.DNS chall:
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,25 +1,20 @@
|
||||
"""Crypto utilities."""
|
||||
import binascii
|
||||
import contextlib
|
||||
import ipaddress
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import Union
|
||||
|
||||
import josepy as jose
|
||||
from OpenSSL import crypto
|
||||
from OpenSSL import SSL
|
||||
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
||||
|
||||
from acme import errors
|
||||
from acme.magic_typing import Callable # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import Optional # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import Tuple # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import Union # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -30,57 +25,27 @@ logger = logging.getLogger(__name__)
|
||||
# https://www.openssl.org/docs/ssl/SSLv23_method.html). _serve_sni
|
||||
# should be changed to use "set_options" to disable SSLv2 and SSLv3,
|
||||
# in case it's used for things other than probing/serving!
|
||||
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD
|
||||
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
|
||||
|
||||
|
||||
class _DefaultCertSelection:
|
||||
def __init__(self, certs: Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]):
|
||||
self.certs = certs
|
||||
|
||||
def __call__(self, connection: SSL.Connection) -> Optional[Tuple[crypto.PKey, crypto.X509]]:
|
||||
server_name = connection.get_servername()
|
||||
return self.certs.get(server_name, None)
|
||||
|
||||
|
||||
class SSLSocket: # pylint: disable=too-few-public-methods
|
||||
class SSLSocket(object):
|
||||
"""SSL wrapper for sockets.
|
||||
|
||||
:ivar socket sock: Original wrapped socket.
|
||||
:ivar dict certs: Mapping from domain names (`bytes`) to
|
||||
`OpenSSL.crypto.X509`.
|
||||
:ivar method: See `OpenSSL.SSL.Context` for allowed values.
|
||||
:ivar alpn_selection: Hook to select negotiated ALPN protocol for
|
||||
connection.
|
||||
:ivar cert_selection: Hook to select certificate for connection. If given,
|
||||
`certs` parameter would be ignored, and therefore must be empty.
|
||||
|
||||
"""
|
||||
def __init__(self, sock: socket.socket,
|
||||
certs: Optional[Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]] = None,
|
||||
method: int = _DEFAULT_SSL_METHOD,
|
||||
alpn_selection: Optional[Callable[[SSL.Connection, List[bytes]], bytes]] = None,
|
||||
cert_selection: Optional[Callable[[SSL.Connection],
|
||||
Tuple[crypto.PKey, crypto.X509]]] = None
|
||||
) -> None:
|
||||
def __init__(self, sock, certs, method=_DEFAULT_SSL_METHOD):
|
||||
self.sock = sock
|
||||
self.alpn_selection = alpn_selection
|
||||
self.certs = certs
|
||||
self.method = method
|
||||
if not cert_selection and not certs:
|
||||
raise ValueError("Neither cert_selection or certs specified.")
|
||||
if cert_selection and certs:
|
||||
raise ValueError("Both cert_selection and certs specified.")
|
||||
actual_cert_selection: Union[_DefaultCertSelection,
|
||||
Optional[Callable[[SSL.Connection],
|
||||
Tuple[crypto.PKey,
|
||||
crypto.X509]]]] = cert_selection
|
||||
if actual_cert_selection is None:
|
||||
actual_cert_selection = _DefaultCertSelection(certs if certs else {})
|
||||
self.cert_selection = actual_cert_selection
|
||||
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.sock, name)
|
||||
|
||||
def _pick_certificate_cb(self, connection: SSL.Connection) -> None:
|
||||
def _pick_certificate_cb(self, connection):
|
||||
"""SNI certificate callback.
|
||||
|
||||
This method will set a new OpenSSL context object for this
|
||||
@@ -92,58 +57,46 @@ class SSLSocket: # pylint: disable=too-few-public-methods
|
||||
:type connection: :class:`OpenSSL.Connection`
|
||||
|
||||
"""
|
||||
pair = self.cert_selection(connection)
|
||||
if pair is None:
|
||||
logger.debug("Certificate selection for server name %s failed, dropping SSL",
|
||||
connection.get_servername())
|
||||
server_name = connection.get_servername()
|
||||
try:
|
||||
key, cert = self.certs[server_name]
|
||||
except KeyError:
|
||||
logger.debug("Server name (%s) not recognized, dropping SSL",
|
||||
server_name)
|
||||
return
|
||||
key, cert = pair
|
||||
new_context = SSL.Context(self.method)
|
||||
new_context.set_options(SSL.OP_NO_SSLv2)
|
||||
new_context.set_options(SSL.OP_NO_SSLv3)
|
||||
new_context.use_privatekey(key)
|
||||
new_context.use_certificate(cert)
|
||||
if self.alpn_selection is not None:
|
||||
new_context.set_alpn_select_callback(self.alpn_selection)
|
||||
connection.set_context(new_context)
|
||||
|
||||
class FakeConnection:
|
||||
class FakeConnection(object):
|
||||
"""Fake OpenSSL.SSL.Connection."""
|
||||
|
||||
# pylint: disable=missing-function-docstring
|
||||
# pylint: disable=missing-docstring
|
||||
|
||||
def __init__(self, connection: SSL.Connection) -> None:
|
||||
def __init__(self, connection):
|
||||
self._wrapped = connection
|
||||
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._wrapped, name)
|
||||
|
||||
def shutdown(self, *unused_args: Any) -> bool:
|
||||
def shutdown(self, *unused_args):
|
||||
# OpenSSL.SSL.Connection.shutdown doesn't accept any args
|
||||
try:
|
||||
return self._wrapped.shutdown()
|
||||
except SSL.Error as error:
|
||||
# We wrap the error so we raise the same error type as sockets
|
||||
# in the standard library. This is useful when this object is
|
||||
# used by code which expects a standard socket such as
|
||||
# socketserver in the standard library.
|
||||
raise socket.error(error)
|
||||
return self._wrapped.shutdown()
|
||||
|
||||
def accept(self) -> Tuple[FakeConnection, Any]: # pylint: disable=missing-function-docstring
|
||||
def accept(self): # pylint: disable=missing-docstring
|
||||
sock, addr = self.sock.accept()
|
||||
|
||||
context = SSL.Context(self.method)
|
||||
context.set_options(SSL.OP_NO_SSLv2)
|
||||
context.set_options(SSL.OP_NO_SSLv3)
|
||||
context.set_tlsext_servername_callback(self._pick_certificate_cb)
|
||||
if self.alpn_selection is not None:
|
||||
context.set_alpn_select_callback(self.alpn_selection)
|
||||
|
||||
ssl_sock = self.FakeConnection(SSL.Connection(context, sock))
|
||||
ssl_sock.set_accept_state()
|
||||
|
||||
# This log line is especially desirable because without it requests to
|
||||
# our standalone TLSALPN server would not be logged.
|
||||
logger.debug("Performing handshake with %s", addr)
|
||||
try:
|
||||
ssl_sock.do_handshake()
|
||||
@@ -155,9 +108,8 @@ class SSLSocket: # pylint: disable=too-few-public-methods
|
||||
return ssl_sock, addr
|
||||
|
||||
|
||||
def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, # pylint: disable=too-many-arguments
|
||||
method: int = _DEFAULT_SSL_METHOD, source_address: Tuple[str, int] = ('', 0),
|
||||
alpn_protocols: Optional[List[str]] = None) -> crypto.X509:
|
||||
def probe_sni(name, host, port=443, timeout=300,
|
||||
method=_DEFAULT_SSL_METHOD, source_address=('', 0)):
|
||||
"""Probe SNI server for SSL certificate.
|
||||
|
||||
:param bytes name: Byte string to send as the server name in the
|
||||
@@ -169,8 +121,6 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, #
|
||||
:param tuple source_address: Enables multi-path probing (selection
|
||||
of source interface). See `socket.creation_connection` for more
|
||||
info. Available only in Python 2.7+.
|
||||
:param alpn_protocols: Protocols to request using ALPN.
|
||||
:type alpn_protocols: `list` of `str`
|
||||
|
||||
:raises acme.errors.Error: In case of any problems.
|
||||
|
||||
@@ -189,10 +139,10 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, #
|
||||
" from {0}:{1}".format(
|
||||
source_address[0],
|
||||
source_address[1]
|
||||
) if any(source_address) else ""
|
||||
) if socket_kwargs else ""
|
||||
)
|
||||
socket_tuple: Tuple[bytes, int] = (host, port)
|
||||
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore[arg-type]
|
||||
socket_tuple = (host, port) # type: Tuple[str, int]
|
||||
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore
|
||||
except socket.error as error:
|
||||
raise errors.Error(error)
|
||||
|
||||
@@ -200,8 +150,6 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, #
|
||||
client_ssl = SSL.Connection(context, client)
|
||||
client_ssl.set_connect_state()
|
||||
client_ssl.set_tlsext_host_name(name) # pyOpenSSL>=0.13
|
||||
if alpn_protocols is not None:
|
||||
client_ssl.set_alpn_protos(alpn_protocols)
|
||||
try:
|
||||
client_ssl.do_handshake()
|
||||
client_ssl.shutdown()
|
||||
@@ -209,46 +157,23 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, #
|
||||
raise errors.Error(error)
|
||||
return client_ssl.get_peer_certificate()
|
||||
|
||||
|
||||
def make_csr(private_key_pem: bytes, domains: Optional[Union[Set[str], List[str]]] = None,
|
||||
must_staple: bool = False,
|
||||
ipaddrs: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
|
||||
) -> bytes:
|
||||
"""Generate a CSR containing domains or IPs as subjectAltNames.
|
||||
def make_csr(private_key_pem, domains, must_staple=False):
|
||||
"""Generate a CSR containing a list of domains as subjectAltNames.
|
||||
|
||||
:param buffer private_key_pem: Private key, in PEM PKCS#8 format.
|
||||
:param list domains: List of DNS names to include in subjectAltNames of CSR.
|
||||
:param bool must_staple: Whether to include the TLS Feature extension (aka
|
||||
OCSP Must Staple: https://tools.ietf.org/html/rfc7633).
|
||||
:param list ipaddrs: List of IPaddress(type ipaddress.IPv4Address or ipaddress.IPv6Address)
|
||||
names to include in subbjectAltNames of CSR.
|
||||
params ordered this way for backward competablity when called by positional argument.
|
||||
:returns: buffer PEM-encoded Certificate Signing Request.
|
||||
"""
|
||||
private_key = crypto.load_privatekey(
|
||||
crypto.FILETYPE_PEM, private_key_pem)
|
||||
csr = crypto.X509Req()
|
||||
sanlist = []
|
||||
# if domain or ip list not supplied make it empty list so it's easier to iterate
|
||||
if domains is None:
|
||||
domains = []
|
||||
if ipaddrs is None:
|
||||
ipaddrs = []
|
||||
if len(domains)+len(ipaddrs) == 0:
|
||||
raise ValueError("At least one of domains or ipaddrs parameter need to be not empty")
|
||||
for address in domains:
|
||||
sanlist.append('DNS:' + address)
|
||||
for ips in ipaddrs:
|
||||
sanlist.append('IP:' + ips.exploded)
|
||||
# make sure its ascii encoded
|
||||
san_string = ', '.join(sanlist).encode('ascii')
|
||||
# for IP san it's actually need to be octet-string,
|
||||
# but somewhere downsteam thankfully handle it for us
|
||||
extensions = [
|
||||
crypto.X509Extension(
|
||||
b'subjectAltName',
|
||||
critical=False,
|
||||
value=san_string
|
||||
value=', '.join('DNS:' + d for d in domains).encode('ascii')
|
||||
),
|
||||
]
|
||||
if must_staple:
|
||||
@@ -258,16 +183,12 @@ def make_csr(private_key_pem: bytes, domains: Optional[Union[Set[str], List[str]
|
||||
value=b"DER:30:03:02:01:05"))
|
||||
csr.add_extensions(extensions)
|
||||
csr.set_pubkey(private_key)
|
||||
# RFC 2986 Section 4.1 only defines version 0
|
||||
csr.set_version(0)
|
||||
csr.set_version(2)
|
||||
csr.sign(private_key, 'sha256')
|
||||
return crypto.dump_certificate_request(
|
||||
crypto.FILETYPE_PEM, csr)
|
||||
|
||||
|
||||
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req: Union[crypto.X509, crypto.X509Req]
|
||||
) -> List[str]:
|
||||
# unlike its name this only outputs DNS names, other type of idents will ignored
|
||||
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
|
||||
common_name = loaded_cert_or_req.get_subject().CN
|
||||
sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)
|
||||
|
||||
@@ -275,8 +196,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req: Union[crypto.X509, cryp
|
||||
return sans
|
||||
return [common_name] + [d for d in sans if d != common_name]
|
||||
|
||||
|
||||
def _pyopenssl_cert_or_req_san(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
|
||||
def _pyopenssl_cert_or_req_san(cert_or_req):
|
||||
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
|
||||
|
||||
.. todo:: Implement directly in PyOpenSSL!
|
||||
@@ -287,87 +207,45 @@ def _pyopenssl_cert_or_req_san(cert_or_req: Union[crypto.X509, crypto.X509Req])
|
||||
:param cert_or_req: Certificate or CSR.
|
||||
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
:returns: A list of Subject Alternative Names that is DNS.
|
||||
:rtype: `list` of `str`
|
||||
:returns: A list of Subject Alternative Names.
|
||||
:rtype: `list` of `unicode`
|
||||
|
||||
"""
|
||||
# This function finds SANs with dns name
|
||||
# This function finds SANs by dumping the certificate/CSR to text and
|
||||
# searching for "X509v3 Subject Alternative Name" in the text. This method
|
||||
# is used to support PyOpenSSL version 0.13 where the
|
||||
# `_subjectAltNameString` and `get_extensions` methods are not available
|
||||
# for CSRs.
|
||||
|
||||
# constants based on PyOpenSSL certificate/CSR text dump
|
||||
part_separator = ":"
|
||||
parts_separator = ", "
|
||||
prefix = "DNS" + part_separator
|
||||
|
||||
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
|
||||
if isinstance(cert_or_req, crypto.X509):
|
||||
# pylint: disable=line-too-long
|
||||
func = crypto.dump_certificate # type: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]]
|
||||
else:
|
||||
func = crypto.dump_certificate_request
|
||||
text = func(crypto.FILETYPE_TEXT, cert_or_req).decode("utf-8")
|
||||
# WARNING: this function does not support multiple SANs extensions.
|
||||
# Multiple X509v3 extensions of the same type is disallowed by RFC 5280.
|
||||
match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
|
||||
# WARNING: this function assumes that no SAN can include
|
||||
# parts_separator, hence the split!
|
||||
sans_parts = [] if match is None else match.group(1).split(parts_separator)
|
||||
|
||||
return [part.split(part_separator)[1]
|
||||
for part in sans_parts if part.startswith(prefix)]
|
||||
|
||||
|
||||
def _pyopenssl_cert_or_req_san_ip(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
|
||||
"""Get Subject Alternative Names IPs from certificate or CSR using pyOpenSSL.
|
||||
|
||||
:param cert_or_req: Certificate or CSR.
|
||||
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
:returns: A list of Subject Alternative Names that are IP Addresses.
|
||||
:rtype: `list` of `str`. note that this returns as string, not IPaddress object
|
||||
|
||||
"""
|
||||
|
||||
# constants based on PyOpenSSL certificate/CSR text dump
|
||||
part_separator = ":"
|
||||
prefix = "IP Address" + part_separator
|
||||
|
||||
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
|
||||
|
||||
return [part[len(prefix):] for part in sans_parts if part.startswith(prefix)]
|
||||
|
||||
|
||||
def _pyopenssl_extract_san_list_raw(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
|
||||
"""Get raw SAN string from cert or csr, parse it as UTF-8 and return.
|
||||
|
||||
:param cert_or_req: Certificate or CSR.
|
||||
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
:returns: raw san strings, parsed byte as utf-8
|
||||
:rtype: `list` of `str`
|
||||
|
||||
"""
|
||||
# This function finds SANs by dumping the certificate/CSR to text and
|
||||
# searching for "X509v3 Subject Alternative Name" in the text. This method
|
||||
# is used to because in PyOpenSSL version <0.17 `_subjectAltNameString` methods are
|
||||
# not able to Parse IP Addresses in subjectAltName string.
|
||||
|
||||
if isinstance(cert_or_req, crypto.X509):
|
||||
# pylint: disable=line-too-long
|
||||
text = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
|
||||
else:
|
||||
text = crypto.dump_certificate_request(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
|
||||
# WARNING: this function does not support multiple SANs extensions.
|
||||
# Multiple X509v3 extensions of the same type is disallowed by RFC 5280.
|
||||
raw_san = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
|
||||
|
||||
parts_separator = ", "
|
||||
# WARNING: this function assumes that no SAN can include
|
||||
# parts_separator, hence the split!
|
||||
sans_parts = [] if raw_san is None else raw_san.group(1).split(parts_separator)
|
||||
return sans_parts
|
||||
|
||||
|
||||
def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
|
||||
not_before: Optional[int] = None,
|
||||
validity: int = (7 * 24 * 60 * 60), force_san: bool = True,
|
||||
extensions: Optional[List[crypto.X509Extension]] = None,
|
||||
ips: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
|
||||
) -> crypto.X509:
|
||||
def gen_ss_cert(key, domains, not_before=None,
|
||||
validity=(7 * 24 * 60 * 60), force_san=True):
|
||||
"""Generate new self-signed certificate.
|
||||
|
||||
:type domains: `list` of `str`
|
||||
:type domains: `list` of `unicode`
|
||||
:param OpenSSL.crypto.PKey key:
|
||||
:param bool force_san:
|
||||
:param extensions: List of additional extensions to include in the cert.
|
||||
:type extensions: `list` of `OpenSSL.crypto.X509Extension`
|
||||
:type ips: `list` of (`ipaddress.IPv4Address` or `ipaddress.IPv6Address`)
|
||||
|
||||
If more than one domain is provided, all of the domains are put into
|
||||
``subjectAltName`` X.509 extension and first domain is set as the
|
||||
@@ -375,39 +253,25 @@ def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
|
||||
extension is used, unless `force_san` is ``True``.
|
||||
|
||||
"""
|
||||
assert domains or ips, "Must provide one or more hostnames or IPs for the cert."
|
||||
|
||||
assert domains, "Must provide one or more hostnames for the cert."
|
||||
cert = crypto.X509()
|
||||
cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
|
||||
cert.set_version(2)
|
||||
|
||||
if extensions is None:
|
||||
extensions = []
|
||||
if domains is None:
|
||||
domains = []
|
||||
if ips is None:
|
||||
ips = []
|
||||
extensions.append(
|
||||
extensions = [
|
||||
crypto.X509Extension(
|
||||
b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
|
||||
)
|
||||
]
|
||||
|
||||
if len(domains) > 0:
|
||||
cert.get_subject().CN = domains[0]
|
||||
cert.get_subject().CN = domains[0]
|
||||
# TODO: what to put into cert.get_subject()?
|
||||
cert.set_issuer(cert.get_subject())
|
||||
|
||||
sanlist = []
|
||||
for address in domains:
|
||||
sanlist.append('DNS:' + address)
|
||||
for ip in ips:
|
||||
sanlist.append('IP:' + ip.exploded)
|
||||
san_string = ', '.join(sanlist).encode('ascii')
|
||||
if force_san or len(domains) > 1 or len(ips) > 0:
|
||||
if force_san or len(domains) > 1:
|
||||
extensions.append(crypto.X509Extension(
|
||||
b"subjectAltName",
|
||||
critical=False,
|
||||
value=san_string
|
||||
value=b", ".join(b"DNS:" + d.encode() for d in domains)
|
||||
))
|
||||
|
||||
cert.add_extensions(extensions)
|
||||
@@ -419,9 +283,7 @@ def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
|
||||
cert.sign(key, "sha256")
|
||||
return cert
|
||||
|
||||
|
||||
def dump_pyopenssl_chain(chain: Union[List[jose.ComparableX509], List[crypto.X509]],
|
||||
filetype: int = crypto.FILETYPE_PEM) -> bytes:
|
||||
def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
|
||||
"""Dump certificate chain into a bundle.
|
||||
|
||||
:param list chain: List of `OpenSSL.crypto.X509` (or wrapped in
|
||||
@@ -434,10 +296,9 @@ def dump_pyopenssl_chain(chain: Union[List[jose.ComparableX509], List[crypto.X50
|
||||
# XXX: returns empty string when no chain is available, which
|
||||
# shuts up RenewableCert, but might not be the best solution...
|
||||
|
||||
def _dump_cert(cert: Union[jose.ComparableX509, crypto.X509]) -> bytes:
|
||||
def _dump_cert(cert):
|
||||
if isinstance(cert, jose.ComparableX509):
|
||||
if isinstance(cert.wrapped, crypto.X509Req):
|
||||
raise errors.Error("Unexpected CSR provided.") # pragma: no cover
|
||||
# pylint: disable=protected-access
|
||||
cert = cert.wrapped
|
||||
return crypto.dump_certificate(filetype, cert)
|
||||
|
||||
|
||||
@@ -1,17 +1,5 @@
|
||||
"""ACME errors."""
|
||||
import typing
|
||||
from typing import Any
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Set
|
||||
|
||||
from josepy import errors as jose_errors
|
||||
import requests
|
||||
|
||||
# We import acme.messages only during type check to avoid circular dependencies. Type references
|
||||
# to acme.message.* must be quoted to be lazily initialized and avoid compilation errors.
|
||||
if typing.TYPE_CHECKING:
|
||||
from acme import messages # pragma: no cover
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
@@ -40,12 +28,17 @@ class NonceError(ClientError):
|
||||
|
||||
class BadNonce(NonceError):
|
||||
"""Bad nonce error."""
|
||||
def __init__(self, nonce: str, error: Exception, *args: Any) -> None:
|
||||
super().__init__(*args)
|
||||
def __init__(self, nonce, error, *args, **kwargs):
|
||||
# MyPy complains here that there is too many arguments for BaseException constructor.
|
||||
# This is an error fixed in typeshed, see https://github.com/python/mypy/issues/4183
|
||||
# The fix is included in MyPy>=0.740, but upgrading it would bring dozen of errors due to
|
||||
# new types definitions. So we ignore the error until the code base is fixed to match
|
||||
# with MyPy>=0.740 referential.
|
||||
super(BadNonce, self).__init__(*args, **kwargs) # type: ignore
|
||||
self.nonce = nonce
|
||||
self.error = error
|
||||
|
||||
def __str__(self) -> str:
|
||||
def __str__(self):
|
||||
return 'Invalid nonce ({0!r}): {1}'.format(self.nonce, self.error)
|
||||
|
||||
|
||||
@@ -56,14 +49,15 @@ class MissingNonce(NonceError):
|
||||
Replay-Nonce header field in each successful response to a POST it
|
||||
provides to a client (...)".
|
||||
|
||||
:ivar requests.Response ~.response: HTTP Response
|
||||
:ivar requests.Response response: HTTP Response
|
||||
|
||||
"""
|
||||
def __init__(self, response: requests.Response, *args: Any) -> None:
|
||||
super().__init__(*args)
|
||||
def __init__(self, response, *args, **kwargs):
|
||||
# See comment in BadNonce constructor above for an explanation of type: ignore here.
|
||||
super(MissingNonce, self).__init__(*args, **kwargs) # type: ignore
|
||||
self.response = response
|
||||
|
||||
def __str__(self) -> str:
|
||||
def __str__(self):
|
||||
return ('Server {0} response did not include a replay '
|
||||
'nonce, headers: {1} (This may be a service outage)'.format(
|
||||
self.response.request.method, self.response.headers))
|
||||
@@ -81,20 +75,17 @@ class PollError(ClientError):
|
||||
to the most recently updated one
|
||||
|
||||
"""
|
||||
def __init__(self, exhausted: Set['messages.AuthorizationResource'],
|
||||
updated: Mapping['messages.AuthorizationResource',
|
||||
'messages.AuthorizationResource']
|
||||
) -> None:
|
||||
def __init__(self, exhausted, updated):
|
||||
self.exhausted = exhausted
|
||||
self.updated = updated
|
||||
super().__init__()
|
||||
super(PollError, self).__init__()
|
||||
|
||||
@property
|
||||
def timeout(self) -> bool:
|
||||
def timeout(self):
|
||||
"""Was the error caused by timeout?"""
|
||||
return bool(self.exhausted)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
def __repr__(self):
|
||||
return '{0}(exhausted={1!r}, updated={2!r})'.format(
|
||||
self.__class__.__name__, self.exhausted, self.updated)
|
||||
|
||||
@@ -103,9 +94,9 @@ class ValidationError(Error):
|
||||
"""Error for authorization failures. Contains a list of authorization
|
||||
resources, each of which is invalid and should have an error field.
|
||||
"""
|
||||
def __init__(self, failed_authzrs: List['messages.AuthorizationResource']) -> None:
|
||||
def __init__(self, failed_authzrs):
|
||||
self.failed_authzrs = failed_authzrs
|
||||
super().__init__()
|
||||
super(ValidationError, self).__init__()
|
||||
|
||||
|
||||
class TimeoutError(Error): # pylint: disable=redefined-builtin
|
||||
@@ -115,13 +106,13 @@ class TimeoutError(Error): # pylint: disable=redefined-builtin
|
||||
class IssuanceError(Error):
|
||||
"""Error sent by the server after requesting issuance of a certificate."""
|
||||
|
||||
def __init__(self, error: 'messages.Error') -> None:
|
||||
def __init__(self, error):
|
||||
"""Initialize.
|
||||
|
||||
:param messages.Error error: The error provided by the server.
|
||||
"""
|
||||
self.error = error
|
||||
super().__init__()
|
||||
super(IssuanceError, self).__init__()
|
||||
|
||||
|
||||
class ConflictError(ClientError):
|
||||
@@ -132,9 +123,9 @@ class ConflictError(ClientError):
|
||||
|
||||
Also used in V2 of the ACME client for the same purpose.
|
||||
"""
|
||||
def __init__(self, location: str) -> None:
|
||||
def __init__(self, location):
|
||||
self.location = location
|
||||
super().__init__()
|
||||
super(ConflictError, self).__init__()
|
||||
|
||||
|
||||
class WildcardUnsupportedError(Error):
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
"""ACME JSON fields."""
|
||||
import datetime
|
||||
from typing import Any
|
||||
|
||||
import logging
|
||||
|
||||
import josepy as jose
|
||||
@@ -13,17 +10,17 @@ logger = logging.getLogger(__name__)
|
||||
class Fixed(jose.Field):
|
||||
"""Fixed field."""
|
||||
|
||||
def __init__(self, json_name: str, value: Any) -> None:
|
||||
def __init__(self, json_name, value):
|
||||
self.value = value
|
||||
super().__init__(
|
||||
super(Fixed, self).__init__(
|
||||
json_name=json_name, default=value, omitempty=False)
|
||||
|
||||
def decode(self, value: Any) -> Any:
|
||||
def decode(self, value):
|
||||
if value != self.value:
|
||||
raise jose.DeserializationError('Expected {0!r}'.format(self.value))
|
||||
return self.value
|
||||
|
||||
def encode(self, value: Any) -> Any:
|
||||
def encode(self, value):
|
||||
if value != self.value:
|
||||
logger.warning(
|
||||
'Overriding fixed field (%s) with %r', self.json_name, value)
|
||||
@@ -40,11 +37,11 @@ class RFC3339Field(jose.Field):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def default_encoder(cls, value: datetime.datetime) -> str:
|
||||
def default_encoder(cls, value):
|
||||
return pyrfc3339.generate(value)
|
||||
|
||||
@classmethod
|
||||
def default_decoder(cls, value: str) -> datetime.datetime:
|
||||
def default_decoder(cls, value):
|
||||
try:
|
||||
return pyrfc3339.parse(value)
|
||||
except ValueError as error:
|
||||
@@ -54,29 +51,14 @@ class RFC3339Field(jose.Field):
|
||||
class Resource(jose.Field):
|
||||
"""Resource MITM field."""
|
||||
|
||||
def __init__(self, resource_type: str, *args: Any, **kwargs: Any) -> None:
|
||||
def __init__(self, resource_type, *args, **kwargs):
|
||||
self.resource_type = resource_type
|
||||
kwargs['default'] = resource_type
|
||||
super().__init__('resource', *args, **kwargs)
|
||||
super(Resource, self).__init__(
|
||||
'resource', default=resource_type, *args, **kwargs)
|
||||
|
||||
def decode(self, value: Any) -> Any:
|
||||
def decode(self, value):
|
||||
if value != self.resource_type:
|
||||
raise jose.DeserializationError(
|
||||
'Wrong resource type: {0} instead of {1}'.format(
|
||||
value, self.resource_type))
|
||||
return value
|
||||
|
||||
|
||||
def fixed(json_name: str, value: Any) -> Any:
|
||||
"""Generates a type-friendly Fixed field."""
|
||||
return Fixed(json_name, value)
|
||||
|
||||
|
||||
def rfc3339(json_name: str, omitempty: bool = False) -> Any:
|
||||
"""Generates a type-friendly RFC3339 field."""
|
||||
return RFC3339Field(json_name, omitempty=omitempty)
|
||||
|
||||
|
||||
def resource(resource_type: str) -> Any:
|
||||
"""Generates a type-friendly Resource field."""
|
||||
return Resource(resource_type)
|
||||
|
||||
@@ -4,22 +4,18 @@ The JWS implementation in josepy only implements the base JOSE standard. In
|
||||
order to support the new header fields defined in ACME, this module defines some
|
||||
ACME-specific classes that layer on top of josepy.
|
||||
"""
|
||||
from typing import Optional
|
||||
|
||||
import josepy as jose
|
||||
|
||||
|
||||
class Header(jose.Header):
|
||||
"""ACME-specific JOSE Header. Implements nonce, kid, and url.
|
||||
"""
|
||||
nonce: Optional[bytes] = jose.field('nonce', omitempty=True, encoder=jose.encode_b64jose)
|
||||
kid: Optional[str] = jose.field('kid', omitempty=True)
|
||||
url: Optional[str] = jose.field('url', omitempty=True)
|
||||
nonce = jose.Field('nonce', omitempty=True, encoder=jose.encode_b64jose)
|
||||
kid = jose.Field('kid', omitempty=True)
|
||||
url = jose.Field('url', omitempty=True)
|
||||
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that nonce is redefined. Let's ignore the type check here.
|
||||
@nonce.decoder # type: ignore[no-redef,union-attr]
|
||||
def nonce(value: str) -> bytes: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
|
||||
@nonce.decoder
|
||||
def nonce(value): # pylint: disable=missing-docstring,no-self-argument
|
||||
try:
|
||||
return jose.decode_b64jose(value)
|
||||
except jose.DeserializationError as error:
|
||||
@@ -29,12 +25,12 @@ class Header(jose.Header):
|
||||
|
||||
class Signature(jose.Signature):
|
||||
"""ACME-specific Signature. Uses ACME-specific Header for customer fields."""
|
||||
__slots__ = jose.Signature._orig_slots # type: ignore[attr-defined] # pylint: disable=protected-access,no-member
|
||||
__slots__ = jose.Signature._orig_slots # pylint: disable=no-member
|
||||
|
||||
# TODO: decoder/encoder should accept cls? Otherwise, subclassing
|
||||
# JSONObjectWithFields is tricky...
|
||||
header_cls = Header
|
||||
header: Header = jose.field(
|
||||
header = jose.Field(
|
||||
'header', omitempty=True, default=header_cls(),
|
||||
decoder=header_cls.from_json)
|
||||
|
||||
@@ -44,16 +40,15 @@ class Signature(jose.Signature):
|
||||
class JWS(jose.JWS):
|
||||
"""ACME-specific JWS. Includes none, url, and kid in protected header."""
|
||||
signature_cls = Signature
|
||||
__slots__ = jose.JWS._orig_slots # type: ignore[attr-defined] # pylint: disable=protected-access
|
||||
__slots__ = jose.JWS._orig_slots
|
||||
|
||||
@classmethod
|
||||
# type: ignore[override] # pylint: disable=arguments-differ
|
||||
def sign(cls, payload: bytes, key: jose.JWK, alg: jose.JWASignature, nonce: Optional[bytes],
|
||||
url: Optional[str] = None, kid: Optional[str] = None) -> jose.JWS:
|
||||
# pylint: disable=arguments-differ
|
||||
def sign(cls, payload, key, alg, nonce, url=None, kid=None):
|
||||
# Per ACME spec, jwk and kid are mutually exclusive, so only include a
|
||||
# jwk field if kid is not provided.
|
||||
include_jwk = kid is None
|
||||
return super().sign(payload, key=key, alg=alg,
|
||||
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
|
||||
nonce=nonce, url=url, kid=kid,
|
||||
include_jwk=include_jwk)
|
||||
return super(JWS, cls).sign(payload, key=key, alg=alg,
|
||||
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
|
||||
nonce=nonce, url=url, kid=kid,
|
||||
include_jwk=include_jwk)
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
"""Simple shim around the typing module.
|
||||
|
||||
This was useful when this code supported Python 2 and typing wasn't always
|
||||
available. This code is being kept for now for backwards compatibility.
|
||||
|
||||
"""
|
||||
import warnings
|
||||
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
|
||||
from typing import Any
|
||||
|
||||
warnings.warn("acme.magic_typing is deprecated and will be removed in a future release.",
|
||||
DeprecationWarning)
|
||||
"""Shim class to not have to depend on typing module in prod."""
|
||||
import sys
|
||||
|
||||
|
||||
class TypingClass:
|
||||
class TypingClass(object):
|
||||
"""Ignore import errors by getting anything"""
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
return None # pragma: no cover
|
||||
def __getattr__(self, name):
|
||||
return None
|
||||
|
||||
try:
|
||||
# mypy doesn't respect modifying sys.modules
|
||||
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
|
||||
# pylint: disable=unused-import
|
||||
from typing import Collection, IO # type: ignore
|
||||
# pylint: enable=unused-import
|
||||
except ImportError:
|
||||
sys.modules[__name__] = TypingClass()
|
||||
|
||||
@@ -1,33 +1,21 @@
|
||||
"""ACME protocol messages."""
|
||||
import datetime
|
||||
from collections.abc import Hashable
|
||||
import json
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Iterator
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import MutableMapping
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TypeVar
|
||||
from typing import Union
|
||||
|
||||
import josepy as jose
|
||||
import six
|
||||
|
||||
from acme import challenges
|
||||
from acme import errors
|
||||
from acme import fields
|
||||
from acme import jws
|
||||
from acme import util
|
||||
from acme.mixins import ResourceMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing_extensions import Protocol # pragma: no cover
|
||||
else:
|
||||
Protocol = object
|
||||
try:
|
||||
from collections.abc import Hashable # pylint: disable=no-name-in-module
|
||||
except ImportError: # pragma: no cover
|
||||
from collections import Hashable
|
||||
|
||||
|
||||
|
||||
OLD_ERROR_PREFIX = "urn:acme:error:"
|
||||
ERROR_PREFIX = "urn:ietf:params:acme:error:"
|
||||
@@ -48,7 +36,7 @@ ERROR_CODES = {
|
||||
' domain'),
|
||||
'dns': 'There was a problem with a DNS query during identifier validation',
|
||||
'dnssec': 'The server could not validate a DNSSEC signed domain',
|
||||
'incorrectResponse': 'Response received didn\'t match the challenge\'s requirements',
|
||||
'incorrectResponse': 'Response recieved didn\'t match the challenge\'s requirements',
|
||||
# deprecate invalidEmail
|
||||
'invalidEmail': 'The provided email for a registration was invalid',
|
||||
'invalidContact': 'The provided contact URI was invalid',
|
||||
@@ -65,100 +53,40 @@ ERROR_CODES = {
|
||||
'externalAccountRequired': 'The server requires external account binding',
|
||||
}
|
||||
|
||||
ERROR_TYPE_DESCRIPTIONS = {**{
|
||||
ERROR_PREFIX + name: desc for name, desc in ERROR_CODES.items()
|
||||
}, **{ # add errors with old prefix, deprecate me
|
||||
OLD_ERROR_PREFIX + name: desc for name, desc in ERROR_CODES.items()
|
||||
}}
|
||||
ERROR_TYPE_DESCRIPTIONS = dict(
|
||||
(ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items())
|
||||
|
||||
ERROR_TYPE_DESCRIPTIONS.update(dict( # add errors with old prefix, deprecate me
|
||||
(OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()))
|
||||
|
||||
|
||||
def is_acme_error(err: BaseException) -> bool:
|
||||
def is_acme_error(err):
|
||||
"""Check if argument is an ACME error."""
|
||||
if isinstance(err, Error) and (err.typ is not None):
|
||||
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
|
||||
return False
|
||||
|
||||
|
||||
class _Constant(jose.JSONDeSerializable, Hashable):
|
||||
"""ACME constant."""
|
||||
__slots__ = ('name',)
|
||||
POSSIBLE_NAMES: Dict[str, '_Constant'] = NotImplemented
|
||||
|
||||
def __init__(self, name: str) -> None:
|
||||
super().__init__()
|
||||
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
|
||||
self.name = name
|
||||
|
||||
def to_partial_json(self) -> str:
|
||||
return self.name
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj: str) -> '_Constant':
|
||||
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
|
||||
raise jose.DeserializationError(f'{cls.__name__} not recognized')
|
||||
return cls.POSSIBLE_NAMES[jobj]
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f'{self.__class__.__name__}({self.name})'
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return isinstance(other, type(self)) and other.name == self.name
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.__class__, self.name))
|
||||
|
||||
|
||||
class IdentifierType(_Constant):
|
||||
"""ACME identifier type."""
|
||||
POSSIBLE_NAMES: Dict[str, _Constant] = {}
|
||||
|
||||
|
||||
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
|
||||
IDENTIFIER_IP = IdentifierType('ip') # IdentifierIP in pebble - not in Boulder yet
|
||||
|
||||
|
||||
class Identifier(jose.JSONObjectWithFields):
|
||||
"""ACME identifier.
|
||||
|
||||
:ivar IdentifierType typ:
|
||||
:ivar str value:
|
||||
|
||||
"""
|
||||
typ: IdentifierType = jose.field('type', decoder=IdentifierType.from_json)
|
||||
value: str = jose.field('value')
|
||||
|
||||
|
||||
@six.python_2_unicode_compatible
|
||||
class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
"""ACME error.
|
||||
|
||||
https://datatracker.ietf.org/doc/html/rfc7807
|
||||
https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00
|
||||
|
||||
:ivar str typ:
|
||||
:ivar str title:
|
||||
:ivar str detail:
|
||||
:ivar Identifier identifier:
|
||||
:ivar tuple subproblems: An array of ACME Errors which may be present when the CA
|
||||
returns multiple errors related to the same request, `tuple` of `Error`.
|
||||
:ivar unicode typ:
|
||||
:ivar unicode title:
|
||||
:ivar unicode detail:
|
||||
|
||||
"""
|
||||
typ: str = jose.field('type', omitempty=True, default='about:blank')
|
||||
title: str = jose.field('title', omitempty=True)
|
||||
detail: str = jose.field('detail', omitempty=True)
|
||||
identifier: Optional['Identifier'] = jose.field(
|
||||
'identifier', decoder=Identifier.from_json, omitempty=True)
|
||||
subproblems: Optional[Tuple['Error', ...]] = jose.field('subproblems', omitempty=True)
|
||||
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that subproblems is redefined. Let's ignore the type check here.
|
||||
@subproblems.decoder # type: ignore
|
||||
def subproblems(value: List[Dict[str, Any]]) -> Tuple['Error', ...]: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
|
||||
return tuple(Error.from_json(subproblem) for subproblem in value)
|
||||
typ = jose.Field('type', omitempty=True, default='about:blank')
|
||||
title = jose.Field('title', omitempty=True)
|
||||
detail = jose.Field('detail', omitempty=True)
|
||||
|
||||
@classmethod
|
||||
def with_code(cls, code: str, **kwargs: Any) -> 'Error':
|
||||
def with_code(cls, code, **kwargs):
|
||||
"""Create an Error instance with an ACME Error code.
|
||||
|
||||
:str code: An ACME error code, like 'dnssec'.
|
||||
:unicode code: An ACME error code, like 'dnssec'.
|
||||
:kwargs: kwargs to pass to Error.
|
||||
|
||||
"""
|
||||
@@ -166,53 +94,76 @@ class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
raise ValueError("The supplied code: %s is not a known ACME error"
|
||||
" code" % code)
|
||||
typ = ERROR_PREFIX + code
|
||||
# Mypy will not understand that the Error constructor accepts a named argument
|
||||
# "typ" because of josepy magic. Let's ignore the type check here.
|
||||
return cls(typ=typ, **kwargs)
|
||||
|
||||
@property
|
||||
def description(self) -> Optional[str]:
|
||||
def description(self):
|
||||
"""Hardcoded error description based on its type.
|
||||
|
||||
:returns: Description if standard ACME error or ``None``.
|
||||
:rtype: str
|
||||
:rtype: unicode
|
||||
|
||||
"""
|
||||
return ERROR_TYPE_DESCRIPTIONS.get(self.typ)
|
||||
|
||||
@property
|
||||
def code(self) -> Optional[str]:
|
||||
def code(self):
|
||||
"""ACME error code.
|
||||
|
||||
Basically self.typ without the ERROR_PREFIX.
|
||||
|
||||
:returns: error code if standard ACME code or ``None``.
|
||||
:rtype: str
|
||||
:rtype: unicode
|
||||
|
||||
"""
|
||||
code = str(self.typ).rsplit(':', maxsplit=1)[-1]
|
||||
code = str(self.typ).split(':')[-1]
|
||||
if code in ERROR_CODES:
|
||||
return code
|
||||
return None
|
||||
|
||||
def __str__(self) -> str:
|
||||
result = b' :: '.join(
|
||||
def __str__(self):
|
||||
return b' :: '.join(
|
||||
part.encode('ascii', 'backslashreplace') for part in
|
||||
(self.typ, self.description, self.detail, self.title)
|
||||
if part is not None).decode()
|
||||
if self.identifier:
|
||||
result = f'Problem for {self.identifier.value}: ' + result # pylint: disable=no-member
|
||||
if self.subproblems and len(self.subproblems) > 0:
|
||||
for subproblem in self.subproblems:
|
||||
result += f'\n{subproblem}'
|
||||
return result
|
||||
|
||||
|
||||
class _Constant(jose.JSONDeSerializable, Hashable): # type: ignore
|
||||
"""ACME constant."""
|
||||
__slots__ = ('name',)
|
||||
POSSIBLE_NAMES = NotImplemented
|
||||
|
||||
def __init__(self, name):
|
||||
super(_Constant, self).__init__()
|
||||
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
|
||||
self.name = name
|
||||
|
||||
def to_partial_json(self):
|
||||
return self.name
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
|
||||
raise jose.DeserializationError(
|
||||
'{0} not recognized'.format(cls.__name__))
|
||||
return cls.POSSIBLE_NAMES[jobj]
|
||||
|
||||
def __repr__(self):
|
||||
return '{0}({1})'.format(self.__class__.__name__, self.name)
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, type(self)) and other.name == self.name
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.__class__, self.name))
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
class Status(_Constant):
|
||||
"""ACME "status" field."""
|
||||
POSSIBLE_NAMES: Dict[str, _Constant] = {}
|
||||
|
||||
|
||||
POSSIBLE_NAMES = {} # type: dict
|
||||
STATUS_UNKNOWN = Status('unknown')
|
||||
STATUS_PENDING = Status('pending')
|
||||
STATUS_PROCESSING = Status('processing')
|
||||
@@ -223,85 +174,90 @@ STATUS_READY = Status('ready')
|
||||
STATUS_DEACTIVATED = Status('deactivated')
|
||||
|
||||
|
||||
class HasResourceType(Protocol):
|
||||
"""
|
||||
Represents a class with a resource_type class parameter of type string.
|
||||
"""
|
||||
resource_type: str = NotImplemented
|
||||
class IdentifierType(_Constant):
|
||||
"""ACME identifier type."""
|
||||
POSSIBLE_NAMES = {} # type: dict
|
||||
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
|
||||
|
||||
|
||||
GenericHasResourceType = TypeVar("GenericHasResourceType", bound=HasResourceType)
|
||||
class Identifier(jose.JSONObjectWithFields):
|
||||
"""ACME identifier.
|
||||
|
||||
:ivar IdentifierType typ:
|
||||
:ivar unicode value:
|
||||
|
||||
"""
|
||||
typ = jose.Field('type', decoder=IdentifierType.from_json)
|
||||
value = jose.Field('value')
|
||||
|
||||
|
||||
class Directory(jose.JSONDeSerializable):
|
||||
"""Directory."""
|
||||
|
||||
_REGISTERED_TYPES: Dict[str, Type[HasResourceType]] = {}
|
||||
_REGISTERED_TYPES = {} # type: dict
|
||||
|
||||
class Meta(jose.JSONObjectWithFields):
|
||||
"""Directory Meta."""
|
||||
_terms_of_service: str = jose.field('terms-of-service', omitempty=True)
|
||||
_terms_of_service_v2: str = jose.field('termsOfService', omitempty=True)
|
||||
website: str = jose.field('website', omitempty=True)
|
||||
caa_identities: List[str] = jose.field('caaIdentities', omitempty=True)
|
||||
external_account_required: bool = jose.field('externalAccountRequired', omitempty=True)
|
||||
_terms_of_service = jose.Field('terms-of-service', omitempty=True)
|
||||
_terms_of_service_v2 = jose.Field('termsOfService', omitempty=True)
|
||||
website = jose.Field('website', omitempty=True)
|
||||
caa_identities = jose.Field('caaIdentities', omitempty=True)
|
||||
external_account_required = jose.Field('externalAccountRequired', omitempty=True)
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
|
||||
super().__init__(**kwargs)
|
||||
def __init__(self, **kwargs):
|
||||
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
|
||||
super(Directory.Meta, self).__init__(**kwargs)
|
||||
|
||||
@property
|
||||
def terms_of_service(self) -> str:
|
||||
def terms_of_service(self):
|
||||
"""URL for the CA TOS"""
|
||||
return self._terms_of_service or self._terms_of_service_v2
|
||||
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
def __iter__(self):
|
||||
# When iterating over fields, use the external name 'terms_of_service' instead of
|
||||
# the internal '_terms_of_service'.
|
||||
for name in super().__iter__():
|
||||
for name in super(Directory.Meta, self).__iter__():
|
||||
yield name[1:] if name == '_terms_of_service' else name
|
||||
|
||||
def _internal_name(self, name: str) -> str:
|
||||
def _internal_name(self, name):
|
||||
return '_' + name if name == 'terms_of_service' else name
|
||||
|
||||
@classmethod
|
||||
def _canon_key(cls, key: Union[str, HasResourceType, Type[HasResourceType]]) -> str:
|
||||
if isinstance(key, str):
|
||||
return key
|
||||
return key.resource_type
|
||||
|
||||
@classmethod
|
||||
def register(cls,
|
||||
resource_body_cls: Type[GenericHasResourceType]) -> Type[GenericHasResourceType]:
|
||||
def _canon_key(cls, key):
|
||||
return getattr(key, 'resource_type', key)
|
||||
|
||||
@classmethod
|
||||
def register(cls, resource_body_cls):
|
||||
"""Register resource."""
|
||||
resource_type = resource_body_cls.resource_type
|
||||
assert resource_type not in cls._REGISTERED_TYPES
|
||||
cls._REGISTERED_TYPES[resource_type] = resource_body_cls
|
||||
return resource_body_cls
|
||||
|
||||
def __init__(self, jobj: Mapping[str, Any]) -> None:
|
||||
def __init__(self, jobj):
|
||||
canon_jobj = util.map_keys(jobj, self._canon_key)
|
||||
# TODO: check that everything is an absolute URL; acme-spec is
|
||||
# not clear on that
|
||||
self._jobj = canon_jobj
|
||||
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
def __getattr__(self, name):
|
||||
try:
|
||||
return self[name.replace('_', '-')]
|
||||
except KeyError as error:
|
||||
raise AttributeError(str(error))
|
||||
raise AttributeError(str(error) + ': ' + name)
|
||||
|
||||
def __getitem__(self, name: Union[str, HasResourceType, Type[HasResourceType]]) -> Any:
|
||||
def __getitem__(self, name):
|
||||
try:
|
||||
return self._jobj[self._canon_key(name)]
|
||||
except KeyError:
|
||||
raise KeyError('Directory field "' + self._canon_key(name) + '" not found')
|
||||
raise KeyError('Directory field not found')
|
||||
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
def to_partial_json(self):
|
||||
return self._jobj
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj: MutableMapping[str, Any]) -> 'Directory':
|
||||
def from_json(cls, jobj):
|
||||
jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {}))
|
||||
return cls(jobj)
|
||||
|
||||
@@ -312,28 +268,27 @@ class Resource(jose.JSONObjectWithFields):
|
||||
:ivar acme.messages.ResourceBody body: Resource body.
|
||||
|
||||
"""
|
||||
body: "ResourceBody" = jose.field('body')
|
||||
body = jose.Field('body')
|
||||
|
||||
|
||||
class ResourceWithURI(Resource):
|
||||
"""ACME Resource with URI.
|
||||
|
||||
:ivar str uri: Location of the resource.
|
||||
:ivar unicode uri: Location of the resource.
|
||||
|
||||
"""
|
||||
uri: str = jose.field('uri') # no ChallengeResource.uri
|
||||
uri = jose.Field('uri') # no ChallengeResource.uri
|
||||
|
||||
|
||||
class ResourceBody(jose.JSONObjectWithFields):
|
||||
"""ACME Resource Body."""
|
||||
|
||||
|
||||
class ExternalAccountBinding:
|
||||
class ExternalAccountBinding(object):
|
||||
"""ACME External Account Binding"""
|
||||
|
||||
@classmethod
|
||||
def from_data(cls, account_public_key: jose.JWK, kid: str, hmac_key: str,
|
||||
directory: Directory) -> Dict[str, Any]:
|
||||
def from_data(cls, account_public_key, kid, hmac_key, directory):
|
||||
"""Create External Account Binding Resource from contact details, kid and hmac."""
|
||||
|
||||
key_json = json.dumps(account_public_key.to_partial_json()).encode()
|
||||
@@ -347,142 +302,83 @@ class ExternalAccountBinding:
|
||||
return eab.to_partial_json()
|
||||
|
||||
|
||||
GenericRegistration = TypeVar('GenericRegistration', bound='Registration')
|
||||
|
||||
|
||||
class Registration(ResourceBody):
|
||||
"""Registration Resource Body.
|
||||
|
||||
:ivar jose.JWK key: Public key.
|
||||
:ivar josepy.jwk.JWK key: Public key.
|
||||
:ivar tuple contact: Contact information following ACME spec,
|
||||
`tuple` of `str`.
|
||||
:ivar str agreement:
|
||||
`tuple` of `unicode`.
|
||||
:ivar unicode agreement:
|
||||
|
||||
"""
|
||||
# on new-reg key server ignores 'key' and populates it based on
|
||||
# JWS.signature.combined.jwk
|
||||
key: jose.JWK = jose.field('key', omitempty=True, decoder=jose.JWK.from_json)
|
||||
# Contact field implements special behavior to allow messages that clear existing
|
||||
# contacts while not expecting the `contact` field when loading from json.
|
||||
# This is implemented in the constructor and *_json methods.
|
||||
contact: Tuple[str, ...] = jose.field('contact', omitempty=True, default=())
|
||||
agreement: str = jose.field('agreement', omitempty=True)
|
||||
status: Status = jose.field('status', omitempty=True)
|
||||
terms_of_service_agreed: bool = jose.field('termsOfServiceAgreed', omitempty=True)
|
||||
only_return_existing: bool = jose.field('onlyReturnExisting', omitempty=True)
|
||||
external_account_binding: Dict[str, Any] = jose.field('externalAccountBinding',
|
||||
omitempty=True)
|
||||
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
|
||||
contact = jose.Field('contact', omitempty=True, default=())
|
||||
agreement = jose.Field('agreement', omitempty=True)
|
||||
status = jose.Field('status', omitempty=True)
|
||||
terms_of_service_agreed = jose.Field('termsOfServiceAgreed', omitempty=True)
|
||||
only_return_existing = jose.Field('onlyReturnExisting', omitempty=True)
|
||||
external_account_binding = jose.Field('externalAccountBinding', omitempty=True)
|
||||
|
||||
phone_prefix = 'tel:'
|
||||
email_prefix = 'mailto:'
|
||||
|
||||
@classmethod
|
||||
def from_data(cls: Type[GenericRegistration], phone: Optional[str] = None,
|
||||
email: Optional[str] = None,
|
||||
external_account_binding: Optional[Dict[str, Any]] = None,
|
||||
**kwargs: Any) -> GenericRegistration:
|
||||
"""
|
||||
Create registration resource from contact details.
|
||||
|
||||
The `contact` keyword being passed to a Registration object is meaningful, so
|
||||
this function represents empty iterables in its kwargs by passing on an empty
|
||||
`tuple`.
|
||||
"""
|
||||
|
||||
# Note if `contact` was in kwargs.
|
||||
contact_provided = 'contact' in kwargs
|
||||
|
||||
# Pop `contact` from kwargs and add formatted email or phone numbers
|
||||
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
|
||||
"""Create registration resource from contact details."""
|
||||
details = list(kwargs.pop('contact', ()))
|
||||
if phone is not None:
|
||||
details.append(cls.phone_prefix + phone)
|
||||
if email is not None:
|
||||
details.extend([cls.email_prefix + mail for mail in email.split(',')])
|
||||
|
||||
# Insert formatted contact information back into kwargs
|
||||
# or insert an empty tuple if `contact` provided.
|
||||
if details or contact_provided:
|
||||
kwargs['contact'] = tuple(details)
|
||||
kwargs['contact'] = tuple(details)
|
||||
|
||||
if external_account_binding:
|
||||
kwargs['external_account_binding'] = external_account_binding
|
||||
|
||||
return cls(**kwargs)
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Note if the user provides a value for the `contact` member."""
|
||||
if 'contact' in kwargs and kwargs['contact'] is not None:
|
||||
# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
|
||||
object.__setattr__(self, '_add_contact', True)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def _filter_contact(self, prefix: str) -> Tuple[str, ...]:
|
||||
def _filter_contact(self, prefix):
|
||||
return tuple(
|
||||
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
|
||||
if detail.startswith(prefix))
|
||||
|
||||
def _add_contact_if_appropriate(self, jobj: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
The `contact` member of Registration objects should not be required when
|
||||
de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
|
||||
it should be included in serializations if it was provided.
|
||||
|
||||
:param jobj: Dictionary containing this Registrations' data
|
||||
:type jobj: dict
|
||||
|
||||
:returns: Dictionary containing Registrations data to transmit to the server
|
||||
:rtype: dict
|
||||
"""
|
||||
if getattr(self, '_add_contact', False):
|
||||
jobj['contact'] = self.encode('contact')
|
||||
|
||||
return jobj
|
||||
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
"""Modify josepy.JSONDeserializable.to_partial_json()"""
|
||||
jobj = super().to_partial_json()
|
||||
return self._add_contact_if_appropriate(jobj)
|
||||
|
||||
def fields_to_partial_json(self) -> Dict[str, Any]:
|
||||
"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
jobj = super().fields_to_partial_json()
|
||||
return self._add_contact_if_appropriate(jobj)
|
||||
|
||||
@property
|
||||
def phones(self) -> Tuple[str, ...]:
|
||||
def phones(self):
|
||||
"""All phones found in the ``contact`` field."""
|
||||
return self._filter_contact(self.phone_prefix)
|
||||
|
||||
@property
|
||||
def emails(self) -> Tuple[str, ...]:
|
||||
def emails(self):
|
||||
"""All emails found in the ``contact`` field."""
|
||||
return self._filter_contact(self.email_prefix)
|
||||
|
||||
|
||||
@Directory.register
|
||||
class NewRegistration(ResourceMixin, Registration):
|
||||
class NewRegistration(Registration):
|
||||
"""New registration."""
|
||||
resource_type = 'new-reg'
|
||||
resource: str = fields.resource(resource_type)
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class UpdateRegistration(ResourceMixin, Registration):
|
||||
class UpdateRegistration(Registration):
|
||||
"""Update registration."""
|
||||
resource_type = 'reg'
|
||||
resource: str = fields.resource(resource_type)
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class RegistrationResource(ResourceWithURI):
|
||||
"""Registration Resource.
|
||||
|
||||
:ivar acme.messages.Registration body:
|
||||
:ivar str new_authzr_uri: Deprecated. Do not use.
|
||||
:ivar str terms_of_service: URL for the CA TOS.
|
||||
:ivar unicode new_authzr_uri: Deprecated. Do not use.
|
||||
:ivar unicode terms_of_service: URL for the CA TOS.
|
||||
|
||||
"""
|
||||
body: Registration = jose.field('body', decoder=Registration.from_json)
|
||||
new_authzr_uri: str = jose.field('new_authzr_uri', omitempty=True)
|
||||
terms_of_service: str = jose.field('terms_of_service', omitempty=True)
|
||||
body = jose.Field('body', decoder=Registration.from_json)
|
||||
new_authzr_uri = jose.Field('new_authzr_uri', omitempty=True)
|
||||
terms_of_service = jose.Field('terms_of_service', omitempty=True)
|
||||
|
||||
|
||||
class ChallengeBody(ResourceBody):
|
||||
@@ -507,47 +403,47 @@ class ChallengeBody(ResourceBody):
|
||||
# challenge object supports either one, but should be accessed through the
|
||||
# name "uri". In Client.answer_challenge, whichever one is set will be
|
||||
# used.
|
||||
_uri: str = jose.field('uri', omitempty=True, default=None)
|
||||
_url: str = jose.field('url', omitempty=True, default=None)
|
||||
status: Status = jose.field('status', decoder=Status.from_json,
|
||||
_uri = jose.Field('uri', omitempty=True, default=None)
|
||||
_url = jose.Field('url', omitempty=True, default=None)
|
||||
status = jose.Field('status', decoder=Status.from_json,
|
||||
omitempty=True, default=STATUS_PENDING)
|
||||
validated: datetime.datetime = fields.rfc3339('validated', omitempty=True)
|
||||
error: Error = jose.field('error', decoder=Error.from_json,
|
||||
validated = fields.RFC3339Field('validated', omitempty=True)
|
||||
error = jose.Field('error', decoder=Error.from_json,
|
||||
omitempty=True, default=None)
|
||||
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
|
||||
super().__init__(**kwargs)
|
||||
def __init__(self, **kwargs):
|
||||
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
|
||||
super(ChallengeBody, self).__init__(**kwargs)
|
||||
|
||||
def encode(self, name: str) -> Any:
|
||||
return super().encode(self._internal_name(name))
|
||||
def encode(self, name):
|
||||
return super(ChallengeBody, self).encode(self._internal_name(name))
|
||||
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
jobj = super().to_partial_json()
|
||||
def to_partial_json(self):
|
||||
jobj = super(ChallengeBody, self).to_partial_json()
|
||||
jobj.update(self.chall.to_partial_json())
|
||||
return jobj
|
||||
|
||||
@classmethod
|
||||
def fields_from_json(cls, jobj: Mapping[str, Any]) -> Dict[str, Any]:
|
||||
jobj_fields = super().fields_from_json(jobj)
|
||||
def fields_from_json(cls, jobj):
|
||||
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
|
||||
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
|
||||
return jobj_fields
|
||||
|
||||
@property
|
||||
def uri(self) -> str:
|
||||
def uri(self):
|
||||
"""The URL of this challenge."""
|
||||
return self._url or self._uri
|
||||
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.chall, name)
|
||||
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
def __iter__(self):
|
||||
# When iterating over fields, use the external name 'uri' instead of
|
||||
# the internal '_uri'.
|
||||
for name in super().__iter__():
|
||||
for name in super(ChallengeBody, self).__iter__():
|
||||
yield name[1:] if name == '_uri' else name
|
||||
|
||||
def _internal_name(self, name: str) -> str:
|
||||
def _internal_name(self, name):
|
||||
return '_' + name if name == 'uri' else name
|
||||
|
||||
|
||||
@@ -555,16 +451,17 @@ class ChallengeResource(Resource):
|
||||
"""Challenge Resource.
|
||||
|
||||
:ivar acme.messages.ChallengeBody body:
|
||||
:ivar str authzr_uri: URI found in the 'up' ``Link`` header.
|
||||
:ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.
|
||||
|
||||
"""
|
||||
body: ChallengeBody = jose.field('body', decoder=ChallengeBody.from_json)
|
||||
authzr_uri: str = jose.field('authzr_uri')
|
||||
body = jose.Field('body', decoder=ChallengeBody.from_json)
|
||||
authzr_uri = jose.Field('authzr_uri')
|
||||
|
||||
@property
|
||||
def uri(self) -> str:
|
||||
def uri(self):
|
||||
"""The URL of the challenge body."""
|
||||
return self.body.uri # pylint: disable=no-member
|
||||
# pylint: disable=function-redefined,no-member
|
||||
return self.body.uri
|
||||
|
||||
|
||||
class Authorization(ResourceBody):
|
||||
@@ -578,66 +475,64 @@ class Authorization(ResourceBody):
|
||||
:ivar datetime.datetime expires:
|
||||
|
||||
"""
|
||||
identifier: Identifier = jose.field('identifier', decoder=Identifier.from_json, omitempty=True)
|
||||
challenges: List[ChallengeBody] = jose.field('challenges', omitempty=True)
|
||||
combinations: Tuple[Tuple[int, ...], ...] = jose.field('combinations', omitempty=True)
|
||||
identifier = jose.Field('identifier', decoder=Identifier.from_json, omitempty=True)
|
||||
challenges = jose.Field('challenges', omitempty=True)
|
||||
combinations = jose.Field('combinations', omitempty=True)
|
||||
|
||||
status: Status = jose.field('status', omitempty=True, decoder=Status.from_json)
|
||||
status = jose.Field('status', omitempty=True, decoder=Status.from_json)
|
||||
# TODO: 'expires' is allowed for Authorization Resources in
|
||||
# general, but for Key Authorization '[t]he "expires" field MUST
|
||||
# be absent'... then acme-spec gives example with 'expires'
|
||||
# present... That's confusing!
|
||||
expires: datetime.datetime = fields.rfc3339('expires', omitempty=True)
|
||||
wildcard: bool = jose.field('wildcard', omitempty=True)
|
||||
expires = fields.RFC3339Field('expires', omitempty=True)
|
||||
wildcard = jose.Field('wildcard', omitempty=True)
|
||||
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that challenge is redefined. Let's ignore the type check here.
|
||||
@challenges.decoder # type: ignore
|
||||
def challenges(value: List[Dict[str, Any]]) -> Tuple[ChallengeBody, ...]: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
|
||||
@challenges.decoder
|
||||
def challenges(value): # pylint: disable=missing-docstring,no-self-argument
|
||||
return tuple(ChallengeBody.from_json(chall) for chall in value)
|
||||
|
||||
@property
|
||||
def resolved_combinations(self) -> Tuple[Tuple[ChallengeBody, ...], ...]:
|
||||
def resolved_combinations(self):
|
||||
"""Combinations with challenges instead of indices."""
|
||||
return tuple(tuple(self.challenges[idx] for idx in combo)
|
||||
for combo in self.combinations) # pylint: disable=not-an-iterable
|
||||
|
||||
|
||||
@Directory.register
|
||||
class NewAuthorization(ResourceMixin, Authorization):
|
||||
class NewAuthorization(Authorization):
|
||||
"""New authorization."""
|
||||
resource_type = 'new-authz'
|
||||
resource: str = fields.resource(resource_type)
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class UpdateAuthorization(ResourceMixin, Authorization):
|
||||
class UpdateAuthorization(Authorization):
|
||||
"""Update authorization."""
|
||||
resource_type = 'authz'
|
||||
resource: str = fields.resource(resource_type)
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class AuthorizationResource(ResourceWithURI):
|
||||
"""Authorization Resource.
|
||||
|
||||
:ivar acme.messages.Authorization body:
|
||||
:ivar str new_cert_uri: Deprecated. Do not use.
|
||||
:ivar unicode new_cert_uri: Deprecated. Do not use.
|
||||
|
||||
"""
|
||||
body: Authorization = jose.field('body', decoder=Authorization.from_json)
|
||||
new_cert_uri: str = jose.field('new_cert_uri', omitempty=True)
|
||||
body = jose.Field('body', decoder=Authorization.from_json)
|
||||
new_cert_uri = jose.Field('new_cert_uri', omitempty=True)
|
||||
|
||||
|
||||
@Directory.register
|
||||
class CertificateRequest(ResourceMixin, jose.JSONObjectWithFields):
|
||||
class CertificateRequest(jose.JSONObjectWithFields):
|
||||
"""ACME new-cert request.
|
||||
|
||||
:ivar jose.ComparableX509 csr:
|
||||
:ivar josepy.util.ComparableX509 csr:
|
||||
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
|
||||
|
||||
"""
|
||||
resource_type = 'new-cert'
|
||||
resource: str = fields.resource(resource_type)
|
||||
csr: jose.ComparableX509 = jose.field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
|
||||
resource = fields.Resource(resource_type)
|
||||
csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
|
||||
|
||||
|
||||
class CertificateResource(ResourceWithURI):
|
||||
@@ -645,79 +540,68 @@ class CertificateResource(ResourceWithURI):
|
||||
|
||||
:ivar josepy.util.ComparableX509 body:
|
||||
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
|
||||
:ivar str cert_chain_uri: URI found in the 'up' ``Link`` header
|
||||
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
|
||||
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.
|
||||
|
||||
"""
|
||||
cert_chain_uri: str = jose.field('cert_chain_uri')
|
||||
authzrs: Tuple[AuthorizationResource, ...] = jose.field('authzrs')
|
||||
cert_chain_uri = jose.Field('cert_chain_uri')
|
||||
authzrs = jose.Field('authzrs')
|
||||
|
||||
|
||||
@Directory.register
|
||||
class Revocation(ResourceMixin, jose.JSONObjectWithFields):
|
||||
class Revocation(jose.JSONObjectWithFields):
|
||||
"""Revocation message.
|
||||
|
||||
:ivar jose.ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
|
||||
`jose.ComparableX509`
|
||||
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
|
||||
`.ComparableX509`
|
||||
|
||||
"""
|
||||
resource_type = 'revoke-cert'
|
||||
resource: str = fields.resource(resource_type)
|
||||
certificate: jose.ComparableX509 = jose.field(
|
||||
resource = fields.Resource(resource_type)
|
||||
certificate = jose.Field(
|
||||
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
|
||||
reason: int = jose.field('reason')
|
||||
reason = jose.Field('reason')
|
||||
|
||||
|
||||
class Order(ResourceBody):
|
||||
"""Order Resource Body.
|
||||
|
||||
:ivar identifiers: List of identifiers for the certificate.
|
||||
:vartype identifiers: `list` of `.Identifier`
|
||||
:ivar list of .Identifier: List of identifiers for the certificate.
|
||||
:ivar acme.messages.Status status:
|
||||
:ivar authorizations: URLs of authorizations.
|
||||
:vartype authorizations: `list` of `str`
|
||||
:ivar list of str authorizations: URLs of authorizations.
|
||||
:ivar str certificate: URL to download certificate as a fullchain PEM.
|
||||
:ivar str finalize: URL to POST to to request issuance once all
|
||||
authorizations have "valid" status.
|
||||
:ivar datetime.datetime expires: When the order expires.
|
||||
:ivar ~.Error error: Any error that occurred during finalization, if applicable.
|
||||
:ivar .Error error: Any error that occurred during finalization, if applicable.
|
||||
"""
|
||||
identifiers: List[Identifier] = jose.field('identifiers', omitempty=True)
|
||||
status: Status = jose.field('status', decoder=Status.from_json, omitempty=True)
|
||||
authorizations: List[str] = jose.field('authorizations', omitempty=True)
|
||||
certificate: str = jose.field('certificate', omitempty=True)
|
||||
finalize: str = jose.field('finalize', omitempty=True)
|
||||
expires: datetime.datetime = fields.rfc3339('expires', omitempty=True)
|
||||
error: Error = jose.field('error', omitempty=True, decoder=Error.from_json)
|
||||
identifiers = jose.Field('identifiers', omitempty=True)
|
||||
status = jose.Field('status', decoder=Status.from_json,
|
||||
omitempty=True)
|
||||
authorizations = jose.Field('authorizations', omitempty=True)
|
||||
certificate = jose.Field('certificate', omitempty=True)
|
||||
finalize = jose.Field('finalize', omitempty=True)
|
||||
expires = fields.RFC3339Field('expires', omitempty=True)
|
||||
error = jose.Field('error', omitempty=True, decoder=Error.from_json)
|
||||
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that identifiers is redefined. Let's ignore the type check here.
|
||||
@identifiers.decoder # type: ignore
|
||||
def identifiers(value: List[Dict[str, Any]]) -> Tuple[Identifier, ...]: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
|
||||
@identifiers.decoder
|
||||
def identifiers(value): # pylint: disable=missing-docstring,no-self-argument
|
||||
return tuple(Identifier.from_json(identifier) for identifier in value)
|
||||
|
||||
|
||||
class OrderResource(ResourceWithURI):
|
||||
"""Order Resource.
|
||||
|
||||
:ivar acme.messages.Order body:
|
||||
:ivar bytes csr_pem: The CSR this Order will be finalized with.
|
||||
:ivar authorizations: Fully-fetched AuthorizationResource objects.
|
||||
:vartype authorizations: `list` of `acme.messages.AuthorizationResource`
|
||||
:ivar str csr_pem: The CSR this Order will be finalized with.
|
||||
:ivar list of acme.messages.AuthorizationResource authorizations:
|
||||
Fully-fetched AuthorizationResource objects.
|
||||
:ivar str fullchain_pem: The fetched contents of the certificate URL
|
||||
produced once the order was finalized, if it's present.
|
||||
:ivar alternative_fullchains_pem: The fetched contents of alternative certificate
|
||||
chain URLs produced once the order was finalized, if present and requested during
|
||||
finalization.
|
||||
:vartype alternative_fullchains_pem: `list` of `str`
|
||||
"""
|
||||
body: Order = jose.field('body', decoder=Order.from_json)
|
||||
csr_pem: bytes = jose.field('csr_pem', omitempty=True)
|
||||
authorizations: List[AuthorizationResource] = jose.field('authorizations')
|
||||
fullchain_pem: str = jose.field('fullchain_pem', omitempty=True)
|
||||
alternative_fullchains_pem: List[str] = jose.field('alternative_fullchains_pem',
|
||||
omitempty=True)
|
||||
|
||||
body = jose.Field('body', decoder=Order.from_json)
|
||||
csr_pem = jose.Field('csr_pem', omitempty=True)
|
||||
authorizations = jose.Field('authorizations')
|
||||
fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
|
||||
|
||||
@Directory.register
|
||||
class NewOrder(Order):
|
||||
|
||||
@@ -1,68 +0,0 @@
|
||||
"""Useful mixins for Challenge and Resource objects"""
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
|
||||
|
||||
class VersionedLEACMEMixin:
|
||||
"""This mixin stores the version of Let's Encrypt's endpoint being used."""
|
||||
@property
|
||||
def le_acme_version(self) -> int:
|
||||
"""Define the version of ACME protocol to use"""
|
||||
return getattr(self, '_le_acme_version', 1)
|
||||
|
||||
@le_acme_version.setter
|
||||
def le_acme_version(self, version: int) -> None:
|
||||
# We need to use object.__setattr__ to not depend on the specific implementation of
|
||||
# __setattr__ in current class (eg. jose.TypedJSONObjectWithFields raises AttributeError
|
||||
# for any attempt to set an attribute to make objects immutable).
|
||||
object.__setattr__(self, '_le_acme_version', version)
|
||||
|
||||
def __setattr__(self, key: str, value: Any) -> None:
|
||||
if key == 'le_acme_version':
|
||||
# Required for @property to operate properly. See comment above.
|
||||
object.__setattr__(self, key, value)
|
||||
else:
|
||||
super().__setattr__(key, value) # pragma: no cover
|
||||
|
||||
|
||||
class ResourceMixin(VersionedLEACMEMixin):
|
||||
"""
|
||||
This mixin generates a RFC8555 compliant JWS payload
|
||||
by removing the `resource` field if needed (eg. ACME v2 protocol).
|
||||
"""
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(),
|
||||
'to_partial_json', 'resource')
|
||||
|
||||
def fields_to_partial_json(self) -> Dict[str, Any]:
|
||||
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(),
|
||||
'fields_to_partial_json', 'resource')
|
||||
|
||||
|
||||
class TypeMixin(VersionedLEACMEMixin):
|
||||
"""
|
||||
This mixin allows generation of a RFC8555 compliant JWS payload
|
||||
by removing the `type` field if needed (eg. ACME v2 protocol).
|
||||
"""
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(),
|
||||
'to_partial_json', 'type')
|
||||
|
||||
def fields_to_partial_json(self) -> Dict[str, Any]:
|
||||
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(),
|
||||
'fields_to_partial_json', 'type')
|
||||
|
||||
|
||||
def _safe_jobj_compliance(instance: Any, jobj_method: str,
|
||||
uncompliant_field: str) -> Dict[str, Any]:
|
||||
if hasattr(instance, jobj_method):
|
||||
jobj: Dict[str, Any] = getattr(instance, jobj_method)()
|
||||
if instance.le_acme_version == 2:
|
||||
jobj.pop(uncompliant_field, None)
|
||||
return jobj
|
||||
|
||||
raise AttributeError(f'Method {jobj_method}() is not implemented.') # pragma: no cover
|
||||
@@ -1,57 +1,45 @@
|
||||
"""Support for standalone client challenge solvers. """
|
||||
import collections
|
||||
import functools
|
||||
import http.client as http_client
|
||||
import http.server as BaseHTTPServer
|
||||
import logging
|
||||
import socket
|
||||
import socketserver
|
||||
import threading
|
||||
from typing import Any
|
||||
from typing import cast
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
|
||||
from OpenSSL import crypto
|
||||
from OpenSSL import SSL
|
||||
from six.moves import BaseHTTPServer # type: ignore # pylint: disable=import-error
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# six.moves.* | pylint: disable=no-member,attribute-defined-outside-init
|
||||
# pylint: disable=no-init
|
||||
|
||||
|
||||
class TLSServer(socketserver.TCPServer):
|
||||
"""Generic TLS Server."""
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.ipv6 = kwargs.pop("ipv6", False)
|
||||
if self.ipv6:
|
||||
self.address_family = socket.AF_INET6
|
||||
else:
|
||||
self.address_family = socket.AF_INET
|
||||
self.certs = kwargs.pop("certs", {})
|
||||
self.method = kwargs.pop("method", crypto_util._DEFAULT_SSL_METHOD)
|
||||
self.method = kwargs.pop(
|
||||
# pylint: disable=protected-access
|
||||
"method", crypto_util._DEFAULT_SSL_METHOD)
|
||||
self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
|
||||
super().__init__(*args, **kwargs)
|
||||
socketserver.TCPServer.__init__(self, *args, **kwargs)
|
||||
|
||||
def _wrap_sock(self) -> None:
|
||||
self.socket = cast(socket.socket, crypto_util.SSLSocket(
|
||||
self.socket, cert_selection=self._cert_selection,
|
||||
alpn_selection=getattr(self, '_alpn_selection', None),
|
||||
method=self.method))
|
||||
def _wrap_sock(self):
|
||||
self.socket = crypto_util.SSLSocket(
|
||||
self.socket, certs=self.certs, method=self.method)
|
||||
|
||||
def _cert_selection(self, connection: SSL.Connection
|
||||
) -> Tuple[crypto.PKey, crypto.X509]: # pragma: no cover
|
||||
"""Callback selecting certificate for connection."""
|
||||
server_name = connection.get_servername()
|
||||
return self.certs.get(server_name, None)
|
||||
|
||||
def server_bind(self) -> None:
|
||||
def server_bind(self): # pylint: disable=missing-docstring
|
||||
self._wrap_sock()
|
||||
return socketserver.TCPServer.server_bind(self)
|
||||
|
||||
@@ -63,7 +51,7 @@ class ACMEServerMixin:
|
||||
allow_reuse_address = True
|
||||
|
||||
|
||||
class BaseDualNetworkedServers:
|
||||
class BaseDualNetworkedServers(object):
|
||||
"""Base class for a pair of IPv6 and IPv4 servers that tries to do everything
|
||||
it's asked for both servers, but where failures in one server don't
|
||||
affect the other.
|
||||
@@ -71,14 +59,10 @@ class BaseDualNetworkedServers:
|
||||
If two servers are instantiated, they will serve on the same port.
|
||||
"""
|
||||
|
||||
def __init__(self, ServerClass: Type[socketserver.TCPServer], server_address: Tuple[str, int],
|
||||
*remaining_args: Any, **kwargs: Any) -> None:
|
||||
def __init__(self, ServerClass, server_address, *remaining_args, **kwargs):
|
||||
port = server_address[1]
|
||||
self.threads: List[threading.Thread] = []
|
||||
self.servers: List[socketserver.BaseServer] = []
|
||||
|
||||
# Preserve socket error for re-raising, if no servers can be started
|
||||
last_socket_err: Optional[socket.error] = None
|
||||
self.threads = [] # type: List[threading.Thread]
|
||||
self.servers = [] # type: List[ACMEServerMixin]
|
||||
|
||||
# Must try True first.
|
||||
# Ubuntu, for example, will fail to bind to IPv4 if we've already bound
|
||||
@@ -96,8 +80,7 @@ class BaseDualNetworkedServers:
|
||||
logger.debug(
|
||||
"Successfully bound to %s:%s using %s", new_address[0],
|
||||
new_address[1], "IPv6" if ip_version else "IPv4")
|
||||
except socket.error as e:
|
||||
last_socket_err = e
|
||||
except socket.error:
|
||||
if self.servers:
|
||||
# Already bound using IPv6.
|
||||
logger.debug(
|
||||
@@ -116,12 +99,9 @@ class BaseDualNetworkedServers:
|
||||
# bind to the same port for both servers.
|
||||
port = server.socket.getsockname()[1]
|
||||
if not self.servers:
|
||||
if last_socket_err:
|
||||
raise last_socket_err
|
||||
else: # pragma: no cover
|
||||
raise socket.error("Could not bind to IPv4 or IPv6.")
|
||||
raise socket.error("Could not bind to IPv4 or IPv6.")
|
||||
|
||||
def serve_forever(self) -> None:
|
||||
def serve_forever(self):
|
||||
"""Wraps socketserver.TCPServer.serve_forever"""
|
||||
for server in self.servers:
|
||||
thread = threading.Thread(
|
||||
@@ -129,11 +109,11 @@ class BaseDualNetworkedServers:
|
||||
thread.start()
|
||||
self.threads.append(thread)
|
||||
|
||||
def getsocknames(self) -> List[Tuple[str, int]]:
|
||||
def getsocknames(self):
|
||||
"""Wraps socketserver.TCPServer.socket.getsockname"""
|
||||
return [server.socket.getsockname() for server in self.servers]
|
||||
|
||||
def shutdown_and_server_close(self) -> None:
|
||||
def shutdown_and_server_close(self):
|
||||
"""Wraps socketserver.TCPServer.shutdown, socketserver.TCPServer.server_close, and
|
||||
threading.Thread.join"""
|
||||
for server in self.servers:
|
||||
@@ -144,74 +124,33 @@ class BaseDualNetworkedServers:
|
||||
self.threads = []
|
||||
|
||||
|
||||
class TLSALPN01Server(TLSServer, ACMEServerMixin):
|
||||
"""TLSALPN01 Server."""
|
||||
|
||||
ACME_TLS_1_PROTOCOL = b"acme-tls/1"
|
||||
|
||||
def __init__(self, server_address: Tuple[str, int],
|
||||
certs: List[Tuple[crypto.PKey, crypto.X509]],
|
||||
challenge_certs: Mapping[str, Tuple[crypto.PKey, crypto.X509]],
|
||||
ipv6: bool = False) -> None:
|
||||
# We don't need to implement a request handler here because the work
|
||||
# (including logging) is being done by wrapped socket set up in the
|
||||
# parent TLSServer class.
|
||||
TLSServer.__init__(
|
||||
self, server_address, socketserver.BaseRequestHandler, certs=certs,
|
||||
ipv6=ipv6)
|
||||
self.challenge_certs = challenge_certs
|
||||
|
||||
def _cert_selection(self, connection: SSL.Connection) -> Tuple[crypto.PKey, crypto.X509]:
|
||||
# TODO: We would like to serve challenge cert only if asked for it via
|
||||
# ALPN. To do this, we need to retrieve the list of protos from client
|
||||
# hello, but this is currently impossible with openssl [0], and ALPN
|
||||
# negotiation is done after cert selection.
|
||||
# Therefore, currently we always return challenge cert, and terminate
|
||||
# handshake in alpn_selection() if ALPN protos are not what we expect.
|
||||
# [0] https://github.com/openssl/openssl/issues/4952
|
||||
server_name = connection.get_servername()
|
||||
logger.debug("Serving challenge cert for server name %s", server_name)
|
||||
return self.challenge_certs[server_name]
|
||||
|
||||
def _alpn_selection(self, _connection: SSL.Connection, alpn_protos: List[bytes]) -> bytes:
|
||||
"""Callback to select alpn protocol."""
|
||||
if len(alpn_protos) == 1 and alpn_protos[0] == self.ACME_TLS_1_PROTOCOL:
|
||||
logger.debug("Agreed on %s ALPN", self.ACME_TLS_1_PROTOCOL)
|
||||
return self.ACME_TLS_1_PROTOCOL
|
||||
logger.debug("Cannot agree on ALPN proto. Got: %s", str(alpn_protos))
|
||||
# Explicitly close the connection now, by returning an empty string.
|
||||
# See https://www.pyopenssl.org/en/stable/api/ssl.html#OpenSSL.SSL.Context.set_alpn_select_callback # pylint: disable=line-too-long
|
||||
return b""
|
||||
|
||||
|
||||
class HTTPServer(BaseHTTPServer.HTTPServer):
|
||||
"""Generic HTTP Server."""
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.ipv6 = kwargs.pop("ipv6", False)
|
||||
if self.ipv6:
|
||||
self.address_family = socket.AF_INET6
|
||||
else:
|
||||
self.address_family = socket.AF_INET
|
||||
super().__init__(*args, **kwargs)
|
||||
BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
|
||||
|
||||
|
||||
class HTTP01Server(HTTPServer, ACMEServerMixin):
|
||||
"""HTTP01 Server."""
|
||||
|
||||
def __init__(self, server_address: Tuple[str, int], resources: Set[challenges.HTTP01],
|
||||
ipv6: bool = False, timeout: int = 30) -> None:
|
||||
super().__init__(
|
||||
server_address, HTTP01RequestHandler.partial_init(
|
||||
simple_http_resources=resources, timeout=timeout), ipv6=ipv6)
|
||||
def __init__(self, server_address, resources, ipv6=False):
|
||||
HTTPServer.__init__(
|
||||
self, server_address, HTTP01RequestHandler.partial_init(
|
||||
simple_http_resources=resources), ipv6=ipv6)
|
||||
|
||||
|
||||
class HTTP01DualNetworkedServers(BaseDualNetworkedServers):
|
||||
"""HTTP01Server Wrapper. Tries everything for both. Failures for one don't
|
||||
affect the other."""
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
super().__init__(HTTP01Server, *args, **kwargs)
|
||||
def __init__(self, *args, **kwargs):
|
||||
BaseDualNetworkedServers.__init__(self, HTTP01Server, *args, **kwargs)
|
||||
|
||||
|
||||
class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
@@ -226,37 +165,20 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
HTTP01Resource = collections.namedtuple(
|
||||
"HTTP01Resource", "chall response validation")
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.simple_http_resources = kwargs.pop("simple_http_resources", set())
|
||||
self._timeout = kwargs.pop('timeout', 30)
|
||||
super().__init__(*args, **kwargs)
|
||||
self.server: HTTP01Server
|
||||
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
|
||||
|
||||
# In parent class BaseHTTPRequestHandler, 'timeout' is a class-level property but we
|
||||
# need to define its value during the initialization phase in HTTP01RequestHandler.
|
||||
# However MyPy does not appreciate that we dynamically shadow a class-level property
|
||||
# with an instance-level property (eg. self.timeout = ... in __init__()). So to make
|
||||
# everyone happy, we statically redefine 'timeout' as a method property, and set the
|
||||
# timeout value in a new internal instance-level property _timeout.
|
||||
@property
|
||||
def timeout(self) -> int: # type: ignore[override]
|
||||
"""
|
||||
The default timeout this server should apply to requests.
|
||||
:return: timeout to apply
|
||||
:rtype: int
|
||||
"""
|
||||
return self._timeout
|
||||
|
||||
def log_message(self, format: str, *args: Any) -> None: # pylint: disable=redefined-builtin
|
||||
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
||||
"""Log arbitrary message."""
|
||||
logger.debug("%s - - %s", self.client_address[0], format % args)
|
||||
|
||||
def handle(self) -> None:
|
||||
def handle(self):
|
||||
"""Handle request."""
|
||||
self.log_message("Incoming request")
|
||||
BaseHTTPServer.BaseHTTPRequestHandler.handle(self)
|
||||
|
||||
def do_GET(self) -> None: # pylint: disable=invalid-name,missing-function-docstring
|
||||
def do_GET(self): # pylint: disable=invalid-name,missing-docstring
|
||||
if self.path == "/":
|
||||
self.handle_index()
|
||||
elif self.path.startswith("/" + challenges.HTTP01.URI_ROOT_PATH):
|
||||
@@ -264,21 +186,21 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
else:
|
||||
self.handle_404()
|
||||
|
||||
def handle_index(self) -> None:
|
||||
def handle_index(self):
|
||||
"""Handle index page."""
|
||||
self.send_response(200)
|
||||
self.send_header("Content-Type", "text/html")
|
||||
self.end_headers()
|
||||
self.wfile.write(self.server.server_version.encode())
|
||||
|
||||
def handle_404(self) -> None:
|
||||
def handle_404(self):
|
||||
"""Handler 404 Not Found errors."""
|
||||
self.send_response(http_client.NOT_FOUND, message="Not Found")
|
||||
self.send_header("Content-type", "text/html")
|
||||
self.end_headers()
|
||||
self.wfile.write(b"404")
|
||||
|
||||
def handle_simple_http_resource(self) -> None:
|
||||
def handle_simple_http_resource(self):
|
||||
"""Handle HTTP01 provisioned resources."""
|
||||
for resource in self.simple_http_resources:
|
||||
if resource.chall.path == self.path:
|
||||
@@ -294,8 +216,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
self.path)
|
||||
|
||||
@classmethod
|
||||
def partial_init(cls, simple_http_resources: Set[challenges.HTTP01],
|
||||
timeout: int) -> 'functools.partial[HTTP01RequestHandler]':
|
||||
def partial_init(cls, simple_http_resources):
|
||||
"""Partially initialize this handler.
|
||||
|
||||
This is useful because `socketserver.BaseServer` takes
|
||||
@@ -304,5 +225,4 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
|
||||
"""
|
||||
return functools.partial(
|
||||
cls, simple_http_resources=simple_http_resources,
|
||||
timeout=timeout)
|
||||
cls, simple_http_resources=simple_http_resources)
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
"""ACME utilities."""
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Mapping
|
||||
import six
|
||||
|
||||
|
||||
def map_keys(dikt: Mapping[Any, Any], func: Callable[[Any], Any]) -> Dict[Any, Any]:
|
||||
def map_keys(dikt, func):
|
||||
"""Map dictionary keys."""
|
||||
return {func(key): value for key, value in dikt.items()}
|
||||
return dict((func(key), value) for key, value in six.iteritems(dikt))
|
||||
|
||||
@@ -9,7 +9,7 @@ BUILDDIR = _build
|
||||
|
||||
# User-friendly check for sphinx-build
|
||||
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/)
|
||||
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||
endif
|
||||
|
||||
# Internal variables.
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
# serve to show the default.
|
||||
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
@@ -40,7 +41,7 @@ extensions = [
|
||||
]
|
||||
|
||||
autodoc_member_order = 'bysource'
|
||||
autodoc_default_flags = ['show-inheritance']
|
||||
autodoc_default_flags = ['show-inheritance', 'private-members']
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
@@ -58,7 +59,7 @@ master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'acme-python'
|
||||
copyright = u'2015, Let\'s Encrypt Project'
|
||||
copyright = u'2015-2015, Let\'s Encrypt Project'
|
||||
author = u'Let\'s Encrypt Project'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
@@ -85,9 +86,7 @@ language = 'en'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = [
|
||||
'_build',
|
||||
]
|
||||
exclude_patterns = ['_build']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
@@ -114,7 +113,7 @@ pygments_style = 'sphinx'
|
||||
#keep_warnings = False
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = False
|
||||
todo_include_todos = True
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
@@ -122,7 +121,7 @@ todo_include_todos = False
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
|
||||
# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
|
||||
# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
|
||||
# on_rtd is whether we are on readthedocs.org
|
||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||
if not on_rtd: # only import and set the theme if we're building docs locally
|
||||
|
||||
@@ -65,7 +65,7 @@ if errorlevel 9009 (
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.https://www.sphinx-doc.org/
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
|
||||
@@ -1,3 +1 @@
|
||||
:orphan:
|
||||
|
||||
.. literalinclude:: ../jws-help.txt
|
||||
|
||||
2
acme/examples/standalone/README
Normal file
2
acme/examples/standalone/README
Normal file
@@ -0,0 +1,2 @@
|
||||
python -m acme.standalone -p 1234
|
||||
curl -k https://localhost:1234
|
||||
1
acme/examples/standalone/localhost/cert.pem
Symbolic link
1
acme/examples/standalone/localhost/cert.pem
Symbolic link
@@ -0,0 +1 @@
|
||||
../../../acme/testdata/rsa2048_cert.pem
|
||||
1
acme/examples/standalone/localhost/key.pem
Symbolic link
1
acme/examples/standalone/localhost/key.pem
Symbolic link
@@ -0,0 +1 @@
|
||||
../../../acme/testdata/rsa2048_key.pem
|
||||
@@ -7,7 +7,4 @@
|
||||
# in --editable mode (-e), just "pip install acme[docs]" does not work as
|
||||
# expected and "pip install -e acme[docs]" must be used instead
|
||||
|
||||
# We also pin our dependencies for increased stability.
|
||||
|
||||
-c ../tools/requirements.txt
|
||||
-e acme[docs]
|
||||
|
||||
2
acme/setup.cfg
Normal file
2
acme/setup.cfg
Normal file
@@ -0,0 +1,2 @@
|
||||
[bdist_wheel]
|
||||
universal = 1
|
||||
@@ -2,18 +2,34 @@ import sys
|
||||
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
from setuptools.command.test import test as TestCommand
|
||||
|
||||
version = '1.29.0'
|
||||
version = '1.1.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
'cryptography>=2.5.0',
|
||||
'josepy>=1.13.0',
|
||||
'PyOpenSSL>=17.5.0',
|
||||
# load_pem_private/public_key (>=0.6)
|
||||
# rsa_recover_prime_factors (>=0.8)
|
||||
'cryptography>=1.2.3',
|
||||
# formerly known as acme.jose:
|
||||
# 1.1.0+ is required to avoid the warnings described at
|
||||
# https://github.com/certbot/josepy/issues/13.
|
||||
'josepy>=1.1.0',
|
||||
'mock',
|
||||
# Connection.set_tlsext_host_name (>=0.13)
|
||||
'PyOpenSSL>=0.13.1',
|
||||
'pyrfc3339',
|
||||
'pytz>=2019.3',
|
||||
'requests>=2.20.0',
|
||||
'pytz',
|
||||
'requests[security]>=2.6.0', # security extras added in 2.4.1
|
||||
'requests-toolbelt>=0.3.0',
|
||||
'setuptools>=41.6.0',
|
||||
'setuptools',
|
||||
'six>=1.9.0', # needed for python_2_unicode_compatible
|
||||
]
|
||||
|
||||
dev_extras = [
|
||||
'pytest',
|
||||
'pytest-xdist',
|
||||
'tox',
|
||||
]
|
||||
|
||||
docs_extras = [
|
||||
@@ -21,11 +37,21 @@ docs_extras = [
|
||||
'sphinx_rtd_theme',
|
||||
]
|
||||
|
||||
test_extras = [
|
||||
'pytest',
|
||||
'pytest-xdist',
|
||||
'typing-extensions',
|
||||
]
|
||||
|
||||
class PyTest(TestCommand):
|
||||
user_options = []
|
||||
|
||||
def initialize_options(self):
|
||||
TestCommand.initialize_options(self)
|
||||
self.pytest_args = ''
|
||||
|
||||
def run_tests(self):
|
||||
import shlex
|
||||
# import here, cause outside the eggs aren't loaded
|
||||
import pytest
|
||||
errno = pytest.main(shlex.split(self.pytest_args))
|
||||
sys.exit(errno)
|
||||
|
||||
|
||||
setup(
|
||||
name='acme',
|
||||
@@ -33,19 +59,22 @@ setup(
|
||||
description='ACME protocol implementation in Python',
|
||||
url='https://github.com/letsencrypt/letsencrypt',
|
||||
author="Certbot Project",
|
||||
author_email='certbot-dev@eff.org',
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=3.7',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
|
||||
classifiers=[
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
],
|
||||
@@ -54,7 +83,10 @@ setup(
|
||||
include_package_data=True,
|
||||
install_requires=install_requires,
|
||||
extras_require={
|
||||
'dev': dev_extras,
|
||||
'docs': docs_extras,
|
||||
'test': test_extras,
|
||||
},
|
||||
test_suite='acme',
|
||||
tests_require=["pytest"],
|
||||
cmdclass={"test": PyTest},
|
||||
)
|
||||
|
||||
@@ -1,14 +1,10 @@
|
||||
"""Tests for acme.challenges."""
|
||||
import urllib.parse as urllib_parse
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
import mock
|
||||
import requests
|
||||
from josepy.jwk import JWKEC
|
||||
|
||||
from acme import errors
|
||||
from six.moves.urllib import parse as urllib_parse
|
||||
|
||||
import test_util
|
||||
|
||||
@@ -185,7 +181,7 @@ class HTTP01ResponseTest(unittest.TestCase):
|
||||
mock_get.return_value = mock.MagicMock(text=validation)
|
||||
self.assertTrue(self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key()))
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"))
|
||||
|
||||
@mock.patch("acme.challenges.requests.get")
|
||||
def test_simple_verify_bad_validation(self, mock_get):
|
||||
@@ -201,7 +197,7 @@ class HTTP01ResponseTest(unittest.TestCase):
|
||||
HTTP01Response.WHITESPACE_CUTSET))
|
||||
self.assertTrue(self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key()))
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"))
|
||||
|
||||
@mock.patch("acme.challenges.requests.get")
|
||||
def test_simple_verify_connection_error(self, mock_get):
|
||||
@@ -260,87 +256,30 @@ class HTTP01Test(unittest.TestCase):
|
||||
class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
from acme.challenges import TLSALPN01
|
||||
self.chall = TLSALPN01(
|
||||
token=jose.b64decode(b'a82d5ff8ef740d12881f6d3c2277ab2e'))
|
||||
self.domain = u'example.com'
|
||||
self.domain2 = u'example2.com'
|
||||
|
||||
self.response = self.chall.response(KEY)
|
||||
from acme.challenges import TLSALPN01Response
|
||||
self.msg = TLSALPN01Response(key_authorization=u'foo')
|
||||
self.jmsg = {
|
||||
'resource': 'challenge',
|
||||
'type': 'tls-alpn-01',
|
||||
'keyAuthorization': self.response.key_authorization,
|
||||
'keyAuthorization': u'foo',
|
||||
}
|
||||
|
||||
from acme.challenges import TLSALPN01
|
||||
self.chall = TLSALPN01(token=(b'x' * 16))
|
||||
self.response = self.chall.response(KEY)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.response.to_partial_json())
|
||||
self.msg.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import TLSALPN01Response
|
||||
self.assertEqual(self.response, TLSALPN01Response.from_json(self.jmsg))
|
||||
self.assertEqual(self.msg, TLSALPN01Response.from_json(self.jmsg))
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import TLSALPN01Response
|
||||
hash(TLSALPN01Response.from_json(self.jmsg))
|
||||
|
||||
def test_gen_verify_cert(self):
|
||||
key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
|
||||
cert, key2 = self.response.gen_cert(self.domain, key1)
|
||||
self.assertEqual(key1, key2)
|
||||
self.assertTrue(self.response.verify_cert(self.domain, cert))
|
||||
|
||||
def test_gen_verify_cert_gen_key(self):
|
||||
cert, key = self.response.gen_cert(self.domain)
|
||||
self.assertIsInstance(key, OpenSSL.crypto.PKey)
|
||||
self.assertTrue(self.response.verify_cert(self.domain, cert))
|
||||
|
||||
def test_verify_bad_cert(self):
|
||||
self.assertFalse(self.response.verify_cert(self.domain,
|
||||
test_util.load_cert('cert.pem')))
|
||||
|
||||
def test_verify_bad_domain(self):
|
||||
key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
|
||||
cert, key2 = self.response.gen_cert(self.domain, key1)
|
||||
self.assertEqual(key1, key2)
|
||||
self.assertFalse(self.response.verify_cert(self.domain2, cert))
|
||||
|
||||
def test_simple_verify_bad_key_authorization(self):
|
||||
key2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
|
||||
self.response.simple_verify(self.chall, "local", key2.public_key())
|
||||
|
||||
@mock.patch('acme.challenges.TLSALPN01Response.verify_cert', autospec=True)
|
||||
def test_simple_verify(self, mock_verify_cert):
|
||||
mock_verify_cert.return_value = mock.sentinel.verification
|
||||
self.assertEqual(
|
||||
mock.sentinel.verification, self.response.simple_verify(
|
||||
self.chall, self.domain, KEY.public_key(),
|
||||
cert=mock.sentinel.cert))
|
||||
mock_verify_cert.assert_called_once_with(
|
||||
self.response, self.domain, mock.sentinel.cert)
|
||||
|
||||
@mock.patch('acme.challenges.socket.gethostbyname')
|
||||
@mock.patch('acme.challenges.crypto_util.probe_sni')
|
||||
def test_probe_cert(self, mock_probe_sni, mock_gethostbyname):
|
||||
mock_gethostbyname.return_value = '127.0.0.1'
|
||||
self.response.probe_cert('foo.com')
|
||||
mock_gethostbyname.assert_called_once_with('foo.com')
|
||||
mock_probe_sni.assert_called_once_with(
|
||||
host=b'127.0.0.1', port=self.response.PORT, name=b'foo.com',
|
||||
alpn_protocols=['acme-tls/1'])
|
||||
|
||||
self.response.probe_cert('foo.com', host='8.8.8.8')
|
||||
mock_probe_sni.assert_called_with(
|
||||
host=b'8.8.8.8', port=mock.ANY, name=b'foo.com',
|
||||
alpn_protocols=['acme-tls/1'])
|
||||
|
||||
@mock.patch('acme.challenges.TLSALPN01Response.probe_cert')
|
||||
def test_simple_verify_false_on_probe_error(self, mock_probe_cert):
|
||||
mock_probe_cert.side_effect = errors.Error
|
||||
self.assertFalse(self.response.simple_verify(
|
||||
self.chall, self.domain, KEY.public_key()))
|
||||
|
||||
|
||||
class TLSALPN01Test(unittest.TestCase):
|
||||
|
||||
@@ -370,13 +309,8 @@ class TLSALPN01Test(unittest.TestCase):
|
||||
self.assertRaises(
|
||||
jose.DeserializationError, TLSALPN01.from_json, self.jmsg)
|
||||
|
||||
@mock.patch('acme.challenges.TLSALPN01Response.gen_cert')
|
||||
def test_validation(self, mock_gen_cert):
|
||||
mock_gen_cert.return_value = ('cert', 'key')
|
||||
self.assertEqual(('cert', 'key'), self.msg.validation(
|
||||
KEY, cert_key=mock.sentinel.cert_key, domain=mock.sentinel.domain))
|
||||
mock_gen_cert.assert_called_once_with(key=mock.sentinel.cert_key,
|
||||
domain=mock.sentinel.domain)
|
||||
def test_validation(self):
|
||||
self.assertRaises(NotImplementedError, self.msg.validation, KEY)
|
||||
|
||||
|
||||
class DNSTest(unittest.TestCase):
|
||||
@@ -402,11 +336,8 @@ class DNSTest(unittest.TestCase):
|
||||
hash(DNS.from_json(self.jmsg))
|
||||
|
||||
def test_gen_check_validation(self):
|
||||
ec_key_secp384r1 = JWKEC(key=test_util.load_ecdsa_private_key('ec_secp384r1_key.pem'))
|
||||
for key, alg in [(KEY, jose.RS256), (ec_key_secp384r1, jose.ES384)]:
|
||||
with self.subTest(key=key, alg=alg):
|
||||
self.assertTrue(self.msg.check_validation(
|
||||
self.msg.gen_validation(key, alg=alg), key.public_key()))
|
||||
self.assertTrue(self.msg.check_validation(
|
||||
self.msg.gen_validation(KEY), KEY.public_key()))
|
||||
|
||||
def test_gen_check_validation_wrong_key(self):
|
||||
key2 = jose.JWKRSA.load(test_util.load_vector('rsa1024_key.pem'))
|
||||
@@ -427,25 +358,20 @@ class DNSTest(unittest.TestCase):
|
||||
payload=self.msg.update(
|
||||
token=b'x' * 20).json_dumps().encode('utf-8'),
|
||||
alg=jose.RS256, key=KEY)
|
||||
self.assertFalse(self.msg.check_validation(bad_validation, KEY.public_key()))
|
||||
self.assertFalse(self.msg.check_validation(
|
||||
bad_validation, KEY.public_key()))
|
||||
|
||||
def test_gen_response(self):
|
||||
with mock.patch('acme.challenges.DNS.gen_validation') as mock_gen:
|
||||
mock_gen.return_value = mock.sentinel.validation
|
||||
response = self.msg.gen_response(KEY)
|
||||
from acme.challenges import DNSResponse
|
||||
self.assertIsInstance(response, DNSResponse)
|
||||
self.assertTrue(isinstance(response, DNSResponse))
|
||||
self.assertEqual(response.validation, mock.sentinel.validation)
|
||||
|
||||
def test_validation_domain_name(self):
|
||||
self.assertEqual('_acme-challenge.le.wtf', self.msg.validation_domain_name('le.wtf'))
|
||||
|
||||
def test_validation_domain_name_ecdsa(self):
|
||||
ec_key_secp384r1 = JWKEC(key=test_util.load_ecdsa_private_key('ec_secp384r1_key.pem'))
|
||||
self.assertIs(self.msg.check_validation(
|
||||
self.msg.gen_validation(ec_key_secp384r1, alg=jose.ES384),
|
||||
ec_key_secp384r1.public_key()), True
|
||||
)
|
||||
self.assertEqual(
|
||||
'_acme-challenge.le.wtf', self.msg.validation_domain_name('le.wtf'))
|
||||
|
||||
|
||||
class DNSResponseTest(unittest.TestCase):
|
||||
@@ -483,20 +409,8 @@ class DNSResponseTest(unittest.TestCase):
|
||||
hash(DNSResponse.from_json(self.jmsg_from))
|
||||
|
||||
def test_check_validation(self):
|
||||
self.assertTrue(self.msg.check_validation(self.chall, KEY.public_key()))
|
||||
|
||||
|
||||
class JWSPayloadRFC8555Compliant(unittest.TestCase):
|
||||
"""Test for RFC8555 compliance of JWS generated from resources/challenges"""
|
||||
def test_challenge_payload(self):
|
||||
from acme.challenges import HTTP01Response
|
||||
|
||||
challenge_body = HTTP01Response()
|
||||
challenge_body.le_acme_version = 2
|
||||
|
||||
jobj = challenge_body.json_dumps(indent=2).encode()
|
||||
# RFC8555 states that challenge responses must have an empty payload.
|
||||
self.assertEqual(jobj, b'{}')
|
||||
self.assertTrue(
|
||||
self.msg.check_validation(self.chall, KEY.public_key()))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -2,29 +2,26 @@
|
||||
# pylint: disable=too-many-lines
|
||||
import copy
|
||||
import datetime
|
||||
import http.client as http_client
|
||||
import ipaddress
|
||||
import json
|
||||
import unittest
|
||||
from typing import Dict
|
||||
from unittest import mock
|
||||
|
||||
import josepy as jose
|
||||
import mock
|
||||
import OpenSSL
|
||||
import requests
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
|
||||
from acme import challenges
|
||||
from acme import errors
|
||||
from acme import jws as acme_jws
|
||||
from acme import messages
|
||||
from acme.mixins import VersionedLEACMEMixin
|
||||
from acme.magic_typing import Dict # pylint: disable=unused-import, no-name-in-module
|
||||
import messages_test
|
||||
import test_util
|
||||
|
||||
CERT_DER = test_util.load_vector('cert.der')
|
||||
CERT_SAN_PEM = test_util.load_vector('cert-san.pem')
|
||||
CSR_SAN_PEM = test_util.load_vector('csr-san.pem')
|
||||
CSR_MIXED_PEM = test_util.load_vector('csr-mixed.pem')
|
||||
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
|
||||
KEY2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
|
||||
|
||||
@@ -64,7 +61,7 @@ class ClientTestBase(unittest.TestCase):
|
||||
self.contact = ('mailto:cert-admin@example.com', 'tel:+12025551212')
|
||||
reg = messages.Registration(
|
||||
contact=self.contact, key=KEY.public_key())
|
||||
the_arg: Dict = dict(reg)
|
||||
the_arg = dict(reg) # type: Dict
|
||||
self.new_reg = messages.NewRegistration(**the_arg)
|
||||
self.regr = messages.RegistrationResource(
|
||||
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
|
||||
@@ -92,7 +89,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
|
||||
"""Tests for acme.client.BackwardsCompatibleClientV2."""
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
super(BackwardsCompatibleClientV2Test, self).setUp()
|
||||
# contains a loaded cert
|
||||
self.certr = messages.CertificateResource(
|
||||
body=messages_test.CERT)
|
||||
@@ -140,7 +137,6 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
|
||||
self.response.json.return_value = DIRECTORY_V2.to_json()
|
||||
client = self._init()
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.response.headers = {'Location': 'https://www.letsencrypt-demo.org/acme/reg/1'}
|
||||
self.assertEqual(self.regr, client.query_registration(self.regr))
|
||||
|
||||
def test_forwarding(self):
|
||||
@@ -264,7 +260,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
|
||||
with mock.patch('acme.client.ClientV2') as mock_client:
|
||||
client = self._init()
|
||||
client.finalize_order(mock_orderr, mock_deadline)
|
||||
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline, False)
|
||||
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline)
|
||||
|
||||
def test_revoke(self):
|
||||
self.response.json.return_value = DIRECTORY_V1.to_json()
|
||||
@@ -322,7 +318,7 @@ class ClientTest(ClientTestBase):
|
||||
"""Tests for acme.client.Client."""
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
super(ClientTest, self).setUp()
|
||||
|
||||
self.directory = DIRECTORY_V1
|
||||
|
||||
@@ -607,8 +603,8 @@ class ClientTest(ClientTestBase):
|
||||
# make sure that max_attempts is per-authorization, rather
|
||||
# than global
|
||||
max_attempts=max(len(authzrs[0].retries), len(authzrs[1].retries)))
|
||||
self.assertIs(cert[0], csr)
|
||||
self.assertIs(cert[1], updated_authzrs)
|
||||
self.assertTrue(cert[0] is csr)
|
||||
self.assertTrue(cert[1] is updated_authzrs)
|
||||
self.assertEqual(updated_authzrs[0].uri, 'a...')
|
||||
self.assertEqual(updated_authzrs[1].uri, 'b.')
|
||||
self.assertEqual(updated_authzrs[0].times, [
|
||||
@@ -644,7 +640,7 @@ class ClientTest(ClientTestBase):
|
||||
authzr = self.client.deactivate_authorization(self.authzr)
|
||||
self.assertEqual(authzb, authzr.body)
|
||||
self.assertEqual(self.client.net.post.call_count, 1)
|
||||
self.assertIn(self.authzr.uri, self.net.post.call_args_list[0][0])
|
||||
self.assertTrue(self.authzr.uri in self.net.post.call_args_list[0][0])
|
||||
|
||||
def test_check_cert(self):
|
||||
self.response.headers['Location'] = self.certr.uri
|
||||
@@ -703,7 +699,7 @@ class ClientTest(ClientTestBase):
|
||||
|
||||
def test_revocation_payload(self):
|
||||
obj = messages.Revocation(certificate=self.certr.body, reason=self.rsn)
|
||||
self.assertIn('reason', obj.to_partial_json().keys())
|
||||
self.assertTrue('reason' in obj.to_partial_json().keys())
|
||||
self.assertEqual(self.rsn, obj.to_partial_json()['reason'])
|
||||
|
||||
def test_revoke_bad_status_raises_error(self):
|
||||
@@ -719,7 +715,7 @@ class ClientV2Test(ClientTestBase):
|
||||
"""Tests for acme.client.ClientV2."""
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
super(ClientV2Test, self).setUp()
|
||||
|
||||
self.directory = DIRECTORY_V2
|
||||
|
||||
@@ -743,7 +739,7 @@ class ClientV2Test(ClientTestBase):
|
||||
self.orderr = messages.OrderResource(
|
||||
body=self.order,
|
||||
uri='https://www.letsencrypt-demo.org/acme/acct/1/order/1',
|
||||
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_MIXED_PEM)
|
||||
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_SAN_PEM)
|
||||
|
||||
def test_new_account(self):
|
||||
self.response.status_code = http_client.CREATED
|
||||
@@ -773,7 +769,7 @@ class ClientV2Test(ClientTestBase):
|
||||
|
||||
with mock.patch('acme.client.ClientV2._post_as_get') as mock_post_as_get:
|
||||
mock_post_as_get.side_effect = (authz_response, authz_response2)
|
||||
self.assertEqual(self.client.new_order(CSR_MIXED_PEM), self.orderr)
|
||||
self.assertEqual(self.client.new_order(CSR_SAN_PEM), self.orderr)
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_poll_and_finalize(self, mock_datetime):
|
||||
@@ -823,8 +819,7 @@ class ClientV2Test(ClientTestBase):
|
||||
|
||||
def test_finalize_order_success(self):
|
||||
updated_order = self.order.update(
|
||||
certificate='https://www.letsencrypt-demo.org/acme/cert/',
|
||||
status=messages.STATUS_VALID)
|
||||
certificate='https://www.letsencrypt-demo.org/acme/cert/')
|
||||
updated_orderr = self.orderr.update(body=updated_order, fullchain_pem=CERT_SAN_PEM)
|
||||
|
||||
self.response.json.return_value = updated_order.to_json()
|
||||
@@ -834,53 +829,16 @@ class ClientV2Test(ClientTestBase):
|
||||
self.assertEqual(self.client.finalize_order(self.orderr, deadline), updated_orderr)
|
||||
|
||||
def test_finalize_order_error(self):
|
||||
updated_order = self.order.update(
|
||||
error=messages.Error.with_code('unauthorized'),
|
||||
status=messages.STATUS_INVALID)
|
||||
updated_order = self.order.update(error=messages.Error.with_code('unauthorized'))
|
||||
self.response.json.return_value = updated_order.to_json()
|
||||
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
self.assertRaises(errors.IssuanceError, self.client.finalize_order, self.orderr, deadline)
|
||||
|
||||
def test_finalize_order_invalid_status(self):
|
||||
# https://github.com/certbot/certbot/issues/9296
|
||||
order = self.order.update(error=None, status=messages.STATUS_INVALID)
|
||||
self.response.json.return_value = order.to_json()
|
||||
with self.assertRaises(errors.Error) as error:
|
||||
self.client.finalize_order(self.orderr, datetime.datetime(9999, 9, 9))
|
||||
self.assertIn("The certificate order failed", str(error.exception))
|
||||
|
||||
def test_finalize_order_timeout(self):
|
||||
deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
|
||||
self.assertRaises(errors.TimeoutError, self.client.finalize_order, self.orderr, deadline)
|
||||
|
||||
def test_finalize_order_alt_chains(self):
|
||||
updated_order = self.order.update(
|
||||
certificate='https://www.letsencrypt-demo.org/acme/cert/',
|
||||
status=messages.STATUS_VALID
|
||||
)
|
||||
updated_orderr = self.orderr.update(body=updated_order,
|
||||
fullchain_pem=CERT_SAN_PEM,
|
||||
alternative_fullchains_pem=[CERT_SAN_PEM,
|
||||
CERT_SAN_PEM])
|
||||
self.response.json.return_value = updated_order.to_json()
|
||||
self.response.text = CERT_SAN_PEM
|
||||
self.response.headers['Link'] ='<https://example.com/acme/cert/1>;rel="alternate", ' + \
|
||||
'<https://example.com/dir>;rel="index", ' + \
|
||||
'<https://example.com/acme/cert/2>;title="foo";rel="alternate"'
|
||||
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
|
||||
self.net.post.assert_any_call('https://example.com/acme/cert/1',
|
||||
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
|
||||
self.net.post.assert_any_call('https://example.com/acme/cert/2',
|
||||
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
|
||||
self.assertEqual(resp, updated_orderr)
|
||||
|
||||
del self.response.headers['Link']
|
||||
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
|
||||
self.assertEqual(resp, updated_orderr.update(alternative_fullchains_pem=[]))
|
||||
|
||||
def test_revoke(self):
|
||||
self.client.revoke(messages_test.CERT, self.rsn)
|
||||
self.net.post.assert_called_once_with(
|
||||
@@ -892,9 +850,9 @@ class ClientV2Test(ClientTestBase):
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.assertEqual(self.regr, self.client.update_registration(self.regr))
|
||||
self.assertIsNotNone(self.client.net.account)
|
||||
self.assertNotEqual(self.client.net.account, None)
|
||||
self.assertEqual(self.client.net.post.call_count, 2)
|
||||
self.assertIn(DIRECTORY_V2.newAccount, self.net.post.call_args_list[0][0])
|
||||
self.assertTrue(DIRECTORY_V2.newAccount in self.net.post.call_args_list[0][0])
|
||||
|
||||
self.response.json.return_value = self.regr.body.update(
|
||||
contact=()).to_json()
|
||||
@@ -928,7 +886,7 @@ class ClientV2Test(ClientTestBase):
|
||||
self.client.net.get.assert_not_called()
|
||||
|
||||
|
||||
class MockJSONDeSerializable(VersionedLEACMEMixin, jose.JSONDeSerializable):
|
||||
class MockJSONDeSerializable(jose.JSONDeSerializable):
|
||||
# pylint: disable=missing-docstring
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
@@ -958,7 +916,7 @@ class ClientNetworkTest(unittest.TestCase):
|
||||
self.response.links = {}
|
||||
|
||||
def test_init(self):
|
||||
self.assertIs(self.net.verify_ssl, self.verify_ssl)
|
||||
self.assertTrue(self.net.verify_ssl is self.verify_ssl)
|
||||
|
||||
def test_wrap_in_jws(self):
|
||||
# pylint: disable=protected-access
|
||||
@@ -1022,35 +980,6 @@ class ClientNetworkTest(unittest.TestCase):
|
||||
self.assertEqual(
|
||||
self.response, self.net._check_response(self.response))
|
||||
|
||||
@mock.patch('acme.client.logger')
|
||||
def test_check_response_ok_ct_with_charset(self, mock_logger):
|
||||
self.response.json.return_value = {}
|
||||
self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
|
||||
# pylint: disable=protected-access
|
||||
self.assertEqual(self.response, self.net._check_response(
|
||||
self.response, content_type='application/json'))
|
||||
try:
|
||||
mock_logger.debug.assert_called_with(
|
||||
'Ignoring wrong Content-Type (%r) for JSON decodable response',
|
||||
'application/json; charset=utf-8'
|
||||
)
|
||||
except AssertionError:
|
||||
return
|
||||
raise AssertionError('Expected Content-Type warning ' #pragma: no cover
|
||||
'to not have been logged')
|
||||
|
||||
@mock.patch('acme.client.logger')
|
||||
def test_check_response_ok_bad_ct(self, mock_logger):
|
||||
self.response.json.return_value = {}
|
||||
self.response.headers['Content-Type'] = 'text/plain'
|
||||
# pylint: disable=protected-access
|
||||
self.assertEqual(self.response, self.net._check_response(
|
||||
self.response, content_type='application/json'))
|
||||
mock_logger.debug.assert_called_with(
|
||||
'Ignoring wrong Content-Type (%r) for JSON decodable response',
|
||||
'text/plain'
|
||||
)
|
||||
|
||||
def test_check_response_conflict(self):
|
||||
self.response.ok = False
|
||||
self.response.status_code = 409
|
||||
@@ -1200,7 +1129,7 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
|
||||
|
||||
def send_request(*args, **kwargs):
|
||||
# pylint: disable=unused-argument,missing-docstring
|
||||
self.assertNotIn("new_nonce_url", kwargs)
|
||||
self.assertFalse("new_nonce_url" in kwargs)
|
||||
method = args[0]
|
||||
uri = args[1]
|
||||
if method == 'HEAD' and uri != "new_nonce_uri":
|
||||
@@ -1345,7 +1274,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
|
||||
from acme.client import ClientNetwork
|
||||
net = ClientNetwork(key=None, alg=None, source_address=self.source_address)
|
||||
for adapter in net.session.adapters.values():
|
||||
self.assertIn(self.source_address, adapter.source_address)
|
||||
self.assertTrue(self.source_address in adapter.source_address)
|
||||
|
||||
def test_behavior_assumption(self):
|
||||
"""This is a test that guardrails the HTTPAdapter behavior so that if the default for
|
||||
@@ -1355,7 +1284,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
|
||||
# test should fail if the default adapter type is changed by requests
|
||||
net = ClientNetwork(key=None, alg=None)
|
||||
session = requests.Session()
|
||||
for scheme in session.adapters:
|
||||
for scheme in session.adapters.keys():
|
||||
client_network_adapter = net.session.adapters.get(scheme)
|
||||
default_adapter = session.adapters.get(scheme)
|
||||
self.assertEqual(client_network_adapter.__class__, default_adapter.__class__)
|
||||
|
||||
@@ -1,23 +1,24 @@
|
||||
"""Tests for acme.crypto_util."""
|
||||
import itertools
|
||||
import ipaddress
|
||||
import socket
|
||||
import socketserver
|
||||
import threading
|
||||
import time
|
||||
import unittest
|
||||
from typing import List
|
||||
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
import six
|
||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||
|
||||
from acme import errors
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
import test_util
|
||||
|
||||
|
||||
class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
"""Tests for acme.crypto_util.SSLSocket/probe_sni."""
|
||||
|
||||
|
||||
def setUp(self):
|
||||
self.cert = test_util.load_comparable_cert('rsa2048_cert.pem')
|
||||
key = test_util.load_pyopenssl_private_key('rsa2048_key.pem')
|
||||
@@ -28,9 +29,10 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
|
||||
class _TestServer(socketserver.TCPServer):
|
||||
|
||||
# six.moves.* | pylint: disable=attribute-defined-outside-init,no-init
|
||||
|
||||
def server_bind(self): # pylint: disable=missing-docstring
|
||||
self.socket = SSLSocket(socket.socket(),
|
||||
certs)
|
||||
self.socket = SSLSocket(socket.socket(), certs=certs)
|
||||
socketserver.TCPServer.server_bind(self)
|
||||
|
||||
self.server = _TestServer(('', 0), socketserver.BaseRequestHandler)
|
||||
@@ -61,6 +63,7 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
self.assertRaises(errors.Error, self._probe, b'bar')
|
||||
|
||||
def test_probe_connection_error(self):
|
||||
# pylint has a hard time with six
|
||||
self.server.server_close()
|
||||
original_timeout = socket.getdefaulttimeout()
|
||||
try:
|
||||
@@ -70,18 +73,6 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
socket.setdefaulttimeout(original_timeout)
|
||||
|
||||
|
||||
class SSLSocketTest(unittest.TestCase):
|
||||
"""Tests for acme.crypto_util.SSLSocket."""
|
||||
|
||||
def test_ssl_socket_invalid_arguments(self):
|
||||
from acme.crypto_util import SSLSocket
|
||||
with self.assertRaises(ValueError):
|
||||
_ = SSLSocket(None, {'sni': ('key', 'cert')},
|
||||
cert_selection=lambda _: None)
|
||||
with self.assertRaises(ValueError):
|
||||
_ = SSLSocket(None)
|
||||
|
||||
|
||||
class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
|
||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_all_names."""
|
||||
|
||||
@@ -109,6 +100,7 @@ class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
|
||||
class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
|
||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san."""
|
||||
|
||||
|
||||
@classmethod
|
||||
def _call(cls, loader, name):
|
||||
# pylint: disable=protected-access
|
||||
@@ -118,9 +110,9 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
|
||||
@classmethod
|
||||
def _get_idn_names(cls):
|
||||
"""Returns expected names from '{cert,csr}-idnsans.pem'."""
|
||||
chars = [chr(i) for i in itertools.chain(range(0x3c3, 0x400),
|
||||
range(0x641, 0x6fc),
|
||||
range(0x1820, 0x1877))]
|
||||
chars = [six.unichr(i) for i in itertools.chain(range(0x3c3, 0x400),
|
||||
range(0x641, 0x6fc),
|
||||
range(0x1820, 0x1877))]
|
||||
return [''.join(chars[i: i + 45]) + '.invalid'
|
||||
for i in range(0, len(chars), 45)]
|
||||
|
||||
@@ -174,73 +166,24 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
|
||||
['chicago-cubs.venafi.example', 'cubs.venafi.example'])
|
||||
|
||||
|
||||
class PyOpenSSLCertOrReqSANIPTest(unittest.TestCase):
|
||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san_ip."""
|
||||
|
||||
@classmethod
|
||||
def _call(cls, loader, name):
|
||||
# pylint: disable=protected-access
|
||||
from acme.crypto_util import _pyopenssl_cert_or_req_san_ip
|
||||
return _pyopenssl_cert_or_req_san_ip(loader(name))
|
||||
|
||||
def _call_cert(self, name):
|
||||
return self._call(test_util.load_cert, name)
|
||||
|
||||
def _call_csr(self, name):
|
||||
return self._call(test_util.load_csr, name)
|
||||
|
||||
def test_cert_no_sans(self):
|
||||
self.assertEqual(self._call_cert('cert.pem'), [])
|
||||
|
||||
def test_csr_no_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-nosans.pem'), [])
|
||||
|
||||
def test_cert_domain_sans(self):
|
||||
self.assertEqual(self._call_cert('cert-san.pem'), [])
|
||||
|
||||
def test_csr_domain_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-san.pem'), [])
|
||||
|
||||
def test_cert_ip_two_sans(self):
|
||||
self.assertEqual(self._call_cert('cert-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])
|
||||
|
||||
def test_csr_ip_two_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])
|
||||
|
||||
def test_csr_ipv6_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-ipv6sans.pem'),
|
||||
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])
|
||||
|
||||
def test_cert_ipv6_sans(self):
|
||||
self.assertEqual(self._call_cert('cert-ipv6sans.pem'),
|
||||
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])
|
||||
|
||||
|
||||
class GenSsCertTest(unittest.TestCase):
|
||||
"""Test for gen_ss_cert (generation of self-signed cert)."""
|
||||
class RandomSnTest(unittest.TestCase):
|
||||
"""Test for random certificate serial numbers."""
|
||||
|
||||
|
||||
def setUp(self):
|
||||
self.cert_count = 5
|
||||
self.serial_num: List[int] = []
|
||||
self.serial_num = [] # type: List[int]
|
||||
self.key = OpenSSL.crypto.PKey()
|
||||
self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
|
||||
|
||||
def test_sn_collisions(self):
|
||||
from acme.crypto_util import gen_ss_cert
|
||||
|
||||
for _ in range(self.cert_count):
|
||||
cert = gen_ss_cert(self.key, ['dummy'], force_san=True,
|
||||
ips=[ipaddress.ip_address("10.10.10.10")])
|
||||
cert = gen_ss_cert(self.key, ['dummy'], force_san=True)
|
||||
self.serial_num.append(cert.get_serial_number())
|
||||
self.assertGreaterEqual(len(set(self.serial_num)), self.cert_count)
|
||||
|
||||
|
||||
def test_no_name(self):
|
||||
from acme.crypto_util import gen_ss_cert
|
||||
with self.assertRaises(AssertionError):
|
||||
gen_ss_cert(self.key, ips=[ipaddress.ip_address("1.1.1.1")])
|
||||
gen_ss_cert(self.key)
|
||||
|
||||
self.assertTrue(len(set(self.serial_num)) > 1)
|
||||
|
||||
class MakeCSRTest(unittest.TestCase):
|
||||
"""Test for standalone functions."""
|
||||
@@ -255,8 +198,8 @@ class MakeCSRTest(unittest.TestCase):
|
||||
|
||||
def test_make_csr(self):
|
||||
csr_pem = self._call_with_key(["a.example", "b.example"])
|
||||
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--', csr_pem)
|
||||
self.assertIn(b'--END CERTIFICATE REQUEST--', csr_pem)
|
||||
self.assertTrue(b'--BEGIN CERTIFICATE REQUEST--' in csr_pem)
|
||||
self.assertTrue(b'--END CERTIFICATE REQUEST--' in csr_pem)
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
|
||||
@@ -272,27 +215,6 @@ class MakeCSRTest(unittest.TestCase):
|
||||
).get_data(),
|
||||
)
|
||||
|
||||
def test_make_csr_ip(self):
|
||||
csr_pem = self._call_with_key(["a.example"], False, [ipaddress.ip_address('127.0.0.1'), ipaddress.ip_address('::1')])
|
||||
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--' , csr_pem)
|
||||
self.assertIn(b'--END CERTIFICATE REQUEST--' , csr_pem)
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
|
||||
# have a get_extensions() method, so we skip this test if the method
|
||||
# isn't available.
|
||||
if hasattr(csr, 'get_extensions'):
|
||||
self.assertEqual(len(csr.get_extensions()), 1)
|
||||
self.assertEqual(csr.get_extensions()[0].get_data(),
|
||||
OpenSSL.crypto.X509Extension(
|
||||
b'subjectAltName',
|
||||
critical=False,
|
||||
value=b'DNS:a.example, IP:127.0.0.1, IP:::1',
|
||||
).get_data(),
|
||||
)
|
||||
# for IP san it's actually need to be octet-string,
|
||||
# but somewhere downstream thankfully handle it for us
|
||||
|
||||
def test_make_csr_must_staple(self):
|
||||
csr_pem = self._call_with_key(["a.example"], must_staple=True)
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
@@ -311,17 +233,6 @@ class MakeCSRTest(unittest.TestCase):
|
||||
self.assertEqual(len(must_staple_exts), 1,
|
||||
"Expected exactly one Must Staple extension")
|
||||
|
||||
def test_make_csr_without_hostname(self):
|
||||
self.assertRaises(ValueError, self._call_with_key)
|
||||
|
||||
def test_make_csr_correct_version(self):
|
||||
csr_pem = self._call_with_key(["a.example"])
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
|
||||
self.assertEqual(csr.get_version(), 0,
|
||||
"Expected CSR version to be v1 (encoded as 0), per RFC 2986, section 4")
|
||||
|
||||
|
||||
class DumpPyopensslChainTest(unittest.TestCase):
|
||||
"""Test for dump_pyopenssl_chain."""
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Tests for acme.errors."""
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
import mock
|
||||
|
||||
|
||||
class BadNonceTest(unittest.TestCase):
|
||||
@@ -24,8 +25,8 @@ class MissingNonceTest(unittest.TestCase):
|
||||
self.error = MissingNonce(self.response)
|
||||
|
||||
def test_str(self):
|
||||
self.assertIn("FOO", str(self.error))
|
||||
self.assertIn("{}", str(self.error))
|
||||
self.assertTrue("FOO" in str(self.error))
|
||||
self.assertTrue("{}" in str(self.error))
|
||||
|
||||
|
||||
class PollErrorTest(unittest.TestCase):
|
||||
@@ -34,7 +35,7 @@ class PollErrorTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
from acme.errors import PollError
|
||||
self.timeout = PollError(
|
||||
exhausted={mock.sentinel.AR},
|
||||
exhausted=set([mock.sentinel.AR]),
|
||||
updated={})
|
||||
self.invalid = PollError(exhausted=set(), updated={
|
||||
mock.sentinel.AR: mock.sentinel.AR2})
|
||||
|
||||
@@ -10,8 +10,8 @@ class FixedTest(unittest.TestCase):
|
||||
"""Tests for acme.fields.Fixed."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.fields import fixed
|
||||
self.field = fixed('name', 'x')
|
||||
from acme.fields import Fixed
|
||||
self.field = Fixed('name', 'x')
|
||||
|
||||
def test_decode(self):
|
||||
self.assertEqual('x', self.field.decode('x'))
|
||||
|
||||
@@ -48,7 +48,7 @@ class JWSTest(unittest.TestCase):
|
||||
self.assertEqual(jws.signature.combined.nonce, self.nonce)
|
||||
self.assertEqual(jws.signature.combined.url, self.url)
|
||||
self.assertEqual(jws.signature.combined.kid, self.kid)
|
||||
self.assertIsNone(jws.signature.combined.jwk)
|
||||
self.assertEqual(jws.signature.combined.jwk, None)
|
||||
# TODO: check that nonce is in protected header
|
||||
|
||||
self.assertEqual(jws, JWS.from_json(jws.to_json()))
|
||||
@@ -58,7 +58,7 @@ class JWSTest(unittest.TestCase):
|
||||
jws = JWS.sign(payload=b'foo', key=self.privkey,
|
||||
alg=jose.RS256, nonce=self.nonce,
|
||||
url=self.url)
|
||||
self.assertIsNone(jws.signature.combined.kid)
|
||||
self.assertEqual(jws.signature.combined.kid, None)
|
||||
self.assertEqual(jws.signature.combined.jwk, self.pubkey)
|
||||
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
"""Tests for acme.magic_typing."""
|
||||
import sys
|
||||
import unittest
|
||||
import warnings
|
||||
from unittest import mock
|
||||
|
||||
import mock
|
||||
|
||||
|
||||
class MagicTypingTest(unittest.TestCase):
|
||||
@@ -10,21 +10,32 @@ class MagicTypingTest(unittest.TestCase):
|
||||
def test_import_success(self):
|
||||
try:
|
||||
import typing as temp_typing
|
||||
except ImportError: # pragma: no cover
|
||||
temp_typing = None # pragma: no cover
|
||||
except ImportError: # pragma: no cover
|
||||
temp_typing = None # pragma: no cover
|
||||
typing_class_mock = mock.MagicMock()
|
||||
text_mock = mock.MagicMock()
|
||||
typing_class_mock.Text = text_mock
|
||||
sys.modules['typing'] = typing_class_mock
|
||||
if 'acme.magic_typing' in sys.modules:
|
||||
del sys.modules['acme.magic_typing'] # pragma: no cover
|
||||
with warnings.catch_warnings():
|
||||
warnings.filterwarnings("ignore", category=DeprecationWarning)
|
||||
from acme.magic_typing import Text
|
||||
del sys.modules['acme.magic_typing'] # pragma: no cover
|
||||
from acme.magic_typing import Text # pylint: disable=no-name-in-module
|
||||
self.assertEqual(Text, text_mock)
|
||||
del sys.modules['acme.magic_typing']
|
||||
sys.modules['typing'] = temp_typing
|
||||
|
||||
def test_import_failure(self):
|
||||
try:
|
||||
import typing as temp_typing
|
||||
except ImportError: # pragma: no cover
|
||||
temp_typing = None # pragma: no cover
|
||||
sys.modules['typing'] = None
|
||||
if 'acme.magic_typing' in sys.modules:
|
||||
del sys.modules['acme.magic_typing'] # pragma: no cover
|
||||
from acme.magic_typing import Text # pylint: disable=no-name-in-module
|
||||
self.assertTrue(Text is None)
|
||||
del sys.modules['acme.magic_typing']
|
||||
sys.modules['typing'] = temp_typing
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
"""Tests for acme.messages."""
|
||||
from typing import Dict
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
import josepy as jose
|
||||
import mock
|
||||
|
||||
from acme import challenges
|
||||
from acme.magic_typing import Dict # pylint: disable=unused-import, no-name-in-module
|
||||
import test_util
|
||||
|
||||
CERT = test_util.load_comparable_cert('cert.der')
|
||||
@@ -17,7 +17,7 @@ class ErrorTest(unittest.TestCase):
|
||||
"""Tests for acme.messages.Error."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.messages import Error, ERROR_PREFIX, Identifier, IDENTIFIER_FQDN
|
||||
from acme.messages import Error, ERROR_PREFIX
|
||||
self.error = Error.with_code('malformed', detail='foo', title='title')
|
||||
self.jobj = {
|
||||
'detail': 'foo',
|
||||
@@ -25,9 +25,6 @@ class ErrorTest(unittest.TestCase):
|
||||
'type': ERROR_PREFIX + 'malformed',
|
||||
}
|
||||
self.error_custom = Error(typ='custom', detail='bar')
|
||||
self.identifier = Identifier(typ=IDENTIFIER_FQDN, value='example.com')
|
||||
self.subproblem = Error.with_code('caa', detail='bar', title='title', identifier=self.identifier)
|
||||
self.error_with_subproblems = Error.with_code('malformed', detail='foo', title='title', subproblems=[self.subproblem])
|
||||
self.empty_error = Error()
|
||||
|
||||
def test_default_typ(self):
|
||||
@@ -42,23 +39,15 @@ class ErrorTest(unittest.TestCase):
|
||||
from acme.messages import Error
|
||||
hash(Error.from_json(self.error.to_json()))
|
||||
|
||||
def test_from_json_with_subproblems(self):
|
||||
from acme.messages import Error
|
||||
|
||||
parsed_error = Error.from_json(self.error_with_subproblems.to_json())
|
||||
|
||||
self.assertEqual(1, len(parsed_error.subproblems))
|
||||
self.assertEqual(self.subproblem, parsed_error.subproblems[0])
|
||||
|
||||
def test_description(self):
|
||||
self.assertEqual('The request message was malformed', self.error.description)
|
||||
self.assertIsNone(self.error_custom.description)
|
||||
self.assertTrue(self.error_custom.description is None)
|
||||
|
||||
def test_code(self):
|
||||
from acme.messages import Error
|
||||
self.assertEqual('malformed', self.error.code)
|
||||
self.assertIsNone(self.error_custom.code)
|
||||
self.assertIsNone(Error().code)
|
||||
self.assertEqual(None, self.error_custom.code)
|
||||
self.assertEqual(None, Error().code)
|
||||
|
||||
def test_is_acme_error(self):
|
||||
from acme.messages import is_acme_error, Error
|
||||
@@ -84,11 +73,7 @@ class ErrorTest(unittest.TestCase):
|
||||
str(self.error),
|
||||
u"{0.typ} :: {0.description} :: {0.detail} :: {0.title}"
|
||||
.format(self.error))
|
||||
self.assertEqual(
|
||||
str(self.error_with_subproblems),
|
||||
(u"{0.typ} :: {0.description} :: {0.detail} :: {0.title}\n"+
|
||||
u"Problem for {1.identifier.value}: {1.typ} :: {1.description} :: {1.detail} :: {1.title}").format(
|
||||
self.error_with_subproblems, self.subproblem))
|
||||
|
||||
|
||||
class ConstantTest(unittest.TestCase):
|
||||
"""Tests for acme.messages._Constant."""
|
||||
@@ -97,7 +82,7 @@ class ConstantTest(unittest.TestCase):
|
||||
from acme.messages import _Constant
|
||||
|
||||
class MockConstant(_Constant): # pylint: disable=missing-docstring
|
||||
POSSIBLE_NAMES: Dict = {}
|
||||
POSSIBLE_NAMES = {} # type: Dict
|
||||
|
||||
self.MockConstant = MockConstant # pylint: disable=invalid-name
|
||||
self.const_a = MockConstant('a')
|
||||
@@ -121,11 +106,11 @@ class ConstantTest(unittest.TestCase):
|
||||
|
||||
def test_equality(self):
|
||||
const_a_prime = self.MockConstant('a')
|
||||
self.assertNotEqual(self.const_a, self.const_b)
|
||||
self.assertEqual(self.const_a, const_a_prime)
|
||||
self.assertFalse(self.const_a == self.const_b)
|
||||
self.assertTrue(self.const_a == const_a_prime)
|
||||
|
||||
self.assertNotEqual(self.const_a, self.const_b)
|
||||
self.assertEqual(self.const_a, const_a_prime)
|
||||
self.assertTrue(self.const_a != self.const_b)
|
||||
self.assertFalse(self.const_a != const_a_prime)
|
||||
|
||||
|
||||
class DirectoryTest(unittest.TestCase):
|
||||
@@ -267,19 +252,6 @@ class RegistrationTest(unittest.TestCase):
|
||||
from acme.messages import Registration
|
||||
hash(Registration.from_json(self.jobj_from))
|
||||
|
||||
def test_default_not_transmitted(self):
|
||||
from acme.messages import NewRegistration
|
||||
empty_new_reg = NewRegistration()
|
||||
new_reg_with_contact = NewRegistration(contact=())
|
||||
|
||||
self.assertEqual(empty_new_reg.contact, ())
|
||||
self.assertEqual(new_reg_with_contact.contact, ())
|
||||
|
||||
self.assertNotIn('contact', empty_new_reg.to_partial_json())
|
||||
self.assertNotIn('contact', empty_new_reg.fields_to_partial_json())
|
||||
self.assertIn('contact', new_reg_with_contact.to_partial_json())
|
||||
self.assertIn('contact', new_reg_with_contact.fields_to_partial_json())
|
||||
|
||||
|
||||
class UpdateRegistrationTest(unittest.TestCase):
|
||||
"""Tests for acme.messages.UpdateRegistration."""
|
||||
@@ -421,7 +393,7 @@ class AuthorizationResourceTest(unittest.TestCase):
|
||||
authzr = AuthorizationResource(
|
||||
uri=mock.sentinel.uri,
|
||||
body=mock.sentinel.body)
|
||||
self.assertIsInstance(authzr, jose.JSONDeSerializable)
|
||||
self.assertTrue(isinstance(authzr, jose.JSONDeSerializable))
|
||||
|
||||
|
||||
class CertificateRequestTest(unittest.TestCase):
|
||||
@@ -432,7 +404,7 @@ class CertificateRequestTest(unittest.TestCase):
|
||||
self.req = CertificateRequest(csr=CSR)
|
||||
|
||||
def test_json_de_serializable(self):
|
||||
self.assertIsInstance(self.req, jose.JSONDeSerializable)
|
||||
self.assertTrue(isinstance(self.req, jose.JSONDeSerializable))
|
||||
from acme.messages import CertificateRequest
|
||||
self.assertEqual(
|
||||
self.req, CertificateRequest.from_json(self.req.to_json()))
|
||||
@@ -448,7 +420,7 @@ class CertificateResourceTest(unittest.TestCase):
|
||||
cert_chain_uri=mock.sentinel.cert_chain_uri)
|
||||
|
||||
def test_json_de_serializable(self):
|
||||
self.assertIsInstance(self.certr, jose.JSONDeSerializable)
|
||||
self.assertTrue(isinstance(self.certr, jose.JSONDeSerializable))
|
||||
from acme.messages import CertificateResource
|
||||
self.assertEqual(
|
||||
self.certr, CertificateResource.from_json(self.certr.to_json()))
|
||||
@@ -481,7 +453,6 @@ class OrderResourceTest(unittest.TestCase):
|
||||
'authorizations': None,
|
||||
})
|
||||
|
||||
|
||||
class NewOrderTest(unittest.TestCase):
|
||||
"""Tests for acme.messages.NewOrder."""
|
||||
|
||||
@@ -496,18 +467,5 @@ class NewOrderTest(unittest.TestCase):
|
||||
})
|
||||
|
||||
|
||||
class JWSPayloadRFC8555Compliant(unittest.TestCase):
|
||||
"""Test for RFC8555 compliance of JWS generated from resources/challenges"""
|
||||
def test_message_payload(self):
|
||||
from acme.messages import NewAuthorization
|
||||
|
||||
new_order = NewAuthorization()
|
||||
new_order.le_acme_version = 2
|
||||
|
||||
jobj = new_order.json_dumps(indent=2).encode()
|
||||
# RFC8555 states that JWS bodies must not have a resource field.
|
||||
self.assertEqual(jobj, b'{}')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -1,19 +1,16 @@
|
||||
"""Tests for acme.standalone."""
|
||||
import http.client as http_client
|
||||
import socket
|
||||
import socketserver
|
||||
import threading
|
||||
import unittest
|
||||
from typing import Set
|
||||
from unittest import mock
|
||||
|
||||
import josepy as jose
|
||||
import mock
|
||||
import requests
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
|
||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
||||
import test_util
|
||||
|
||||
|
||||
@@ -42,7 +39,7 @@ class HTTP01ServerTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.account_key = jose.JWK.load(
|
||||
test_util.load_vector('rsa1024_key.pem'))
|
||||
self.resources: Set = set()
|
||||
self.resources = set() # type: Set
|
||||
|
||||
from acme.standalone import HTTP01Server
|
||||
self.server = HTTP01Server(('', 0), resources=self.resources)
|
||||
@@ -86,85 +83,11 @@ class HTTP01ServerTest(unittest.TestCase):
|
||||
def test_http01_not_found(self):
|
||||
self.assertFalse(self._test_http01(add=False))
|
||||
|
||||
def test_timely_shutdown(self):
|
||||
from acme.standalone import HTTP01Server
|
||||
server = HTTP01Server(('', 0), resources=set(), timeout=0.05)
|
||||
server_thread = threading.Thread(target=server.serve_forever)
|
||||
server_thread.start()
|
||||
|
||||
client = socket.socket()
|
||||
client.connect(('localhost', server.socket.getsockname()[1]))
|
||||
|
||||
stop_thread = threading.Thread(target=server.shutdown)
|
||||
stop_thread.start()
|
||||
server_thread.join(5.)
|
||||
|
||||
is_hung = server_thread.is_alive()
|
||||
try:
|
||||
client.shutdown(socket.SHUT_RDWR)
|
||||
except: # pragma: no cover, pylint: disable=bare-except
|
||||
# may raise error because socket could already be closed
|
||||
pass
|
||||
|
||||
self.assertFalse(is_hung, msg='Server shutdown should not be hung')
|
||||
|
||||
|
||||
@unittest.skipIf(not challenges.TLSALPN01.is_supported(), "pyOpenSSL too old")
|
||||
class TLSALPN01ServerTest(unittest.TestCase):
|
||||
"""Test for acme.standalone.TLSALPN01Server."""
|
||||
|
||||
def setUp(self):
|
||||
self.certs = {b'localhost': (
|
||||
test_util.load_pyopenssl_private_key('rsa2048_key.pem'),
|
||||
test_util.load_cert('rsa2048_cert.pem'),
|
||||
)}
|
||||
# Use different certificate for challenge.
|
||||
self.challenge_certs = {b'localhost': (
|
||||
test_util.load_pyopenssl_private_key('rsa4096_key.pem'),
|
||||
test_util.load_cert('rsa4096_cert.pem'),
|
||||
)}
|
||||
from acme.standalone import TLSALPN01Server
|
||||
self.server = TLSALPN01Server(("localhost", 0), certs=self.certs,
|
||||
challenge_certs=self.challenge_certs)
|
||||
# pylint: disable=no-member
|
||||
self.thread = threading.Thread(target=self.server.serve_forever)
|
||||
self.thread.start()
|
||||
|
||||
def tearDown(self):
|
||||
self.server.shutdown() # pylint: disable=no-member
|
||||
self.thread.join()
|
||||
|
||||
# TODO: This is not implemented yet, see comments in standalone.py
|
||||
# def test_certs(self):
|
||||
# host, port = self.server.socket.getsockname()[:2]
|
||||
# cert = crypto_util.probe_sni(
|
||||
# b'localhost', host=host, port=port, timeout=1)
|
||||
# # Expect normal cert when connecting without ALPN.
|
||||
# self.assertEqual(jose.ComparableX509(cert),
|
||||
# jose.ComparableX509(self.certs[b'localhost'][1]))
|
||||
|
||||
def test_challenge_certs(self):
|
||||
host, port = self.server.socket.getsockname()[:2]
|
||||
cert = crypto_util.probe_sni(
|
||||
b'localhost', host=host, port=port, timeout=1,
|
||||
alpn_protocols=[b"acme-tls/1"])
|
||||
# Expect challenge cert when connecting with ALPN.
|
||||
self.assertEqual(
|
||||
jose.ComparableX509(cert),
|
||||
jose.ComparableX509(self.challenge_certs[b'localhost'][1])
|
||||
)
|
||||
|
||||
def test_bad_alpn(self):
|
||||
host, port = self.server.socket.getsockname()[:2]
|
||||
with self.assertRaises(errors.Error):
|
||||
crypto_util.probe_sni(
|
||||
b'localhost', host=host, port=port, timeout=1,
|
||||
alpn_protocols=[b"bad-alpn"])
|
||||
|
||||
|
||||
class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
"""Test for acme.standalone.BaseDualNetworkedServers."""
|
||||
|
||||
|
||||
class SingleProtocolServer(socketserver.TCPServer):
|
||||
"""Server that only serves on a single protocol. FreeBSD has this behavior for AF_INET6."""
|
||||
def __init__(self, *args, **kwargs):
|
||||
@@ -174,7 +97,7 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
kwargs["bind_and_activate"] = False
|
||||
else:
|
||||
self.address_family = socket.AF_INET
|
||||
super().__init__(*args, **kwargs)
|
||||
socketserver.TCPServer.__init__(self, *args, **kwargs)
|
||||
if ipv6:
|
||||
# NB: On Windows, socket.IPPROTO_IPV6 constant may be missing.
|
||||
# We use the corresponding value (41) instead.
|
||||
@@ -189,17 +112,12 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
|
||||
@mock.patch("socket.socket.bind")
|
||||
def test_fail_to_bind(self, mock_bind):
|
||||
from errno import EADDRINUSE
|
||||
mock_bind.side_effect = socket.error
|
||||
from acme.standalone import BaseDualNetworkedServers
|
||||
|
||||
mock_bind.side_effect = socket.error(EADDRINUSE, "Fake addr in use error")
|
||||
|
||||
with self.assertRaises(socket.error) as em:
|
||||
BaseDualNetworkedServers(
|
||||
BaseDualNetworkedServersTest.SingleProtocolServer,
|
||||
('', 0), socketserver.BaseRequestHandler)
|
||||
|
||||
self.assertEqual(em.exception.errno, EADDRINUSE)
|
||||
self.assertRaises(socket.error, BaseDualNetworkedServers,
|
||||
BaseDualNetworkedServersTest.SingleProtocolServer,
|
||||
('', 0),
|
||||
socketserver.BaseRequestHandler)
|
||||
|
||||
def test_ports_equal(self):
|
||||
from acme.standalone import BaseDualNetworkedServers
|
||||
@@ -220,10 +138,11 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
"""Tests for acme.standalone.HTTP01DualNetworkedServers."""
|
||||
|
||||
|
||||
def setUp(self):
|
||||
self.account_key = jose.JWK.load(
|
||||
test_util.load_vector('rsa1024_key.pem'))
|
||||
self.resources: Set = set()
|
||||
self.resources = set() # type: Set
|
||||
|
||||
from acme.standalone import HTTP01DualNetworkedServers
|
||||
self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources)
|
||||
|
||||
@@ -10,7 +10,6 @@ from cryptography.hazmat.primitives import serialization
|
||||
import josepy as jose
|
||||
from OpenSSL import crypto
|
||||
import pkg_resources
|
||||
from josepy.util import ComparableECKey
|
||||
|
||||
|
||||
def load_vector(*names):
|
||||
@@ -61,14 +60,6 @@ def load_rsa_private_key(*names):
|
||||
load_vector(*names), password=None, backend=default_backend()))
|
||||
|
||||
|
||||
def load_ecdsa_private_key(*names):
|
||||
"""Load ECDSA private key."""
|
||||
loader = _guess_loader(names[-1], serialization.load_pem_private_key,
|
||||
serialization.load_der_private_key)
|
||||
return ComparableECKey(loader(
|
||||
load_vector(*names), password=None, backend=default_backend()))
|
||||
|
||||
|
||||
def load_pyopenssl_private_key(*names):
|
||||
"""Load pyOpenSSL private key."""
|
||||
loader = _guess_loader(
|
||||
|
||||
12  acme/tests/testdata/README  vendored
@@ -4,18 +4,12 @@ to use appropriate extension for vector filenames: .pem for PEM and

 The following command has been used to generate test keys:

-  for k in 256 512 1024 2048 4096; do openssl genrsa -out rsa${k}_key.pem $k; done
+  for x in 256 512 1024 2048; do openssl genrsa -out rsa${k}_key.pem $k; done

 and for the CSR:

   openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -outform DER > csr.der

-and for the certificates:
+and for the certificate:

-  openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 -outform DER > cert.der
-  openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 > rsa2048_cert.pem
-  openssl req -key rsa1024_key.pem -new -subj '/CN=example.com' -x509 > rsa1024_cert.pem
-
-and for the elliptic key curves:
-
-  openssl genpkey -algorithm EC -out ec_secp384r1.pem -pkeyopt ec_paramgen_curve:P-384 -pkeyopt ec_param_enc:named_curve
+  openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 -outform DER > cert.der
21  acme/tests/testdata/cert-ipsans.pem  vendored
@@ -1,21 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDizCCAnOgAwIBAgIIPNBLQXwhoUkwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
-AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxNzNiMjYwHhcNMjAwNTI5MTkxODA5
-WhcNMjUwNTI5MTkxODA5WjAWMRQwEgYDVQQDEwsxOTIuMC4yLjE0NTCCASIwDQYJ
-KoZIhvcNAQEBBQADggEPADCCAQoCggEBALyChb+NDA26GF1AfC0nzEdfOTchKw0h
-q41xEjonvg5UXgZf/aH/ntvugIkYP0MaFifNAjebOVVsemEVEtyWcUKTfBHKZGbZ
-ukTDwFIjfTccCfo6U/B2H7ZLzJIywl8DcUw9DypadeQBm8PS0VVR2ncy73dvaqym
-crhAwlASyXU0mhLqRDMMxfg5Bn/FWpcsIcDpLmPn8Q/FvdRc2t5ryBNw/aWOlwqT
-Oy16nbfLj2T0zG1A3aPuD+eT/JFUe/o3K7R+FAx7wt+RziQO46wLVVF1SueZUrIU
-zqN04Gl8Kt1WM2SniZ0gq/rORUNcPtT0NAEsEslTQfA+Trq6j2peqyMCAwEAAaOB
-yjCBxzAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUF
-BwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHj1mwZzP//nMIH2i58NRUl/arHn
-MB8GA1UdIwQYMBaAFF5DVAKabvIUvKFHGouscA2Qdpe6MDEGCCsGAQUFBwEBBCUw
-IzAhBggrBgEFBQcwAYYVaHR0cDovLzEyNy4wLjAuMTo0MDAyMBUGA1UdEQQOMAyH
-BMAAApGHBMsAcQEwDQYJKoZIhvcNAQELBQADggEBAHjSgDg76/UCIMSYddyhj18r
-LdNKjA7p8ovnErSkebFT4lIZ9f3Sma9moNr0w64M33NamuFyHe/KTdk90mvoW8Uu
-26aDekiRIeeMakzbAtDKn67tt2tbedKIYRATcSYVwsV46uZKbM621dZKIjjxOWpo
-IY6rZYrku8LYhoXJXOqRduV3cTRVuTm5bBa9FfVNtt6N1T5JOtKKDEhuSaF4RSug
-PDy3hQIiHrVvhPfVrXU3j6owz/8UCS5549inES9ONTFrvM9o0H1R/MsmGNXR5hF5
-iJqHKC7n8LZujhVnoFIpHu2Dsiefbfr+yRYJS4I+ezy6Nq/Ok8rc8zp0eoX+uyY=
------END CERTIFICATE-----
22  acme/tests/testdata/cert-ipv6sans.pem  vendored
@@ -1,22 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDmzCCAoOgAwIBAgIIFdxeZP+v2rgwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
-AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSA0M2M5NTcwHhcNMjAwNTMwMDQwNzMw
-WhcNMjUwNTMwMDQwNzMwWjAOMQwwCgYDVQQDEwM6OjEwggEiMA0GCSqGSIb3DQEB
-AQUAA4IBDwAwggEKAoIBAQC7VidVduJvqKtrSH0fw6PjE0cqL4Kfzo7klexWUkHG
-KVAa0fRVZFZ462jxKOt417V2U4WJQ6WHHO9PJ+3gW62d/MhCw8FRtUQS4nYFjqB6
-32+RFU21VRN7cWoQEqSwnEPbh/v/zv/KS5JhQ+swWUo79AOLm1kjnZWCKtcqh1Lc
-Ug5Tkpot6luoxTKp52MkchvXDpj0q2B/XpLJ8/pw5cqjv7mH12EDOK2HXllA+WwX
-ZpstcEhaA4FqtaHOW/OHnwTX5MUbINXE5YYHVEDR6moVM31/W/3pe9NDUMTDE7Si
-lVQnZbXM9NYbzZqlh+WhemDWwnIfGI6rtsfNEiirVEOlAgMBAAGjgeIwgd8wDgYD
-VR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNV
-HRMBAf8EAjAAMB0GA1UdDgQWBBS8DL+MZfDIy6AKky69Tgry2Vxq5DAfBgNVHSME
-GDAWgBRAsFqVenRRKgB1YPzWKzb9bzZ/ozAxBggrBgEFBQcBAQQlMCMwIQYIKwYB
-BQUHMAGGFWh0dHA6Ly8xMjcuMC4wLjE6NDAwMjAtBgNVHREEJjAkhxAAAAAAAAAA
-AAAAAAAAAAABhxCjvjLzIG7HXQlWDO6YWF7FMA0GCSqGSIb3DQEBCwUAA4IBAQBY
-M9UTZ3uaKMQ+He9kWR3p9jh6hTSD0FNi79ZdfkG0lgSzhhduhN7OhzQH2ihUUfa6
-rtKTw74fGbszhizCd9UB8YPKlm3si1Xbg6ZUQlA1RtoQo7RUGEa6ZbR68PKGm9Go
-hTTFIl/JS8jzxBR8jywZdyqtprUx+nnNUDiNk0hJtFLhw7OJH0AHlAUNqHsfD08m
-HXRdaV6q14HXU5g31slBat9H4D6tCU/2uqBURwW0wVdnqh4QeRfAeqiatJS9EmSF
-ctbc7n894Idy2Xce7NFoIy5cht3m6Rd42o/LmBsJopBmQcDPZT70/XzRtc2qE0cS
-CzBIGQHUJ6BfmBjrCQnp
------END CERTIFICATE-----
16  acme/tests/testdata/csr-ipsans.pem  vendored
@@ -1,16 +0,0 @@
------BEGIN CERTIFICATE REQUEST-----
-MIICbTCCAVUCAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKT/
-CE7Y5EYBvI4p7Frt763upIKHDHO/R5/TWMjG8Jm9qTMui8sbMgyh2Yh+lR/j/5Xd
-tQrhgC6wx10MrW2+3JtYS88HP1p6si8zU1dbK34n3NyyklR2RivW0R7dXgnYNy7t
-5YcDYLCrbRMIPINV/uHrmzIHWYUDNcZVdAfIM2AHfKYuV6Mepcn///5GR+l4GcAh
-Nkf9CW8OdAIuKdbyLCxVr0mUW/vJz1b12uxPsgUdax9sjXgZdT4pfMXADsFd1NeF
-atpsXU073inqtHru+2F9ijHTQ75TC+u/rr6eYl3BnBntac0gp/ADtDBii7/Q1JOO
-Bhq7xJNqqxIEdiyM7zcCAwEAAaAoMCYGCSqGSIb3DQEJDjEZMBcwFQYDVR0RBA4w
-DIcEwAACkYcEywBxATANBgkqhkiG9w0BAQsFAAOCAQEADG5g3zdbSCaXpZhWHkzE
-Mek3f442TUE1pB+ITRpthmM4N3zZWETYmbLCIAO624uMrRnbCCMvAoLs/L/9ETg/
-XMMFtonQC8u9i9tV8B1ceBh8lpIfa+8b9TMWH3bqnrbWQ+YIl+Yd0gXiCZWJ9vK4
-eM1Gddu/2bR6s/k4h/XAWRgEexqk57EHr1z0N+T9OoX939n3mVcNI+u9kfd5VJ0z
-VyA3R8WR6T6KlEl5P5pcWe5Kuyhi7xMmLVImXqBtvKq4O1AMfM+gQr/yn9aE8IRq
-khP7JrMBLUIub1c/qu2TfvnynNPSM/ZcOX+6PHdHmRkR3nI0Ndpv7Ntv31FTplAm
-Dw==
------END CERTIFICATE REQUEST-----
16  acme/tests/testdata/csr-ipv6sans.pem  vendored
@@ -1,16 +0,0 @@
------BEGIN CERTIFICATE REQUEST-----
-MIIChTCCAW0CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOIc
-UAppcqJfTkSqqOFqGt1v7lIJZPOcF4bcKI3d5cHAGbOuVxbC7uMaDuObwYLzoiED
-qnvs1NaEq2phO6KsgGESB7IE2LUjJivO7OnSZjNRpL5si/9egvBiNCn/50lULaWG
-gLEuyMfk3awZy2mVAymy7Grhbx069A4TH8TqsHuq2RpKyuDL27e/jUt6yYecb3pu
-hWMiWy3segif4tI46pkOW0/I6DpxyYD2OqOvzxm/voS9RMqE2+7YJA327H7bEi3N
-lJZEZ1zy7clZ9ga5fBQaetzbg2RyxTrZ7F919NQXSFoXgxb10Eg64wIpz0L3ooCm
-GEHehsZZexa3J5ccIvMCAwEAAaBAMD4GCSqGSIb3DQEJDjExMC8wLQYDVR0RBCYw
-JIcQAAAAAAAAAAAAAAAAAAAAAYcQo74y8yBux10JVgzumFhexTANBgkqhkiG9w0B
-AQsFAAOCAQEALvwVn0A/JPTCiNzcozHFnp5M23C9PXCplWc5u4k34d4XXzpSeFDz
-fL4gy7NpYIueme2K2ppw2j3PNQUdR6vQ5a75sriegWYrosL+7Q6Joh51ZyEUZQoD
-mNl4M4S4oX85EaChR6NFGBywTfjFarYi32XBTbFE7rK8N8KM+DQkNdwL1MXqaHWz
-F1obQKpNXlLedbCBOteV5Eg4zG3565zu/Gw/NhwzzV3mQmgxUcd1sMJxAfHQz4Vl
-ImLL+xMcR03nDsH2bgtDbK2tJm7WszSxA9tC+Xp2lRewxrnQloRWPYDz177WGQ5Q
-SoGDzTTtA6uWZxG8h7CkNLOGvA8LtU2rNA==
------END CERTIFICATE REQUEST-----
16  acme/tests/testdata/csr-mixed.pem  vendored
@@ -1,16 +0,0 @@
------BEGIN CERTIFICATE REQUEST-----
-MIICdjCCAV4CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMXq
-v1y8EIcCbaUIzCtOcLkLS0MJ35oS+6DmV5WB1A0cIk6YrjsHIsY2lwMm13BWIvmw
-tY+Y6n0rr7eViNx5ZRGHpHEI/TL3Neb+VefTydL5CgvK3dd4ex2kSbTaed3fmpOx
-qMajEduwNcZPCcmoEXPkfrCP8w2vKQUkQ+JRPcdX1nTuzticeRP5B7YCmJsmxkEh
-Y0tzzZ+NIRDARoYNofefY86h3e5q66gtJxccNchmIM3YQahhg5n3Xoo8hGfM/TIc
-R7ncCBCLO6vtqo0QFva/NQODrgOmOsmgvqPkUWQFdZfWM8yIaU826dktx0CPB78t
-TudnJ1rBRvGsjHMsZikCAwEAAaAxMC8GCSqGSIb3DQEJDjEiMCAwHgYDVR0RBBcw
-FYINYS5leGVtcGxlLmNvbYcEwAACbzANBgkqhkiG9w0BAQsFAAOCAQEAdGMcRCxq
-1X09gn1TNdMt64XUv+wdJCKDaJ+AgyIJj7QvVw8H5k7dOnxS4I+a/yo4jE+LDl2/
-AuHcBLFEI4ddewdJSMrTNZjuRYuOdr3KP7fL7MffICSBi45vw5EOXg0tnjJCEiKu
-6gcJgbLSP5JMMd7Haf33Q/VWsmHofR3VwOMdrnakwAU3Ff5WTuXTNVhL1kT/uLFX
-yW1ru6BF4unwNqSR2UeulljpNfRBsiN4zJK11W6n9KT0NkBr9zY5WCM4sW7i8k9V
-TeypWGo3jBKzYAGeuxZsB97U77jZ2lrGdBLZKfbcjnTeRVqCvCRrui4El7UGYFmj
-7s6OJyWx5DSV8w==
------END CERTIFICATE REQUEST-----
6  acme/tests/testdata/ec_secp384r1_key.pem  vendored
@@ -1,6 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDArTn0pbFk3xHfKeXte
-xJgS4JVdJQ8mqvezhaNpULZPnwb+mcKLlrj6f5SRM52yREGhZANiAAQcrMoPMVqV
-rHnDGGz5HUKLNmXfChlNgsrwsruawXF+M283CA6eckAjTXNyiC/ounWmvtoKsZG0
-2UQOfQUNSCANId/r986yRGc03W6RJSkcRp86qBYjNsLgbZpber/3+M4=
------END PRIVATE KEY-----
13  acme/tests/testdata/rsa1024_cert.pem  vendored
@@ -1,13 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIB/TCCAWagAwIBAgIJAOyRIBs3QT8QMA0GCSqGSIb3DQEBCwUAMBYxFDASBgNV
-BAMMC2V4YW1wbGUuY29tMB4XDTE4MDQyMzEwMzE0NFoXDTE4MDUyMzEwMzE0NFow
-FjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJ
-AoGBAJqJ87R8aVwByONxgQA9hwgvQd/QqI1r1UInXhEF2VnEtZGtUWLi100IpIqr
-Mq4qusDwNZ3g8cUPtSkvJGs89djoajMDIJP7lQUEKUYnYrI0q755Tr/DgLWSk7iW
-l5ezym0VzWUD0/xXUz8yRbNMTjTac80rS5SZk2ja2wWkYlRJAgMBAAGjUzBRMB0G
-A1UdDgQWBBSsaX0IVZ4XXwdeffVAbG7gnxSYjTAfBgNVHSMEGDAWgBSsaX0IVZ4X
-XwdeffVAbG7gnxSYjTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4GB
-ADe7SVmvGH2nkwVfONk8TauRUDkePN1CJZKFb2zW1uO9ANJ2v5Arm/OQp0BG/xnI
-Djw/aLTNVESF89oe15dkrUErtcaF413MC1Ld5lTCaJLHLGqDKY69e02YwRuxW7jY
-qarpt7k7aR5FbcfO5r4V/FK/Gvp4Dmoky8uap7SJIW6x
------END CERTIFICATE-----
30  acme/tests/testdata/rsa4096_cert.pem  vendored
@@ -1,30 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIFDTCCAvWgAwIBAgIUImqDrP53V69vFROsjP/gL0YtoA4wDQYJKoZIhvcNAQEL
-BQAwFjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wHhcNMjAwNTI3MjMyNDE0WhcNMjAw
-NjI2MjMyNDE0WjAWMRQwEgYDVQQDDAtleGFtcGxlLmNvbTCCAiIwDQYJKoZIhvcN
-AQEBBQADggIPADCCAgoCggIBANY9LKLk9Dxn0MUMQFHwBoTN4ehDSWBws2KcytpF
-mc8m9Mfk1wmb4fQSKYtK3wIFMfIyo9HQu0nKqMkkUw52o3ZXyOv+oWwF5qNy2BKu
-lh5OMSkaZ0o13zoPpW42e+IUnyxvg70+0urD+sUue4cyTHh/nBIUjrM/05ZJ/ac8
-HR0RK3H41YoqBjq69JjMZczZZhbNFit3s6p0R1TbVAgc3ckqbtX5BDyQMQQCP4Ed
-m4DgbAFVqdcPUCC5W3F3fmuQiPKHiADzONZnXpy6lUvLDWqcd6loKp+nKHM6OkXX
-8hmD7pE1PYMQo4hqOfhBR2IgMjAShwd5qUFjl1m2oo0Qm3PFXOk6i2ZQdS6AA/yd
-B5/mX0RnM2oIdFZPb6UZFSmtEgs9sTzn+hMUyNSZQRE54px1ur1xws2R+vbsCyM5
-+KoFVxDjVjU9TlZx3GvDvnqz/tbHjji6l8VHZYOBMBUXbKHu2U6pJFZ5Zp7k68/z
-a3Fb9Pjtn3iRkXEyC0N5kLgqO4QTlExnxebV8aMvQpWd/qefnMn9qPYIZPEXSQAR
-mEBIahkcACb60s+acG0WFFluwBPtBqEr8Q67XlSF0Ibf4iBiRzpPobhlWta1nrFg
-4IWHMSoZ0PE75bhIGBEkhrpcXQCAxXmAfxfjKDH7jdJ1fRdnZ/9+OzwYGVX5GH/l
-0QDtAgMBAAGjUzBRMB0GA1UdDgQWBBQh3xiz/o1nEU2ySylZ9gxCXvIPGzAfBgNV
-HSMEGDAWgBQh3xiz/o1nEU2ySylZ9gxCXvIPGzAPBgNVHRMBAf8EBTADAQH/MA0G
-CSqGSIb3DQEBCwUAA4ICAQAELoXz31oR9pdAwidlv9ZBOKiC7KBWy8VMqXNVkfTn
-bVRxAUex7zleLFIOkWnqadsMesU9sIwrbLzBcZ8Q/vBY+z2xOPdXcgcAoAmdKWoq
-YBQNiqng9r54sqlzB/77QZCf5fdktESe7NTxhCifgx5SAWq7IUQs/lm3tnMUSAfE
-5ctuN6M+w8K54y3WDprcfMHpnc3ZHeSPhVQApHM0h/bDvXq0bRS7kmq27Hb153Qm
-nH3TwYB5pPSWW38NbUc+s/a7mItO7S8ly8yGbA0j9c/IbN5lM+OCdk06asz3+c8E
-uo8nuCBoYO5+6AqC2N7WJ3Tdr/pFA8jTbd6VNVlgCWTIR8ZosL5Fgkfv+4fUBrHt
-zdVUqMUzvga5rvZnwnJ5Qfu/drHeAAo9MTNFQNe2QgDlYfWBh5GweolgmFSwrpkY
-v/5wLtIyv/ASHKswybbqMIlpttcLTXjx5yuh8swttT6Wh+FQqqQ32KSRB3StiwyK
-oH0ZhrwYHiFYNlPxecGX6XUta6rFtTlEdkBGSnXzgiTzL2l+Nc0as0V5B9RninZG
-qJ+VOChSQ0OFvg1riSXv7tMvbLdGQnxwTRL3t6BMS8I4LA2m3ZfWUcuXT783ODTH
-16f1Q1AgXd2csstTWO9cv+N/0fpX31nqrm6+CrGduSr2u4HjYYnlLIUhmdTvK3fX
-Fg==
------END CERTIFICATE-----
51  acme/tests/testdata/rsa4096_key.pem  vendored
@@ -1,51 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIJKgIBAAKCAgEA1j0souT0PGfQxQxAUfAGhM3h6ENJYHCzYpzK2kWZzyb0x+TX
-CZvh9BIpi0rfAgUx8jKj0dC7ScqoySRTDnajdlfI6/6hbAXmo3LYEq6WHk4xKRpn
-SjXfOg+lbjZ74hSfLG+DvT7S6sP6xS57hzJMeH+cEhSOsz/Tlkn9pzwdHRErcfjV
-iioGOrr0mMxlzNlmFs0WK3ezqnRHVNtUCBzdySpu1fkEPJAxBAI/gR2bgOBsAVWp
-1w9QILlbcXd+a5CI8oeIAPM41mdenLqVS8sNapx3qWgqn6coczo6RdfyGYPukTU9
-gxCjiGo5+EFHYiAyMBKHB3mpQWOXWbaijRCbc8Vc6TqLZlB1LoAD/J0Hn+ZfRGcz
-agh0Vk9vpRkVKa0SCz2xPOf6ExTI1JlBETninHW6vXHCzZH69uwLIzn4qgVXEONW
-NT1OVnHca8O+erP+1seOOLqXxUdlg4EwFRdsoe7ZTqkkVnlmnuTrz/NrcVv0+O2f
-eJGRcTILQ3mQuCo7hBOUTGfF5tXxoy9ClZ3+p5+cyf2o9ghk8RdJABGYQEhqGRwA
-JvrSz5pwbRYUWW7AE+0GoSvxDrteVIXQht/iIGJHOk+huGVa1rWesWDghYcxKhnQ
-8TvluEgYESSGulxdAIDFeYB/F+MoMfuN0nV9F2dn/347PBgZVfkYf+XRAO0CAwEA
-AQKCAgEA0hZdTkQtCYtYm9LexDsXeWYX8VcCfrMmBj7xYcg9A3oVMmzDPuYBVwH0
-gWbjd6y2hOaJ5TfGYZ99kvmvBRDsTSHaoyopC7BhssjtAKz6Ay/0X3VH8usPQ3WS
-aZi+NT65tK6KRqtz08ppgLGLa1G00bl5x/Um1rpxeACI4FU/y4BJ1VMJvJpnT3KE
-Z86Qyagqx5NH+UpCApZSWPFX3zjHePzGgcfXErjniCHYOnpZQrFQ2KIzkfSvQ9fg
-x01ByKOM2CB2C1B33TCzBAioXRH6zyAu7A59NeCK9ywTduhDvie1a+oEryFC7IQW
-4s7I/H3MGX4hsf/pLXlHMy+5CZJOjRaC2h+pypfbbcuiXu6Sn64kHNpiI7SxI5DI
-MIRjyG7MdUcrzq0Rt8ogwwpbCoRqrl/w3bhxtqmeZaEZtyxbjlm7reK2YkIFDgyz
-JMqiJK5ZAi+9L/8c0xhjjAQQ0sIzrjmjA8U+6YnWL9jU5qXTVnBB8XQucyeeZGgk
-yRHyMur71qOXN8z3UEva7MHkDTUBlj8DgTz6sEjqCipaWl0CXfDNa4IhHIXD5qiF
-wplhq7OeS0v6EGG/UFa3Q/lFntxtrayxJX7uvvSccGzjPKXTjpWUELLi/FdnIsum
-eXT3RgIEYozj4BibDXaBLfHTCVzxOr7AAEvKM9XWSUgLA0paSWECggEBAO9ZBeE1
-GWzd1ejTTkcxBC9AK2rNsYG8PdNqiof/iTbuJWNeRqpG+KB/0CNIpjZ2X5xZd0tM
-FDpHTFehlP26Roxuq50iRAFc+SN5KoiO0A3JuJAidreIgRTia1saUUrypHqWrYEA
-VZVj2AI8Pyg3s1OkR2frFskY7hXBVb/pJNDP/m9xTXXIYiIXYkHYe+4RIJCnAxRv
-q5YHKaX+0Ull9YCZJCxmwvcHat8sgu8qkiwUMEM6QSNEkrEbdnWYBABvC1AR6sws
-7MP1h9+j22n4Zc/3D6kpFZEL9Erx8nNyhbOZ6q2Tdnf6YKVVjZdyVa8VyNnR0ROl
-3BjkFaHb/bg4e4kCggEBAOUk8ZJS3qBeGCOjug384zbHGcnhUBYtYJiOz+RXBtP+
-PRksbFtTkgk1sHuSGO8YRddU4Qv7Av1xL8o+DEsLBSD0YQ7pmLrR/LK+iDQ5N63O
-Fve9uJH0ybxAOkiua7G24+lTsVUP//KWToL4Wh5zbHBBjL5D2Z9zoeVbcE87xhva
-lImMVr4Ex252DqNP9wkZxBjudFyJ/C/TnXrjPcgwhxWTC7sLQMhE5p+490G7c4hX
-PywkIKrANbu37KDiAvVS+dC66ZgpL/NUDkeloAmGNO08LGzbV6YKchlvDyWU/AvW
-0hYjbL0FUq7K/wp1G9fumolB+fbI25K9c13X93STzUUCggEBAJDsNFUyk5yJjbYW
-C/WrRj9d+WwH9Az77+uNPSgvn+O0usq6EMuVgYGdImfa21lqv2Wp/kOHY1AOT7lX
-yyD+oyzw7dSNJOQ2aVwDR6+72Vof5DLRy1RBwPbmSd61xrc8yD658YCEtU1pUSe5
-VvyBDYH9nIbdn8RP5gkiMUusXXBaIFNWJXLFzDWcNxBrhk6V7EPp/EFphFmpKJyr
-+AkbRVWCZJbF+hMdWKadCwLJogwyhS6PnVU/dhrq6AU38GRa2Fy5HJRYN1xH1Oej
-DX3Su8L6c28Xw0k6FcczTHx+wVoIPkKvYTIwVkiFzt/+iMckx6KsGo5tBSHFKRwC
-WlQrTxECggEBALjUruLnY1oZ7AC7bTUhOimSOfQEgTQSUCtebsRxijlvhtsKYTDd
-XRt+qidStjgN7S/+8DRYuZWzOeg5WnMhpXZqiOudcyume922IGl3ibjxVsdoyjs5
-J4xohlrgDlBgBMDNWGoTqNGFejjcmNydH+gAh8VlN2INxJYbxqCyx17qVgwJHmLR
-uggYxD/pHYvCs9GkbknCp5/wYsOgDtKuihfV741lS1D/esN1UEQ+LrfYIEW7snno
-5q7Pcdhn1hkKYCWEzy2Ec4Aj2gzixQ9JqOF/OxpnZvCw1k47rg0TeqcWFYnz8x8Y
-7xO8/DH0OoxXk2GJzVXJuItJs4gLzzfCjL0CggEAJFHfC9jisdy7CoWiOpNCSF1B
-S0/CWDz77cZdlWkpTdaXGGp1MA/UKUFPIH8sOHfvpKS660+X4G/1ZBHmFb4P5kFF
-Qy8UyUMKtSOEdZS6KFlRlfSCAMd5aSTmCvq4OSjYEpMRwUhU/iEJNkn9Z1Soehe0
-U3dxJ8KiT1071geO6rRquSHoSJs6Y0WQKriYYQJOhh4Axs3PQihER2eyh+WGk8YJ
-02m0mMsjntqnXtdc6IcdKaHp9ko+OpM9QZLsvt19fxBcrXj/i21uUXrzuNtKfO6M
-JqGhsOrO2dh8lMhvodENvgKA0DmYDC9N7ogo7bxTNSedcjBF46FhJoqii8m70Q==
------END RSA PRIVATE KEY-----
@@ -1,8 +1,8 @@
 include LICENSE.txt
 include README.rst
 recursive-include tests *
+include certbot_apache/_internal/centos-options-ssl-apache.conf
+include certbot_apache/_internal/options-ssl-apache.conf
 recursive-include certbot_apache/_internal/augeas_lens *.aug
-recursive-include certbot_apache/_internal/tls_configs *.conf
-include certbot_apache/py.typed
 global-exclude __pycache__
 global-exclude *.py[cod]
@@ -4,22 +4,16 @@ import fnmatch
 import logging
 import re
 import subprocess
-from typing import Dict
-from typing import Iterable
-from typing import List
-from typing import Optional
-from typing import Tuple
-
-import pkg_resources

 from certbot import errors
 from certbot import util
 from certbot.compat import os

 logger = logging.getLogger(__name__)


-def get_mod_deps(mod_name: str) -> List[str]:
+def get_mod_deps(mod_name):
     """Get known module dependencies.

     .. note:: This does not need to be accurate in order for the client to
@@ -38,7 +32,7 @@ def get_mod_deps(mod_name: str) -> List[str]:
     return deps.get(mod_name, [])


-def get_file_path(vhost_path: str) -> Optional[str]:
+def get_file_path(vhost_path):
     """Get file path from augeas_vhost_path.

     Takes in Augeas path and returns the file name
@@ -55,7 +49,7 @@ def get_file_path(vhost_path: str) -> Optional[str]:
     return _split_aug_path(vhost_path)[0]


-def get_internal_aug_path(vhost_path: str) -> str:
+def get_internal_aug_path(vhost_path):
     """Get the Augeas path for a vhost with the file path removed.

     :param str vhost_path: Augeas virtual host path
@@ -67,7 +61,7 @@ def get_internal_aug_path(vhost_path: str) -> str:
     return _split_aug_path(vhost_path)[1]


-def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
+def _split_aug_path(vhost_path):
     """Splits an Augeas path into a file path and an internal path.

     After removing "/files", this function splits vhost_path into the
@@ -81,7 +75,7 @@ def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
     """
     # Strip off /files
     file_path = vhost_path[6:]
-    internal_path: List[str] = []
+    internal_path = []

     # Remove components from the end of file_path until it becomes valid
     while not os.path.exists(file_path):
@@ -91,7 +85,7 @@ def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
     return file_path, "/".join(reversed(internal_path))


-def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
+def parse_define_file(filepath, varname):
     """ Parses Defines from a variable in configuration file

     :param str filepath: Path of file to parse
@@ -101,7 +95,7 @@ def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
     :rtype: `dict`

     """
-    return_vars: Dict[str, str] = {}
+    return_vars = {}
     # Get list of words in the variable
     a_opts = util.get_var_from_file(varname, filepath).split()
     for i, v in enumerate(a_opts):
@@ -116,27 +110,28 @@ def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
     return return_vars


-def unique_id() -> str:
+def unique_id():
     """ Returns an unique id to be used as a VirtualHost identifier"""
     return binascii.hexlify(os.urandom(16)).decode("utf-8")


-def included_in_paths(filepath: str, paths: Iterable[str]) -> bool:
+def included_in_paths(filepath, paths):
     """
     Returns true if the filepath is included in the list of paths
     that may contain full paths or wildcard paths that need to be
     expanded.

     :param str filepath: Filepath to check
-    :param list paths: List of paths to check against
+    :params list paths: List of paths to check against

     :returns: True if included
     :rtype: bool
     """
-    return any(fnmatch.fnmatch(filepath, path) for path in paths)
+    return any([fnmatch.fnmatch(filepath, path) for path in paths])


-def parse_defines(apachectl: str) -> Dict[str, str]:
+def parse_defines(apachectl):
     """
     Gets Defines from httpd process and returns a dictionary of
     the defined variables.
@@ -147,7 +142,7 @@ def parse_defines(apachectl: str) -> Dict[str, str]:
     :rtype: dict
     """

-    variables: Dict[str, str] = {}
+    variables = dict()
     define_cmd = [apachectl, "-t", "-D",
                   "DUMP_RUN_CFG"]
     matches = parse_from_subprocess(define_cmd, r"Define: ([^ \n]*)")
@@ -157,15 +152,18 @@ def parse_defines(apachectl: str) -> Dict[str, str]:
         return {}

     for match in matches:
-        # Value could also contain = so split only once
-        parts = match.split('=', 1)
-        value = parts[1] if len(parts) == 2 else ''
-        variables[parts[0]] = value
+        if match.count("=") > 1:
+            logger.error("Unexpected number of equal signs in "
+                         "runtime config dump.")
+            raise errors.PluginError(
+                "Error parsing Apache runtime variables")
+        parts = match.partition("=")
+        variables[parts[0]] = parts[2]

     return variables


-def parse_includes(apachectl: str) -> List[str]:
+def parse_includes(apachectl):
     """
     Gets Include directives from httpd process and returns a list of
     their values.
@@ -176,11 +174,12 @@ def parse_includes(apachectl: str) -> List[str]:
     :rtype: list of str
     """

-    inc_cmd: List[str] = [apachectl, "-t", "-D", "DUMP_INCLUDES"]
+    inc_cmd = [apachectl, "-t", "-D",
+               "DUMP_INCLUDES"]
     return parse_from_subprocess(inc_cmd, r"\(.*\) (.*)")


-def parse_modules(apachectl: str) -> List[str]:
+def parse_modules(apachectl):
     """
     Get loaded modules from httpd process, and return the list
     of loaded module names.
@@ -191,11 +190,12 @@ def parse_modules(apachectl: str) -> List[str]:
     :rtype: list of str
     """

-    mod_cmd = [apachectl, "-t", "-D", "DUMP_MODULES"]
+    mod_cmd = [apachectl, "-t", "-D",
+               "DUMP_MODULES"]
     return parse_from_subprocess(mod_cmd, r"(.*)_module")


-def parse_from_subprocess(command: List[str], regexp: str) -> List[str]:
+def parse_from_subprocess(command, regexp):
     """Get values from stdout of subprocess command

     :param list command: Command to run
@@ -209,7 +209,7 @@ def parse_from_subprocess(command: List[str], regexp: str) -> List[str]:
     return re.compile(regexp).findall(stdout)


-def _get_runtime_cfg(command: List[str]) -> str:
+def _get_runtime_cfg(command):
     """
     Get runtime configuration info.

@@ -219,14 +219,12 @@ def _get_runtime_cfg(command: List[str]) -> str:
     """
     try:
-        proc = subprocess.run(
+        proc = subprocess.Popen(
             command,
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
-            universal_newlines=True,
-            check=False,
-            env=util.env_no_snap_for_external_calls())
-        stdout, stderr = proc.stdout, proc.stderr
+            universal_newlines=True)
+        stdout, stderr = proc.communicate()

     except (OSError, ValueError):
         logger.error(
@@ -243,15 +241,3 @@ def _get_runtime_cfg(command: List[str]) -> str:
                      "loaded because Apache is misconfigured.")

     return stdout
-
-
-def find_ssl_apache_conf(prefix: str) -> str:
-    """
-    Find a TLS Apache config file in the dedicated storage.
-    :param str prefix: prefix of the TLS Apache config file to find
-    :return: the path the TLS Apache config file
-    :rtype: str
-    """
-    return pkg_resources.resource_filename(
-        "certbot_apache",
-        os.path.join("_internal", "tls_configs", "{0}-options-ssl-apache.conf".format(prefix)))
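Editor's note: the `parse_defines` hunk above changes how a `NAME=VALUE` match captured from `apachectl -t -D DUMP_RUN_CFG` is split. A standalone sketch contrasting the two strategies, not part of the diff:

```python
# Illustration of the two Define-parsing strategies from the hunk above;
# "match" mimics a value captured from apachectl DUMP_RUN_CFG output.

def parse_match_new(match):
    # v1.29.0 side: split on the first "=" only, so values may contain "=".
    parts = match.split('=', 1)
    value = parts[1] if len(parts) == 2 else ''
    return parts[0], value


def parse_match_old(match):
    # Branch side: reject matches with more than one "=" before partitioning.
    if match.count("=") > 1:
        raise ValueError("Unexpected number of equal signs in runtime config dump.")
    name, _, value = match.partition("=")
    return name, value


assert parse_match_new("FOO=a=b") == ("FOO", "a=b")
assert parse_match_old("FOO=bar") == ("FOO", "bar")
# parse_match_old("FOO=a=b") would raise ValueError
```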
@@ -1,14 +1,12 @@
 """ apacheconfig implementation of the ParserNode interfaces """
-from typing import Any
-from typing import Iterable
-from typing import List
-from typing import Optional
-from typing import Tuple
+from functools import partial

 from certbot import errors

 from certbot_apache._internal import assertions
 from certbot_apache._internal import interfaces
 from certbot_apache._internal import parsernode_util as util
-from certbot_apache._internal.interfaces import ParserNode


 class ApacheParserNode(interfaces.ParserNode):
@@ -18,20 +16,19 @@ class ApacheParserNode(interfaces.ParserNode):
     by parsing the equivalent configuration text using the apacheconfig library.
     """

-    def __init__(self, **kwargs: Any) -> None:
-        # pylint: disable=unused-variable
-        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)
-        super().__init__(**kwargs)
+    def __init__(self, **kwargs):
+        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)  # pylint: disable=unused-variable
+        super(ApacheParserNode, self).__init__(**kwargs)
         self.ancestor = ancestor
         self.filepath = filepath
         self.dirty = dirty
         self.metadata = metadata
-        self._raw: Any = self.metadata["ac_ast"]
+        self._raw = self.metadata["ac_ast"]

-    def save(self, msg: str) -> None:
-        pass  # pragma: no cover
+    def save(self, msg):  # pragma: no cover
+        pass

-    def find_ancestors(self, name: str) -> List["ApacheParserNode"]:  # pylint: disable=unused-variable
+    def find_ancestors(self, name):  # pylint: disable=unused-variable
         """Find ancestor BlockNodes with a given name"""
         return [ApacheBlockNode(name=assertions.PASS,
                                 parameters=assertions.PASS,
@@ -43,33 +40,33 @@ class ApacheParserNode(interfaces.ParserNode):
 class ApacheCommentNode(ApacheParserNode):
     """ apacheconfig implementation of CommentNode interface """

-    def __init__(self, **kwargs: Any) -> None:
-        comment, kwargs = util.commentnode_kwargs(kwargs)
-        super().__init__(**kwargs)
+    def __init__(self, **kwargs):
+        comment, kwargs = util.commentnode_kwargs(kwargs)  # pylint: disable=unused-variable
+        super(ApacheCommentNode, self).__init__(**kwargs)
         self.comment = comment

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other):  # pragma: no cover
         if isinstance(other, self.__class__):
             return (self.comment == other.comment and
                     self.dirty == other.dirty and
                     self.ancestor == other.ancestor and
                     self.metadata == other.metadata and
                     self.filepath == other.filepath)
-        return False  # pragma: no cover
+        return False


 class ApacheDirectiveNode(ApacheParserNode):
     """ apacheconfig implementation of DirectiveNode interface """

-    def __init__(self, **kwargs: Any) -> None:
+    def __init__(self, **kwargs):
         name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
-        super().__init__(**kwargs)
+        super(ApacheDirectiveNode, self).__init__(**kwargs)
         self.name = name
         self.parameters = parameters
         self.enabled = enabled
-        self.include: Optional[str] = None
+        self.include = None

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other):  # pragma: no cover
         if isinstance(other, self.__class__):
             return (self.name == other.name and
                     self.filepath == other.filepath and
@@ -78,21 +75,69 @@ class ApacheDirectiveNode(ApacheParserNode):
                     self.dirty == other.dirty and
                     self.ancestor == other.ancestor and
                     self.metadata == other.metadata)
-        return False  # pragma: no cover
+        return False

-    def set_parameters(self, _parameters: Iterable[str]) -> None:
+    def set_parameters(self, _parameters):
         """Sets the parameters for DirectiveNode"""
-        return  # pragma: no cover
+        return
+
+
+def _parameters_from_string(text):
+    text = text.strip()
+    words = []
+    word = ""
+    quote = None
+    escape = False
+    for c in text:
+        if c.isspace() and not quote:
+            if word:
+                words.append(word)
+                word = ""
+        else:
+            word += c
+            if not escape:
+                if not quote and c in "\"\'":
+                    quote = c
+                elif c == quote:
+                    words.append(word[1:-1])
+                    word = ""
+                    quote = None
+        escape = c == "\\"
+    if word:
+        words.append(word)
+    return tuple(words)


 class ApacheBlockNode(ApacheDirectiveNode):
     """ apacheconfig implementation of BlockNode interface """

-    def __init__(self, **kwargs: Any) -> None:
-        super().__init__(**kwargs)
-        self.children: Tuple[ApacheParserNode, ...] = ()
+    def __init__(self, **kwargs):
+        super(ApacheBlockNode, self).__init__(**kwargs)
+        self._raw_children = self._raw
+        children = []
+        for raw_node in self._raw_children:
+            metadata = self.metadata.copy()
+            metadata['ac_ast'] = raw_node
+            if raw_node.typestring == "comment":
+                node = ApacheCommentNode(comment=raw_node.name[2:],
+                                         metadata=metadata, ancestor=self,
+                                         filepath=self.filepath)
+            elif raw_node.typestring == "block":
+                parameters = _parameters_from_string(raw_node.arguments)
+                node = ApacheBlockNode(name=raw_node.tag, parameters=parameters,
+                                       metadata=metadata, ancestor=self,
+                                       filepath=self.filepath, enabled=self.enabled)
+            else:
+                parameters = ()
+                if raw_node.value:
+                    parameters = _parameters_from_string(raw_node.value)
+                node = ApacheDirectiveNode(name=raw_node.name, parameters=parameters,
+                                           metadata=metadata, ancestor=self,
+                                           filepath=self.filepath, enabled=self.enabled)
+            children.append(node)
+        self.children = tuple(children)

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other):  # pragma: no cover
         if isinstance(other, self.__class__):
             return (self.name == other.name and
                     self.filepath == other.filepath and
@@ -102,46 +147,48 @@ class ApacheBlockNode(ApacheDirectiveNode):
                     self.dirty == other.dirty and
                     self.ancestor == other.ancestor and
                     self.metadata == other.metadata)
-        return False  # pragma: no cover
+        return False

-    # pylint: disable=unused-argument
-    def add_child_block(self, name: str, parameters: Optional[List[str]] = None,
-                        position: Optional[int] = None) -> "ApacheBlockNode":  # pragma: no cover
+    def _add_child_thing(self, raw_string, partial_node, position):
+        position = len(self._raw_children) if not position else position
+        # Cap position to length to mimic AugeasNode behavior. TODO: document that this happens
+        position = min(len(self._raw_children), position)
+        raw_ast = self._raw_children.add(position, raw_string)
+        metadata = self.metadata.copy()
+        metadata['ac_ast'] = raw_ast
+        new_node = partial_node(ancestor=self, metadata=metadata, filepath=self.filepath)
+
+        # Update metadata
+        children = list(self.children)
+        children.insert(position, new_node)
+        self.children = tuple(children)
+        return new_node
+
+    def add_child_block(self, name, parameters=None, position=None):
         """Adds a new BlockNode to the sequence of children"""
-        new_block = ApacheBlockNode(name=assertions.PASS,
-                                    parameters=assertions.PASS,
-                                    ancestor=self,
-                                    filepath=assertions.PASS,
-                                    metadata=self.metadata)
-        self.children += (new_block,)
-        return new_block
+        parameters_str = " " + " ".join(parameters) if parameters else ""
+        if not parameters:
+            parameters = []
+        partial_block = partial(ApacheBlockNode, name=name,
+                                parameters=tuple(parameters), enabled=self.enabled)
+        return self._add_child_thing("\n<%s%s>\n</%s>" % (name, parameters_str, name),
+                                     partial_block, position)

-    # pylint: disable=unused-argument
-    def add_child_directive(self, name: str, parameters: Optional[List[str]] = None,
-                            position: int = None) -> ApacheDirectiveNode:  # pragma: no cover
+    def add_child_directive(self, name, parameters=None, position=None):
         """Adds a new DirectiveNode to the sequence of children"""
-        new_dir = ApacheDirectiveNode(name=assertions.PASS,
-                                      parameters=assertions.PASS,
-                                      ancestor=self,
-                                      filepath=assertions.PASS,
-                                      metadata=self.metadata)
-        self.children += (new_dir,)
-        return new_dir
-
-    # pylint: disable=unused-argument
-    def add_child_comment(
-        self, name: str, parameters: Optional[int] = None, position: Optional[int] = None
-    ) -> ApacheCommentNode:  # pragma: no cover
+        parameters_str = " " + " ".join(parameters) if parameters else ""
+        if not parameters:  # TODO (mona): test
+            parameters = []  # pragma: no cover
+        partial_block = partial(ApacheDirectiveNode, name=name,
+                                parameters=tuple(parameters), enabled=self.enabled)
+        return self._add_child_thing("\n%s%s" % (name, parameters_str), partial_block, position)

+    def add_child_comment(self, comment="", position=None):
         """Adds a new CommentNode to the sequence of children"""
-        new_comment = ApacheCommentNode(comment=assertions.PASS,
-                                        ancestor=self,
-                                        filepath=assertions.PASS,
-                                        metadata=self.metadata)
-        self.children += (new_comment,)
-        return new_comment
+        partial_comment = partial(ApacheCommentNode, comment=comment)
+        return self._add_child_thing(comment, partial_comment, position)

-    def find_blocks(self, name: str, exclude: bool = True) -> List["ApacheBlockNode"]:  # pylint: disable=unused-argument
+    def find_blocks(self, name, exclude=True):  # pylint: disable=unused-argument
         """Recursive search of BlockNodes from the sequence of children"""
         return [ApacheBlockNode(name=assertions.PASS,
                                 parameters=assertions.PASS,
@@ -149,7 +196,7 @@ class ApacheBlockNode(ApacheDirectiveNode):
                                 filepath=assertions.PASS,
                                 metadata=self.metadata)]

-    def find_directives(self, name: str, exclude: bool = True) -> List[ApacheDirectiveNode]:  # pylint: disable=unused-argument
+    def find_directives(self, name, exclude=True):  # pylint: disable=unused-argument
         """Recursive search of DirectiveNodes from the sequence of children"""
         return [ApacheDirectiveNode(name=assertions.PASS,
                                     parameters=assertions.PASS,
@@ -157,23 +204,35 @@ class ApacheBlockNode(ApacheDirectiveNode):
                                     filepath=assertions.PASS,
                                     metadata=self.metadata)]

-    # pylint: disable=unused-argument
-    def find_comments(self, comment: str, exact: bool = False) -> List[ApacheCommentNode]:
+    def find_comments(self, comment, exact=False):  # pylint: disable=unused-argument
         """Recursive search of DirectiveNodes from the sequence of children"""
-        return [ApacheCommentNode(comment=assertions.PASS,  # pragma: no cover
+        return [ApacheCommentNode(comment=assertions.PASS,
                                   ancestor=self,
                                   filepath=assertions.PASS,
                                   metadata=self.metadata)]

-    def delete_child(self, child: ParserNode) -> None:
+    # TODO (mona): test
+    def delete_child(self, child):  # pragma: no cover
         """Deletes a ParserNode from the sequence of children"""
-        return  # pragma: no cover
+        index = -1
+        i = None
+        for i, elem in enumerate(self.children):
+            if elem == child:
+                index = i
+                break
+        if index < 0:
+            raise errors.PluginError("Could not find child node to delete")
+        children_list = list(self.children)
+        thing = children_list.pop(i)
+        self.children = tuple(children_list)
+        self._raw_children.remove(i)
+        return thing

-    def unsaved_files(self) -> List[str]:
+    def unsaved_files(self):  # pragma: no cover
         """Returns a list of unsaved filepaths"""
-        return [assertions.PASS]  # pragma: no cover
+        return [assertions.PASS]

-    def parsed_paths(self) -> List[str]:
+    def parsed_paths(self):  # pragma: no cover
         """Returns a list of parsed configuration file paths"""
         return [assertions.PASS]
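Editor's note: the `_parameters_from_string` helper added on the branch side above tokenizes raw apacheconfig argument strings. A self-contained copy with a few inputs showing the intended behavior (whitespace splits unquoted words; quoted spans keep embedded spaces and drop the surrounding quotes):

```python
# Copy of _parameters_from_string from the hunk above, for illustration only.
def _parameters_from_string(text):
    text = text.strip()
    words = []
    word = ""
    quote = None
    escape = False
    for c in text:
        if c.isspace() and not quote:
            if word:
                words.append(word)
                word = ""
        else:
            word += c
            if not escape:
                if not quote and c in "\"\'":
                    quote = c
                elif c == quote:
                    words.append(word[1:-1])
                    word = ""
                    quote = None
        escape = c == "\\"
    if word:
        words.append(word)
    return tuple(words)


assert _parameters_from_string("a b   c") == ("a", "b", "c")
assert _parameters_from_string('example.com "/var/www/my docs"') == \
    ("example.com", "/var/www/my docs")
```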
@@ -1,21 +1,13 @@
 """Dual parser node assertions"""
 import fnmatch
-from typing import Any
-from typing import Iterable
-from typing import List
-from typing import Optional
-from typing import Union

 from certbot_apache._internal import interfaces
-from certbot_apache._internal.interfaces import CommentNode
-from certbot_apache._internal.interfaces import DirectiveNode
-from certbot_apache._internal.interfaces import ParserNode
-from certbot_apache._internal.obj import VirtualHost


 PASS = "CERTBOT_PASS_ASSERT"


-def assertEqual(first: ParserNode, second: ParserNode) -> None:
+def assertEqual(first, second):
     """ Equality assertion """

     if isinstance(first, interfaces.CommentNode):
@@ -38,98 +30,84 @@ def assertEqual(first: ParserNode, second: ParserNode) -> None:
     # (but identical) directory structures.
     assert first.filepath == second.filepath


-def assertEqualComment(first: ParserNode, second: ParserNode) -> None:  # pragma: no cover
+def assertEqualComment(first, second):  # pragma: no cover
     """ Equality assertion for CommentNode """

     assert isinstance(first, interfaces.CommentNode)
     assert isinstance(second, interfaces.CommentNode)

-    if not isPass(first.comment) and not isPass(second.comment):
-        assert first.comment == second.comment
+    if not isPass(first.comment) and not isPass(second.comment):  # type: ignore
+        assert first.comment == second.comment  # type: ignore


-def _assertEqualDirectiveComponents(first: ParserNode,  # pragma: no cover
-                                    second: ParserNode) -> None:
+def _assertEqualDirectiveComponents(first, second):  # pragma: no cover
     """ Handles assertion for instance variables for DirectiveNode and BlockNode"""

     # Enabled value cannot be asserted, because Augeas implementation
     # is unable to figure that out.
     # assert first.enabled == second.enabled
-    assert isinstance(first, DirectiveNode)
-    assert isinstance(second, DirectiveNode)
-
     if not isPass(first.name) and not isPass(second.name):
         assert first.name == second.name

     if not isPass(first.parameters) and not isPass(second.parameters):
         assert first.parameters == second.parameters


-def assertEqualDirective(first: ParserNode, second: ParserNode) -> None:
+def assertEqualDirective(first, second):
     """ Equality assertion for DirectiveNode """

     assert isinstance(first, interfaces.DirectiveNode)
     assert isinstance(second, interfaces.DirectiveNode)
     _assertEqualDirectiveComponents(first, second)


-def isPass(value: Any) -> bool:  # pragma: no cover
+def isPass(value):  # pragma: no cover
     """Checks if the value is set to PASS"""
     if isinstance(value, bool):
         return True
     return PASS in value


-def isPassDirective(block: DirectiveNode) -> bool:
+def isPassDirective(block):
     """ Checks if BlockNode or DirectiveNode should pass the assertion """

     if isPass(block.name):
         return True
-    if isPass(block.parameters):  # pragma: no cover
+    if isPass(block.parameters): # pragma: no cover
         return True
-    if isPass(block.filepath):  # pragma: no cover
+    if isPass(block.filepath): # pragma: no cover
         return True
     return False


-def isPassComment(comment: CommentNode) -> bool:
+def isPassComment(comment):
     """ Checks if CommentNode should pass the assertion """

     if isPass(comment.comment):
         return True
-    if isPass(comment.filepath):  # pragma: no cover
+    if isPass(comment.filepath): # pragma: no cover
         return True
     return False


-def isPassNodeList(nodelist: List[Union[DirectiveNode, CommentNode]]) -> bool:  # pragma: no cover
+def isPassNodeList(nodelist):  # pragma: no cover
     """ Checks if a ParserNode in the nodelist should pass the assertion,
     this function is used for results of find_* methods. Unimplemented find_*
     methods should return a sequence containing a single ParserNode instance
     with assertion pass string."""

-    node: Optional[Union[DirectiveNode, CommentNode]]
     try:
         node = nodelist[0]
     except IndexError:
         node = None

-    if not node:  # pragma: no cover
+    if not node: # pragma: no cover
         return False

     if isinstance(node, interfaces.DirectiveNode):
         return isPassDirective(node)
     return isPassComment(node)


-def assertEqualSimple(first: Any, second: Any) -> None:
+def assertEqualSimple(first, second):
     """ Simple assertion """
     if not isPass(first) and not isPass(second):
         assert first == second


-def isEqualVirtualHost(first: VirtualHost, second: VirtualHost) -> bool:
+def isEqualVirtualHost(first, second):
     """
     Checks that two VirtualHost objects are similar. There are some built
     in differences with the implementations: VirtualHost created by ParserNode
@@ -149,17 +127,16 @@ def isEqualVirtualHost(first: VirtualHost, second: VirtualHost) -> bool:
         first.ancestor == second.ancestor
     )


-def assertEqualPathsList(first: Iterable[str], second: Iterable[str]) -> None:  # pragma: no cover
+def assertEqualPathsList(first, second):  # pragma: no cover
     """
     Checks that the two lists of file paths match. This assertion allows for wildcard
     paths.
     """
-    if any(isPass(path) for path in first):
+    if any([isPass(path) for path in first]):
        return
-    if any(isPass(path) for path in second):
+    if any([isPass(path) for path in second]):
        return
    for fpath in first:
-        assert any(fnmatch.fnmatch(fpath, spath) for spath in second)
+        assert any([fnmatch.fnmatch(fpath, spath) for spath in second])
    for spath in second:
-        assert any(fnmatch.fnmatch(fpath, spath) for fpath in first)
+        assert any([fnmatch.fnmatch(fpath, spath) for fpath in first])
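Editor's note: all of the assertions above hinge on the PASS sentinel; any compared value containing it short-circuits the check so that unimplemented parser methods can still be exercised. A minimal sketch of that contract, illustrative only:

```python
# Minimal illustration of the PASS-sentinel contract used by the dual-parser
# assertions above: values containing the sentinel are never compared.
PASS = "CERTBOT_PASS_ASSERT"


def isPass(value):
    if isinstance(value, bool):
        return True
    return PASS in value


def assertEqualSimple(first, second):
    if not isPass(first) and not isPass(second):
        assert first == second


assertEqualSimple("a", "a")          # values compared normally
assertEqualSimple(PASS, "anything")  # skipped: one side is the sentinel
```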
@@ -6,7 +6,7 @@ Authors:
   Raphael Pinson <raphink@gmail.com>

 About: Reference
-  Online Apache configuration manual: https://httpd.apache.org/docs/trunk/
+  Online Apache configuration manual: http://httpd.apache.org/docs/trunk/

 About: License
   This file is licensed under the LGPL v2+.
@@ -64,15 +64,9 @@ Translates over to:
     "/files/etc/apache2/apache2.conf/bLoCk[1]",
 ]
 """
-from typing import Any
-from typing import cast
-from typing import Dict
-from typing import Iterable
-from typing import List
-from typing import Optional
-from typing import Set
-from typing import Tuple
-from typing import Union
-from certbot import errors
-from certbot.compat import os
+from acme.magic_typing import Set  # pylint: disable=unused-import, no-name-in-module

 from certbot_apache._internal import apache_util
 from certbot_apache._internal import assertions
@@ -80,23 +74,18 @@ from certbot_apache._internal import interfaces
 from certbot_apache._internal import parser
 from certbot_apache._internal import parsernode_util as util

+from certbot import errors
+from certbot.compat import os


 class AugeasParserNode(interfaces.ParserNode):
     """ Augeas implementation of ParserNode interface """

-    def __init__(self, **kwargs: Any) -> None:
-        # pylint: disable=unused-variable
-        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)
-        super().__init__(**kwargs)
+    def __init__(self, **kwargs):
+        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)  # pylint: disable=unused-variable
+        super(AugeasParserNode, self).__init__(**kwargs)
         self.ancestor = ancestor
         self.filepath = filepath
         self.dirty = dirty
         self.metadata = metadata
-        self.parser = cast(parser.ApacheParser,
-                           self.metadata.get("augeasparser"))
+        self.parser = self.metadata.get("augeasparser")
         try:
             if self.metadata["augeaspath"].endswith("/"):
                 raise errors.PluginError(
@@ -107,20 +96,20 @@ class AugeasParserNode(interfaces.ParserNode):
         except KeyError:
             raise errors.PluginError("Augeas path is required")

-    def save(self, msg: Iterable[str]) -> None:
+    def save(self, msg):
         self.parser.save(msg)

-    def find_ancestors(self, name: str) -> List["AugeasParserNode"]:
+    def find_ancestors(self, name):
         """
         Searches for ancestor BlockNodes with a given name.

         :param str name: Name of the BlockNode parent to search for

         :returns: List of matching ancestor nodes.
-        :rtype: list of AugeasParserNode
+        :rtype: list of AugeasBlockNode
         """

-        ancestors: List["AugeasParserNode"] = []
+        ancestors = []

         parent = self.metadata["augeaspath"]
         while True:
@@ -130,12 +119,12 @@ class AugeasParserNode(interfaces.ParserNode):
             if not parent or parent == "/files":
                 break
             anc = self._create_blocknode(parent)
-            if anc.name is not None and anc.name.lower() == name.lower():
+            if anc.name.lower() == name.lower():
                 ancestors.append(anc)

         return ancestors

-    def _create_blocknode(self, path: str) -> "AugeasBlockNode":
+    def _create_blocknode(self, path):
         """
         Helper function to create a BlockNode from Augeas path. This is used by
         AugeasParserNode.find_ancestors and AugeasBlockNode.
@@ -143,25 +132,21 @@ class AugeasParserNode(interfaces.ParserNode):

         """

-        name: str = self._aug_get_name(path)
-        metadata: Dict[str, Union[parser.ApacheParser, str]] = {
-            "augeasparser": self.parser, "augeaspath": path
-        }
+        name = self._aug_get_name(path)
+        metadata = {"augeasparser": self.parser, "augeaspath": path}

         # Check if the file was included from the root config or initial state
-        file_path = apache_util.get_file_path(path)
-        if file_path is None:
-            raise ValueError(f"No file path found for vhost: {path}.")  # pragma: no cover
-
-        enabled = self.parser.parsed_in_original(file_path)
+        enabled = self.parser.parsed_in_original(
+            apache_util.get_file_path(path)
+        )

         return AugeasBlockNode(name=name,
                                enabled=enabled,
                                ancestor=assertions.PASS,
-                               filepath=file_path,
+                               filepath=apache_util.get_file_path(path),
                                metadata=metadata)

-    def _aug_get_name(self, path: str) -> str:
+    def _aug_get_name(self, path):
         """
         Helper function to get name of a configuration block or variable from path.
         """
@@ -175,18 +160,20 @@ class AugeasParserNode(interfaces.ParserNode):

         # remove [...], it's not allowed in Apache configuration and is used
         # for indexing within Augeas
-        return name.split("[")[0]
+        name = name.split("[")[0]
+        return name


 class AugeasCommentNode(AugeasParserNode):
     """ Augeas implementation of CommentNode interface """

-    def __init__(self, **kwargs: Any) -> None:
+    def __init__(self, **kwargs):
         comment, kwargs = util.commentnode_kwargs(kwargs)  # pylint: disable=unused-variable
-        super().__init__(**kwargs)
+        super(AugeasCommentNode, self).__init__(**kwargs)
+        # self.comment = comment
         self.comment = comment

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other):
         if isinstance(other, self.__class__):
             return (self.comment == other.comment and
                     self.filepath == other.filepath and
@@ -199,15 +186,15 @@ class AugeasCommentNode(AugeasParserNode):
 class AugeasDirectiveNode(AugeasParserNode):
     """ Augeas implementation of DirectiveNode interface """

-    def __init__(self, **kwargs: Any) -> None:
+    def __init__(self, **kwargs):
         name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
-        super().__init__(**kwargs)
+        super(AugeasDirectiveNode, self).__init__(**kwargs)
         self.name = name
         self.enabled = enabled
         if parameters:
             self.set_parameters(parameters)

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other):
         if isinstance(other, self.__class__):
             return (self.name == other.name and
                     self.filepath == other.filepath and
@@ -218,7 +205,7 @@ class AugeasDirectiveNode(AugeasParserNode):
                     self.metadata == other.metadata)
         return False

-    def set_parameters(self, parameters: Iterable[str]) -> None:
+    def set_parameters(self, parameters):
         """
         Sets parameters of a DirectiveNode or BlockNode object.

@@ -237,7 +224,7 @@ class AugeasDirectiveNode(AugeasParserNode):
             self.parser.aug.set(param_path, param)

     @property
-    def parameters(self) -> Tuple[str, ...]:
+    def parameters(self):
         """
         Fetches the parameters from Augeas tree, ensuring that the sequence always
         represents the current state
@@ -247,22 +234,21 @@ class AugeasDirectiveNode(AugeasParserNode):
         """
         return tuple(self._aug_get_params(self.metadata["augeaspath"]))

-    def _aug_get_params(self, path: str) -> List[str]:
+    def _aug_get_params(self, path):
         """Helper function to get parameters for DirectiveNodes and BlockNodes"""

         arg_paths = self.parser.aug.match(path + "/arg")
-        args = [self.parser.get_arg(apath) for apath in arg_paths]
-        return [arg for arg in args if arg is not None]
+        return [self.parser.get_arg(apath) for apath in arg_paths]


 class AugeasBlockNode(AugeasDirectiveNode):
     """ Augeas implementation of BlockNode interface """

-    def __init__(self, **kwargs: Any) -> None:
-        super().__init__(**kwargs)
-        self.children: Tuple["AugeasBlockNode", ...] = ()
+    def __init__(self, **kwargs):
+        super(AugeasBlockNode, self).__init__(**kwargs)
+        self.children = ()

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other):
         if isinstance(other, self.__class__):
             return (self.name == other.name and
                     self.filepath == other.filepath and
@@ -275,39 +261,33 @@ class AugeasBlockNode(AugeasDirectiveNode):
         return False

-    # pylint: disable=unused-argument
-    def add_child_block(self, name: str,  # pragma: no cover
-                        parameters: Optional[List[str]] = None,
-                        position: Optional[int] = None) -> "AugeasBlockNode":
+    def add_child_block(self, name, parameters=None, position=None):  # pragma: no cover
         """Adds a new BlockNode to the sequence of children"""

         insertpath, realpath, before = self._aug_resolve_child_position(
             name,
             position
         )
-        new_metadata: Dict[str, Any] = {"augeasparser": self.parser, "augeaspath": realpath}
+        new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}

         # Create the new block
         self.parser.aug.insert(insertpath, name, before)
         # Check if the file was included from the root config or initial state
-        file_path = apache_util.get_file_path(realpath)
-        if file_path is None:
-            raise errors.Error(f"No file path found for vhost: {realpath}")  # pragma: no cover
-        enabled = self.parser.parsed_in_original(file_path)
-
-        # Parameters will be set at the initialization of the new object
-        return AugeasBlockNode(
-            name=name,
-            parameters=parameters,
-            enabled=enabled,
-            ancestor=assertions.PASS,
-            filepath=file_path,
-            metadata=new_metadata,
-        )
+        enabled = self.parser.parsed_in_original(
+            apache_util.get_file_path(realpath)
+        )
+
+        # Parameters will be set at the initialization of the new object
+        new_block = AugeasBlockNode(name=name,
+                                    parameters=parameters,
+                                    enabled=enabled,
+                                    ancestor=assertions.PASS,
+                                    filepath=apache_util.get_file_path(realpath),
+                                    metadata=new_metadata)
+        return new_block

-    # pylint: disable=unused-argument
-    def add_child_directive(self, name: str,  # pragma: no cover
-                            parameters: Optional[List[str]] = None,
-                            position: Optional[int] = None) -> AugeasDirectiveNode:
+    def add_child_directive(self, name, parameters=None, position=None):  # pragma: no cover
         """Adds a new DirectiveNode to the sequence of children"""

         if not parameters:
@@ -324,50 +304,43 @@ class AugeasBlockNode(AugeasDirectiveNode):
         # Set the directive key
         self.parser.aug.set(realpath, name)
         # Check if the file was included from the root config or initial state
-        file_path = apache_util.get_file_path(realpath)
-        if file_path is None:
-            raise errors.Error(f"No file path found for vhost: {realpath}")  # pragma: no cover
-        enabled = self.parser.parsed_in_original(file_path)
-
-        return AugeasDirectiveNode(
-            name=name,
-            parameters=parameters,
-            enabled=enabled,
-            ancestor=assertions.PASS,
-            filepath=file_path,
-            metadata=new_metadata,
-        )
+        enabled = self.parser.parsed_in_original(
+            apache_util.get_file_path(realpath)
+        )
+
+        new_dir = AugeasDirectiveNode(name=name,
+                                      parameters=parameters,
+                                      enabled=enabled,
+                                      ancestor=assertions.PASS,
+                                      filepath=apache_util.get_file_path(realpath),
+                                      metadata=new_metadata)
+        return new_dir

-    def add_child_comment(
-        self, comment: str = "", position: Optional[int] = None
-    ) -> "AugeasCommentNode":
+    def add_child_comment(self, comment="", position=None):
         """Adds a new CommentNode to the sequence of children"""

         insertpath, realpath, before = self._aug_resolve_child_position(
             "#comment",
             position
         )
-        new_metadata: Dict[str, Any] = {
-            "augeasparser": self.parser, "augeaspath": realpath,
-        }
+        new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}

         # Create the new comment
         self.parser.aug.insert(insertpath, "#comment", before)
         # Set the comment content
         self.parser.aug.set(realpath, comment)

-        return AugeasCommentNode(
-            comment=comment,
-            ancestor=assertions.PASS,
-            filepath=apache_util.get_file_path(realpath),
-            metadata=new_metadata,
-        )
+        new_comment = AugeasCommentNode(comment=comment,
+                                        ancestor=assertions.PASS,
+                                        filepath=apache_util.get_file_path(realpath),
+                                        metadata=new_metadata)
+        return new_comment

-    def find_blocks(self, name: str, exclude: bool = True) -> List["AugeasBlockNode"]:
+    def find_blocks(self, name, exclude=True):
         """Recursive search of BlockNodes from the sequence of children"""

-        nodes: List["AugeasBlockNode"] = []
-        paths: Iterable[str] = self._aug_find_blocks(name)
+        nodes = list()
+        paths = self._aug_find_blocks(name)
         if exclude:
             paths = self.parser.exclude_dirs(paths)
         for path in paths:
@@ -375,14 +348,14 @@ class AugeasBlockNode(AugeasDirectiveNode):

         return nodes

-    def find_directives(self, name: str, exclude: bool = True) -> List["AugeasDirectiveNode"]:
+    def find_directives(self, name, exclude=True):
         """Recursive search of DirectiveNodes from the sequence of children"""

-        nodes = []
+        nodes = list()
         ownpath = self.metadata.get("augeaspath")

         directives = self.parser.find_dir(name, start=ownpath, exclude=exclude)
-        already_parsed: Set[str] = set()
+        already_parsed = set()  # type: Set[str]
         for directive in directives:
             # Remove the /arg part from the Augeas path
             directive = directive.partition("/arg")[0]
@@ -394,14 +367,14 @@ class AugeasBlockNode(AugeasDirectiveNode):

         return nodes

-    def find_comments(self, comment: str) -> List["AugeasCommentNode"]:
+    def find_comments(self, comment):
         """
         Recursive search of DirectiveNodes from the sequence of children.

         :param str comment: Comment content to search for.
         """

-        nodes: List["AugeasCommentNode"] = []
+        nodes = list()
         ownpath = self.metadata.get("augeaspath")

         comments = self.parser.find_comments(comment, start=ownpath)
@@ -410,11 +383,11 @@ class AugeasBlockNode(AugeasDirectiveNode):

         return nodes

-    def delete_child(self, child: "AugeasParserNode") -> None:
+    def delete_child(self, child):
         """
         Deletes a ParserNode from the sequence of children, and raises an
         exception if it's unable to do so.
-        :param AugeasParserNode child: A node to delete.
+        :param AugeasParserNode: child: A node to delete.
         """
         if not self.parser.aug.remove(child.metadata["augeaspath"]):
@@ -423,11 +396,11 @@ class AugeasBlockNode(AugeasDirectiveNode):
                 "seem to exist.").format(child.metadata["augeaspath"])
             )

-    def unsaved_files(self) -> Set[str]:
+    def unsaved_files(self):
         """Returns a list of unsaved filepaths"""
         return self.parser.unsaved_files()

-    def parsed_paths(self) -> List[str]:
+    def parsed_paths(self):
         """
         Returns a list of file paths that have currently been parsed into the parser
         tree. The returned list may include paths with wildcard characters, for
@@ -438,7 +411,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
         :returns: list of file paths of files that have been parsed
         """

-        res_paths: List[str] = []
+        res_paths = []

         paths = self.parser.existing_paths
         for directory in paths:
@@ -447,7 +420,7 @@ class AugeasBlockNode(AugeasDirectiveNode):

         return res_paths

-    def _create_commentnode(self, path: str) -> "AugeasCommentNode":
+    def _create_commentnode(self, path):
         """Helper function to create a CommentNode from Augeas path"""

         comment = self.parser.aug.get(path)
@@ -460,16 +433,14 @@ class AugeasBlockNode(AugeasDirectiveNode):
                                  filepath=apache_util.get_file_path(path),
                                  metadata=metadata)

-    def _create_directivenode(self, path: str) -> "AugeasDirectiveNode":
+    def _create_directivenode(self, path):
         """Helper function to create a DirectiveNode from Augeas path"""

         name = self.parser.get_arg(path)
-        metadata: Dict[str, Union[parser.ApacheParser, str]] = {
-            "augeasparser": self.parser, "augeaspath": path,
-        }
+        metadata = {"augeasparser": self.parser, "augeaspath": path}

         # Check if the file was included from the root config or initial state
-        enabled: bool = self.parser.parsed_in_original(
+        enabled = self.parser.parsed_in_original(
             apache_util.get_file_path(path)
         )
         return AugeasDirectiveNode(name=name,
@@ -478,12 +449,12 @@ class AugeasBlockNode(AugeasDirectiveNode):
                                    filepath=apache_util.get_file_path(path),
                                    metadata=metadata)

-    def _aug_find_blocks(self, name: str) -> Set[str]:
+    def _aug_find_blocks(self, name):
         """Helper function to perform a search to Augeas DOM tree to search
         configuration blocks with a given name"""

         # The code here is modified from configurator.get_virtual_hosts()
-        blk_paths: Set[str] = set()
+        blk_paths = set()
         for vhost_path in list(self.parser.parser_paths):
             paths = self.parser.aug.match(
                 ("/files%s//*[label()=~regexp('%s')]" %
@@ -492,8 +463,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
                 name.lower() in os.path.basename(path).lower()])
         return blk_paths

-    def _aug_resolve_child_position(
-            self, name: str, position: Optional[int]) -> Tuple[str, str, bool]:
+    def _aug_resolve_child_position(self, name, position):
         """
         Helper function that iterates through the immediate children and figures
         out the insertion path for a new AugeasParserNode.
@@ -518,16 +488,16 @@ class AugeasBlockNode(AugeasDirectiveNode):
         """

         # Default to appending
-        before: bool = False
+        before = False

-        all_children: str = self.parser.aug.match("{}/*".format(
+        all_children = self.parser.aug.match("{}/*".format(
             self.metadata["augeaspath"])
         )

         # Calculate resulting_path
         # Augeas indices start at 1. We use counter to calculate the index to
         # be used in resulting_path.
-        counter: int = 1
+        counter = 1
         for i, child in enumerate(all_children):
             if position is not None and i >= position:
                 # We're not going to insert the new node to an index after this
@@ -536,7 +506,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
             if name == childname:
                 counter += 1
|
||||
|
||||
resulting_path: str = "{}/{}[{}]".format(
|
||||
resulting_path = "{}/{}[{}]".format(
|
||||
self.metadata["augeaspath"],
|
||||
name,
|
||||
counter
|
||||
@@ -560,7 +530,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
position
|
||||
)
|
||||
|
||||
return insert_path, resulting_path, before
|
||||
return (insert_path, resulting_path, before)
|
||||
|
||||
|
||||
interfaces.CommentNode.register(AugeasCommentNode)
|
||||
|
||||
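The index arithmetic in `_aug_resolve_child_position` is easy to get wrong because Augeas indexes same-named siblings starting from 1. A minimal standalone sketch of that counting logic, using a plain list of hypothetical child labels instead of a live Augeas tree:

```
from typing import List, Optional

def resulting_child_path(parent: str, name: str, children: List[str],
                         position: Optional[int]) -> str:
    counter = 1  # Augeas indices start at 1
    for i, child in enumerate(children):
        if position is not None and i >= position:
            break  # the new node would be inserted before this child
        if child == name:
            counter += 1
    return "{}/{}[{}]".format(parent, name, counter)

# Appending a third "directive" after two existing ones yields index 3:
print(resulting_child_path("/files/etc/apache2/sites/a.conf/VirtualHost",
                           "directive",
                           ["directive", "#comment", "directive"], None))
```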
@@ -0,0 +1,25 @@
# This file contains important security parameters. If you modify this file
# manually, Certbot will be unable to automatically provide future security
# updates. Instead, Certbot will print and log an error message with a path to
# the up-to-date file that you will need to refer to when manually updating
# this file.

SSLEngine on

# Intermediate configuration, tweak to your needs
SSLProtocol all -SSLv2 -SSLv3
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
SSLHonorCipherOrder on

SSLOptions +StrictRequire

# Add vhost name to log entries:
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common

#CustomLog /var/log/apache2/access.log vhost_combined
#LogLevel warn
#ErrorLog /var/log/apache2/error.log

# Always ensure Cookies have "Secure" set (JAH 2012/1)
#Header edit Set-Cookie (?i)^(.*)(;\s*secure)??((\s*;)?(.*)) "$1; Secure$3$4"

File diff suppressed because it is too large
@@ -1,22 +1,17 @@
"""Apache plugin constants."""
from typing import Dict
from typing import List

import pkg_resources

from certbot.compat import os

MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
"""Name of the mod_ssl config file as saved
in `certbot.configuration.NamespaceConfig.config_dir`."""
"""Name of the mod_ssl config file as saved in `IConfig.config_dir`."""


UPDATED_MOD_SSL_CONF_DIGEST = ".updated-options-ssl-apache-conf-digest.txt"
"""Name of the hash of the updated or informed mod_ssl_conf as saved
in `certbot.configuration.NamespaceConfig.config_dir`."""
"""Name of the hash of the updated or informed mod_ssl_conf as saved in `IConfig.config_dir`."""

# NEVER REMOVE A SINGLE HASH FROM THIS LIST UNLESS YOU KNOW EXACTLY WHAT YOU ARE DOING!
ALL_SSL_OPTIONS_HASHES: List[str] = [
ALL_SSL_OPTIONS_HASHES = [
'2086bca02db48daf93468332543c60ac6acdb6f0b58c7bfdf578a5d47092f82a',
'4844d36c9a0f587172d9fa10f4f1c9518e3bcfa1947379f155e16a70a728c21a',
'5a922826719981c0a234b1fbcd495f3213e49d2519e845ea0748ba513044b65b',
@@ -29,11 +24,6 @@ ALL_SSL_OPTIONS_HASHES: List[str] = [
'0fcdc81280cd179a07ec4d29d3595068b9326b455c488de4b09f585d5dafc137',
'86cc09ad5415cd6d5f09a947fe2501a9344328b1e8a8b458107ea903e80baa6c',
'06675349e457eae856120cdebb564efe546f0b87399f2264baeb41e442c724c7',
'5cc003edd93fb9cd03d40c7686495f8f058f485f75b5e764b789245a386e6daf',
'007cd497a56a3bb8b6a2c1aeb4997789e7e38992f74e44cc5d13a625a738ac73',
'34783b9e2210f5c4a23bced2dfd7ec289834716673354ed7c7abf69fe30192a3',
'61466bc2f98a623c02be8a5ee916ead1655b0ce883bdc936692076ea499ff5ce',
'3fd812e3e87fe5c645d3682a511b2a06c8286f19594f28e280f17cd6af1301b5',
]
"""SHA256 hashes of the contents of previous versions of all versions of MOD_SSL_CONF_SRC"""

@@ -41,39 +31,39 @@ AUGEAS_LENS_DIR = pkg_resources.resource_filename(
"certbot_apache", os.path.join("_internal", "augeas_lens"))
"""Path to the Augeas lens directory"""

REWRITE_HTTPS_ARGS: List[str] = [
REWRITE_HTTPS_ARGS = [
"^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,NE,R=permanent]"]
"""Apache version<2.3.9 rewrite rule arguments used for redirections to
https vhost"""

REWRITE_HTTPS_ARGS_WITH_END: List[str] = [
REWRITE_HTTPS_ARGS_WITH_END = [
"^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,NE,R=permanent]"]
"""Apache version >= 2.3.9 rewrite rule arguments used for redirections to
https vhost"""

OLD_REWRITE_HTTPS_ARGS: List[List[str]] = [
OLD_REWRITE_HTTPS_ARGS = [
["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,QSA,R=permanent]"],
["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,QSA,R=permanent]"]]

HSTS_ARGS: List[str] = ["always", "set", "Strict-Transport-Security",
HSTS_ARGS = ["always", "set", "Strict-Transport-Security",
"\"max-age=31536000\""]
"""Apache header arguments for HSTS"""

UIR_ARGS: List[str] = ["always", "set", "Content-Security-Policy", "upgrade-insecure-requests"]
UIR_ARGS = ["always", "set", "Content-Security-Policy",
"upgrade-insecure-requests"]

HEADER_ARGS: Dict[str, List[str]] = {
"Strict-Transport-Security": HSTS_ARGS, "Upgrade-Insecure-Requests": UIR_ARGS,
}
HEADER_ARGS = {"Strict-Transport-Security": HSTS_ARGS,
"Upgrade-Insecure-Requests": UIR_ARGS}

AUTOHSTS_STEPS: List[int] = [60, 300, 900, 3600, 21600, 43200, 86400]
AUTOHSTS_STEPS = [60, 300, 900, 3600, 21600, 43200, 86400]
"""AutoHSTS increase steps: 1min, 5min, 15min, 1h, 6h, 12h, 24h"""

AUTOHSTS_PERMANENT: int = 31536000
AUTOHSTS_PERMANENT = 31536000
"""Value for the last max-age of HSTS"""

AUTOHSTS_FREQ: int = 172800
AUTOHSTS_FREQ = 172800
"""Minimum time since last increase to perform a new one: 48h"""

MANAGED_COMMENT: str = "DO NOT REMOVE - Managed by Certbot"
MANAGED_COMMENT_ID: str = MANAGED_COMMENT + ", VirtualHost id: {0}"
MANAGED_COMMENT = "DO NOT REMOVE - Managed by Certbot"
MANAGED_COMMENT_ID = MANAGED_COMMENT+", VirtualHost id: {0}"
"""Managed by Certbot comments and the VirtualHost identification template"""
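`ALL_SSL_OPTIONS_HASHES` drives the "has the user hand-edited this file?" check: if the deployed options file's digest appears in the list, the file is a pristine Certbot-shipped version and can be updated automatically. A minimal sketch of such a digest comparison (the path and hash set below are illustrative, not the plugin's actual code path):

```
import hashlib

def sha256_of(path: str) -> str:
    # Stream the file so large configs need not fit in memory
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Stand-in for ALL_SSL_OPTIONS_HASHES
known = {"2086bca02db48daf93468332543c60ac6acdb6f0b58c7bfdf578a5d47092f82a"}
safe_to_update = sha256_of("/etc/letsencrypt/options-ssl-apache.conf") in known
```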
@@ -1,36 +1,32 @@
"""Contains UI methods for Apache operations."""
import logging
from typing import Iterable
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple

from certbot_apache._internal.obj import VirtualHost
import zope.component

from certbot import errors
from certbot import interfaces
from certbot.compat import os
from certbot.display import util as display_util
import certbot.display.util as display_util

logger = logging.getLogger(__name__)


def select_vhost_multiple(vhosts: Optional[List[VirtualHost]]) -> List[VirtualHost]:
def select_vhost_multiple(vhosts):
"""Select multiple Vhosts to install the certificate for

:param vhosts: Available Apache VirtualHosts
:type vhosts: :class:`list` of type `~VirtualHost`
:type vhosts: :class:`list` of type `~obj.Vhost`

:returns: List of VirtualHosts
:rtype: :class:`list`of type `~VirtualHost`
:rtype: :class:`list`of type `~obj.Vhost`
"""
if not vhosts:
return []
return list()
tags_list = [vhost.display_repr()+"\n" for vhost in vhosts]
# Remove the extra newline from the last entry
if tags_list:
tags_list[-1] = tags_list[-1][:-1]
code, names = display_util.checklist(
code, names = zope.component.getUtility(interfaces.IDisplay).checklist(
"Which VirtualHosts would you like to install the wildcard certificate for?",
tags=tags_list, force_interactive=True)
if code == display_util.OK:
@@ -38,11 +34,10 @@ def select_vhost_multiple(vhosts: Optional[List[VirtualHost]]) -> List[VirtualHo
return return_vhosts
return []


def _reversemap_vhosts(names: Iterable[str], vhosts: Iterable[VirtualHost]) -> List[VirtualHost]:
def _reversemap_vhosts(names, vhosts):
"""Helper function for select_vhost_multiple for mapping string
representations back to actual vhost objects"""
return_vhosts = []
return_vhosts = list()

for selection in names:
for vhost in vhosts:
@@ -50,14 +45,11 @@ def _reversemap_vhosts(names: Iterable[str], vhosts: Iterable[VirtualHost]) -> L
return_vhosts.append(vhost)
return return_vhosts


def select_vhost(domain: str, vhosts: Sequence[VirtualHost]) -> Optional[VirtualHost]:
def select_vhost(domain, vhosts):
"""Select an appropriate Apache Vhost.

:param str domain: Domain for vhost selection

:param vhosts: Available Apache VirtualHosts
:type vhosts: :class:`list` of type `~VirtualHost`
:type vhosts: :class:`list` of type `~obj.Vhost`

:returns: VirtualHost or `None`
:rtype: `~obj.Vhost` or `None`
@@ -70,8 +62,7 @@ def select_vhost(domain: str, vhosts: Sequence[VirtualHost]) -> Optional[Virtual
return vhosts[tag]
return None


def _vhost_menu(domain: str, vhosts: Iterable[VirtualHost]) -> Tuple[str, int]:
def _vhost_menu(domain, vhosts):
"""Select an appropriate Apache Vhost.

:param vhosts: Available Apache Virtual Hosts
@@ -112,23 +103,23 @@ def _vhost_menu(domain: str, vhosts: Iterable[VirtualHost]) -> Tuple[str, int]:
https="HTTPS" if vhost.ssl else "",
active="Enabled" if vhost.enabled else "",
fn_size=filename_size,
name_size=disp_name_size),
name_size=disp_name_size)
)

try:
code, tag = display_util.menu(
f"We were unable to find a vhost with a ServerName "
f"or Address of {domain}.{os.linesep}Which virtual host would you "
f"like to choose?",
code, tag = zope.component.getUtility(interfaces.IDisplay).menu(
"We were unable to find a vhost with a ServerName "
"or Address of {0}.{1}Which virtual host would you "
"like to choose?".format(domain, os.linesep),
choices, force_interactive=True)
except errors.MissingCommandlineFlag:
msg = (
f"Encountered vhost ambiguity when trying to find a vhost for "
f"{domain} but was unable to ask for user "
f"guidance in non-interactive mode. Certbot may need "
f"vhosts to be explicitly labelled with ServerName or "
f"ServerAlias directives.")
logger.error(msg)
"Encountered vhost ambiguity when trying to find a vhost for "
"{0} but was unable to ask for user "
"guidance in non-interactive mode. Certbot may need "
"vhosts to be explicitly labelled with ServerName or "
"ServerAlias directives.".format(domain))
logger.warning(msg)
raise errors.MissingCommandlineFlag(msg)

return code, tag
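The checklist dialog hands back the displayed strings, not the objects, which is why `_reversemap_vhosts` exists. A sketch of that reverse-mapping idea with stub objects standing in for real VirtualHosts:

```
class StubVhost:
    def __init__(self, name: str) -> None:
        self.name = name
    def display_repr(self) -> str:
        # Hypothetical rendering; the real one is shown in obj.py below
        return f"File: /etc/apache2/sites-enabled/{self.name}.conf\n"

vhosts = [StubVhost("example"), StubVhost("other")]
selections = [vhosts[0].display_repr()]  # pretend the user ticked the first
selected = [vh for sel in selections for vh in vhosts
            if vh.display_repr() == sel]
print([vh.name for vh in selected])      # -> ['example']
```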
@@ -1,45 +1,20 @@
""" Dual ParserNode implementation """
from typing import Any
from typing import Generic
from typing import Iterable
from typing import List
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar

from certbot_apache._internal import apacheparser
from certbot_apache._internal import assertions
from certbot_apache._internal import augeasparser
from certbot_apache._internal import interfaces

if TYPE_CHECKING:
from certbot_apache._internal.apacheparser import ApacheParserNode # pragma: no cover
from certbot_apache._internal.augeasparser import AugeasParserNode # pragma: no cover

GenericAugeasParserNode = TypeVar("GenericAugeasParserNode", bound="AugeasParserNode")
GenericApacheParserNode = TypeVar("GenericApacheParserNode", bound="ApacheParserNode")
GenericDualNode = TypeVar("GenericDualNode", bound="DualNodeBase")
from certbot_apache._internal import apacheparser


class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):
class DualNodeBase(object):
""" Dual parser interface for in development testing. This is used as the
base class for dual parser interface classes. This class handles runtime
attribute value assertions."""

def __init__(self, primary: GenericAugeasParserNode,
secondary: GenericApacheParserNode) -> None:
self.primary = primary
self.secondary = secondary

def save(self, msg: str) -> None: # pragma: no cover
def save(self, msg): # pragma: no cover
""" Call save for both parsers """
self.primary.save(msg)
self.secondary.save(msg)

def __getattr__(self, aname: str) -> Any:
def __getattr__(self, aname):
""" Attribute value assertion """
firstval = getattr(self.primary, aname)
secondval = getattr(self.secondary, aname)
@@ -53,12 +28,11 @@ class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):
assertions.assertEqualSimple(firstval, secondval)
return firstval

def find_ancestors(self, name: str) -> List["DualNodeBase"]:
def find_ancestors(self, name):
""" Traverses the ancestor tree and returns ancestors matching name """
return self._find_helper(DualBlockNode, "find_ancestors", name)

def _find_helper(self, nodeclass: Type[GenericDualNode], findfunc: str, search: str,
**kwargs: Any) -> List[GenericDualNode]:
def _find_helper(self, nodeclass, findfunc, search, **kwargs):
"""A helper for find_* functions. The function specific attributes should
be passed as keyword arguments.

@@ -75,7 +49,7 @@ class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):

pass_primary = assertions.isPassNodeList(primary_res)
pass_secondary = assertions.isPassNodeList(secondary_res)
new_nodes = []
new_nodes = list()

if pass_primary and pass_secondary:
# Both unimplemented
@@ -98,11 +72,10 @@ class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):
return new_nodes


class DualCommentNode(DualNodeBase[augeasparser.AugeasCommentNode,
apacheparser.ApacheCommentNode]):
class DualCommentNode(DualNodeBase):
""" Dual parser implementation of CommentNode interface """

def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
""" This initialization implementation allows ordinary initialization
of CommentNode objects as well as creating a DualCommentNode object
using precreated or fetched CommentNode objects if provided as optional
@@ -122,21 +95,19 @@ class DualCommentNode(DualNodeBase[augeasparser.AugeasCommentNode,

if primary or secondary:
assert primary and secondary
super().__init__(primary, secondary)
self.primary = primary
self.secondary = secondary
else:
super().__init__(augeasparser.AugeasCommentNode(**kwargs),
apacheparser.ApacheCommentNode(**kwargs))
self.primary = augeasparser.AugeasCommentNode(**kwargs)
self.secondary = apacheparser.ApacheCommentNode(**kwargs)

assertions.assertEqual(self.primary, self.secondary)


class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
apacheparser.ApacheDirectiveNode]):
class DualDirectiveNode(DualNodeBase):
""" Dual parser implementation of DirectiveNode interface """

parameters: str

def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
""" This initialization implementation allows ordinary initialization
of DirectiveNode objects as well as creating a DualDirectiveNode object
using precreated or fetched DirectiveNode objects if provided as optional
@@ -147,6 +118,8 @@ class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
:param DirectiveNode primary: Primary pre-created DirectiveNode, mainly
used when creating new DualParser nodes using add_* methods.
:param DirectiveNode secondary: Secondary pre-created DirectiveNode


"""

kwargs.setdefault("primary", None)
@@ -156,14 +129,15 @@ class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,

if primary or secondary:
assert primary and secondary
super().__init__(primary, secondary)
self.primary = primary
self.secondary = secondary
else:
super().__init__(augeasparser.AugeasDirectiveNode(**kwargs),
apacheparser.ApacheDirectiveNode(**kwargs))
self.primary = augeasparser.AugeasDirectiveNode(**kwargs)
self.secondary = apacheparser.ApacheDirectiveNode(**kwargs)

assertions.assertEqual(self.primary, self.secondary)

def set_parameters(self, parameters: Iterable[str]) -> None:
def set_parameters(self, parameters):
""" Sets parameters and asserts that both implementation successfully
set the parameter sequence """

@@ -172,11 +146,10 @@ class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
assertions.assertEqual(self.primary, self.secondary)


class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
apacheparser.ApacheBlockNode]):
class DualBlockNode(DualNodeBase):
""" Dual parser implementation of BlockNode interface """

def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
""" This initialization implementation allows ordinary initialization
of BlockNode objects as well as creating a DualBlockNode object
using precreated or fetched BlockNode objects if provided as optional
@@ -191,20 +164,20 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,

kwargs.setdefault("primary", None)
kwargs.setdefault("secondary", None)
primary: Optional[augeasparser.AugeasBlockNode] = kwargs.pop("primary")
secondary: Optional[apacheparser.ApacheBlockNode] = kwargs.pop("secondary")
primary = kwargs.pop("primary")
secondary = kwargs.pop("secondary")

if primary or secondary:
assert primary and secondary
super().__init__(primary, secondary)
self.primary = primary
self.secondary = secondary
else:
super().__init__(augeasparser.AugeasBlockNode(**kwargs),
apacheparser.ApacheBlockNode(**kwargs))
self.primary = augeasparser.AugeasBlockNode(**kwargs)
self.secondary = apacheparser.ApacheBlockNode(**kwargs)

assertions.assertEqual(self.primary, self.secondary)

def add_child_block(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> "DualBlockNode":
def add_child_block(self, name, parameters=None, position=None):
""" Creates a new child BlockNode, asserts that both implementations
did it in a similar way, and returns a newly created DualBlockNode object
encapsulating both of the newly created objects """
@@ -212,10 +185,10 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
primary_new = self.primary.add_child_block(name, parameters, position)
secondary_new = self.secondary.add_child_block(name, parameters, position)
assertions.assertEqual(primary_new, secondary_new)
return DualBlockNode(primary=primary_new, secondary=secondary_new)
new_block = DualBlockNode(primary=primary_new, secondary=secondary_new)
return new_block

def add_child_directive(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> DualDirectiveNode:
def add_child_directive(self, name, parameters=None, position=None):
""" Creates a new child DirectiveNode, asserts that both implementations
did it in a similar way, and returns a newly created DualDirectiveNode
object encapsulating both of the newly created objects """
@@ -223,22 +196,21 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
primary_new = self.primary.add_child_directive(name, parameters, position)
secondary_new = self.secondary.add_child_directive(name, parameters, position)
assertions.assertEqual(primary_new, secondary_new)
return DualDirectiveNode(primary=primary_new, secondary=secondary_new)
new_dir = DualDirectiveNode(primary=primary_new, secondary=secondary_new)
return new_dir

def add_child_comment(self, comment: str = "",
position: Optional[int] = None) -> DualCommentNode:
def add_child_comment(self, comment="", position=None):
""" Creates a new child CommentNode, asserts that both implementations
did it in a similar way, and returns a newly created DualCommentNode
object encapsulating both of the newly created objects """

primary_new = self.primary.add_child_comment(comment=comment, position=position)
secondary_new = self.secondary.add_child_comment(name=comment, position=position)
primary_new = self.primary.add_child_comment(comment, position)
secondary_new = self.secondary.add_child_comment(comment, position)
assertions.assertEqual(primary_new, secondary_new)
return DualCommentNode(primary=primary_new, secondary=secondary_new)
new_comment = DualCommentNode(primary=primary_new, secondary=secondary_new)
return new_comment

def _create_matching_list(self, primary_list: Iterable[interfaces.ParserNode],
secondary_list: Iterable[interfaces.ParserNode]
) -> List[Tuple[interfaces.ParserNode, interfaces.ParserNode]]:
def _create_matching_list(self, primary_list, secondary_list):
""" Matches the list of primary_list to a list of secondary_list and
returns a list of tuples. This is used to create results for find_
methods.
@@ -249,7 +221,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
implementations to a list of tuples.
"""

matched = []
matched = list()
for p in primary_list:
match = None
for s in secondary_list:
@@ -265,7 +237,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
raise AssertionError("Could not find a matching node.")
return matched

def find_blocks(self, name: str, exclude: bool = True) -> List["DualBlockNode"]:
def find_blocks(self, name, exclude=True):
"""
Performs a search for BlockNodes using both implementations and does simple
checks for results. This is built upon the assumption that unimplemented
@@ -277,7 +249,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
return self._find_helper(DualBlockNode, "find_blocks", name,
exclude=exclude)

def find_directives(self, name: str, exclude: bool = True) -> List[DualDirectiveNode]:
def find_directives(self, name, exclude=True):
"""
Performs a search for DirectiveNodes using both implementations and
checks the results. This is built upon the assumption that unimplemented
@@ -289,7 +261,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
return self._find_helper(DualDirectiveNode, "find_directives", name,
exclude=exclude)

def find_comments(self, comment: str) -> List[DualCommentNode]:
def find_comments(self, comment):
"""
Performs a search for CommentNodes using both implementations and
checks the results. This is built upon the assumption that unimplemented
@@ -300,7 +272,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,

return self._find_helper(DualCommentNode, "find_comments", comment)

def delete_child(self, child: "DualBlockNode") -> None:
def delete_child(self, child):
"""Deletes a child from the ParserNode implementations. The actual
ParserNode implementations are used here directly in order to be able
to match a child to the list of children."""
@@ -308,7 +280,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
self.primary.delete_child(child.primary)
self.secondary.delete_child(child.secondary)

def unsaved_files(self) -> Set[str]:
def unsaved_files(self):
""" Fetches the list of unsaved file paths and asserts that the lists
match """
primary_files = self.primary.unsaved_files()
@@ -317,7 +289,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,

return primary_files

def parsed_paths(self) -> List[str]:
def parsed_paths(self):
"""
Returns a list of file paths that have currently been parsed into the parser
tree. The returned list may include paths with wildcard characters, for
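The whole dual-parser module rests on one pattern: run every operation against both parser implementations and fail fast when they disagree. A toy version of the `__getattr__` assertion from `DualNodeBase`, using plain stub objects (not certbot classes):

```
class StubNode:
    def __init__(self, name: str) -> None:
        self.name = name

class Dual:
    """Serve reads from primary, but check secondary agrees (cf. DualNodeBase)."""
    def __init__(self, primary: StubNode, secondary: StubNode) -> None:
        self.primary = primary
        self.secondary = secondary
    def __getattr__(self, attr: str):
        # Only called for attributes not found on the instance itself
        first = getattr(self.primary, attr)
        second = getattr(self.secondary, attr)
        assert first == second, f"{attr} diverged: {first!r} != {second!r}"
        return first

print(Dual(StubNode("ServerName"), StubNode("ServerName")).name)
```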
@@ -1,7 +1,9 @@
""" Entry point for Apache Plugin """
from typing import Dict
from typing import Type
# Pylint does not like disutils.version when running inside a venv.
# See: https://github.com/PyCQA/pylint/issues/73
from distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error

from certbot import util
from certbot_apache._internal import configurator
from certbot_apache._internal import override_arch
from certbot_apache._internal import override_centos
@@ -10,11 +12,8 @@ from certbot_apache._internal import override_debian
from certbot_apache._internal import override_fedora
from certbot_apache._internal import override_gentoo
from certbot_apache._internal import override_suse
from certbot_apache._internal import override_void

from certbot import util

OVERRIDE_CLASSES: Dict[str, Type[configurator.ApacheConfigurator]] = {
OVERRIDE_CLASSES = {
"arch": override_arch.ArchConfigurator,
"cloudlinux": override_centos.CentOSConfigurator,
"darwin": override_darwin.DarwinConfigurator,
@@ -38,19 +37,17 @@ OVERRIDE_CLASSES: Dict[str, Type[configurator.ApacheConfigurator]] = {
"sles": override_suse.OpenSUSEConfigurator,
"scientific": override_centos.CentOSConfigurator,
"scientific linux": override_centos.CentOSConfigurator,
"void": override_void.VoidConfigurator,
}


def get_configurator() -> Type[configurator.ApacheConfigurator]:
def get_configurator():
""" Get correct configurator class based on the OS fingerprint """
os_name, os_version = util.get_os_info()
os_name = os_name.lower()
override_class = None

# Special case for older Fedora versions
min_version = util.parse_loose_version('29')
if os_name == 'fedora' and util.parse_loose_version(os_version) < min_version:
if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):
os_name = 'fedora_old'

try:
@@ -60,7 +57,8 @@ def get_configurator() -> Type[configurator.ApacheConfigurator]:
os_like = util.get_systemd_os_like()
if os_like:
for os_name in os_like:
override_class = OVERRIDE_CLASSES.get(os_name)
if os_name in OVERRIDE_CLASSES.keys():
override_class = OVERRIDE_CLASSES[os_name]
if not override_class:
# No override class found, return the generic configurator
override_class = configurator.ApacheConfigurator
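One side of this hunk folds the "test membership, then index" pattern into a single `dict.get()` lookup. A small illustration with stand-in values; this variant keeps the first hit, whereas the hunk above reassigns on every iteration:

```
# Hypothetical mapping and ID_LIKE chain, for illustration only
OVERRIDES = {"debian": "DebianConfigurator", "fedora": "FedoraConfigurator"}

override = None
for os_name in ("raspbian", "debian"):
    candidate = OVERRIDES.get(os_name)  # one lookup; None when absent
    if candidate:
        override = candidate
        break
print(override)  # -> DebianConfigurator
```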
@@ -1,22 +1,14 @@
"""A class that performs HTTP-01 challenges for Apache"""
import errno
import logging
from typing import List
from typing import Set
from typing import TYPE_CHECKING

from certbot_apache._internal.obj import VirtualHost
from certbot_apache._internal.parser import get_aug_path

from acme.challenges import KeyAuthorizationChallengeResponse
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
from certbot import errors
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge
from certbot.compat import filesystem
from certbot.compat import os
from certbot.plugins import common

if TYPE_CHECKING:
from certbot_apache._internal.configurator import ApacheConfigurator # pragma: no cover
from certbot_apache._internal.obj import VirtualHost # pylint: disable=unused-import
from certbot_apache._internal.parser import get_aug_path

logger = logging.getLogger(__name__)

@@ -53,9 +45,8 @@ class ApacheHttp01(common.ChallengePerformer):
</Location>
"""

def __init__(self, configurator: "ApacheConfigurator") -> None:
super().__init__(configurator)
self.configurator: "ApacheConfigurator"
def __init__(self, *args, **kwargs):
super(ApacheHttp01, self).__init__(*args, **kwargs)
self.challenge_conf_pre = os.path.join(
self.configurator.conf("challenge-location"),
"le_http_01_challenge_pre.conf")
@@ -65,9 +56,9 @@ class ApacheHttp01(common.ChallengePerformer):
self.challenge_dir = os.path.join(
self.configurator.config.work_dir,
"http_challenges")
self.moded_vhosts: Set[VirtualHost] = set()
self.moded_vhosts = set() # type: Set[VirtualHost]

def perform(self) -> List[KeyAuthorizationChallengeResponse]:
def perform(self):
"""Perform all HTTP-01 challenges."""
if not self.achalls:
return []
@@ -75,7 +66,8 @@ class ApacheHttp01(common.ChallengePerformer):
# About to make temporary changes to the config
self.configurator.save("Changes before challenge setup", True)

self.configurator.ensure_listen(str(self.configurator.config.http01_port))
self.configurator.ensure_listen(str(
self.configurator.config.http01_port))
self.prepare_http01_modules()

responses = self._set_up_challenges()
@@ -86,7 +78,7 @@ class ApacheHttp01(common.ChallengePerformer):

return responses

def prepare_http01_modules(self) -> None:
def prepare_http01_modules(self):
"""Make sure that we have the needed modules available for http01"""

if self.configurator.conf("handle-modules"):
@@ -99,13 +91,13 @@ class ApacheHttp01(common.ChallengePerformer):
if mod + "_module" not in self.configurator.parser.modules:
self.configurator.enable_mod(mod, temp=True)

def _mod_config(self) -> None:
selected_vhosts: List[VirtualHost] = []
def _mod_config(self):
selected_vhosts = [] # type: List[VirtualHost]
http_port = str(self.configurator.config.http01_port)

# Search for VirtualHosts matching by name
for chall in self.achalls:
selected_vhosts += self._matching_vhosts(chall.domain)
# Search for matching VirtualHosts
for vh in self._matching_vhosts(chall.domain):
selected_vhosts.append(vh)

# Ensure that we have one or more VirtualHosts that we can continue
# with. (one that listens to port configured with --http-01-port)
@@ -114,13 +106,9 @@ class ApacheHttp01(common.ChallengePerformer):
if any(a.is_wildcard() or a.get_port() == http_port for a in vhost.addrs):
found = True

# If there's at least one eligible VirtualHost, also add all unnamed VirtualHosts
# because they might match at runtime (#8890)
if found:
selected_vhosts += self._unnamed_vhosts()
# Otherwise, add every Virtualhost which listens on the right port
else:
selected_vhosts += self._relevant_vhosts()
if not found:
for vh in self._relevant_vhosts():
selected_vhosts.append(vh)

# Add the challenge configuration
for vh in selected_vhosts:
@@ -148,7 +136,7 @@ class ApacheHttp01(common.ChallengePerformer):
with open(self.challenge_conf_post, "w") as new_conf:
new_conf.write(config_text_post)

def _matching_vhosts(self, domain: str) -> List[VirtualHost]:
def _matching_vhosts(self, domain):
"""Return all VirtualHost objects that have the requested domain name or
a wildcard name that would match the domain in ServerName or ServerAlias
directive.
@@ -162,9 +150,9 @@ class ApacheHttp01(common.ChallengePerformer):

return matching_vhosts

def _relevant_vhosts(self) -> List[VirtualHost]:
def _relevant_vhosts(self):
http01_port = str(self.configurator.config.http01_port)
relevant_vhosts: List[VirtualHost] = []
relevant_vhosts = []
for vhost in self.configurator.vhosts:
if any(a.is_wildcard() or a.get_port() == http01_port for a in vhost.addrs):
if not vhost.ssl:
@@ -178,21 +166,9 @@ class ApacheHttp01(common.ChallengePerformer):

return relevant_vhosts

def _unnamed_vhosts(self) -> List[VirtualHost]:
"""Return all VirtualHost objects with no ServerName"""
return [vh for vh in self.configurator.vhosts if vh.name is None]

def _set_up_challenges(self) -> List[KeyAuthorizationChallengeResponse]:
def _set_up_challenges(self):
if not os.path.isdir(self.challenge_dir):
old_umask = filesystem.umask(0o022)
try:
filesystem.makedirs(self.challenge_dir, 0o755)
except OSError as exception:
if exception.errno not in (errno.EEXIST, errno.EISDIR):
raise errors.PluginError(
"Couldn't create root for http-01 challenge")
finally:
filesystem.umask(old_umask)
filesystem.makedirs(self.challenge_dir, 0o755)

responses = []
for achall in self.achalls:
@@ -200,11 +176,10 @@ class ApacheHttp01(common.ChallengePerformer):

return responses

def _set_up_challenge(self, achall: KeyAuthorizationAnnotatedChallenge
) -> KeyAuthorizationChallengeResponse:
def _set_up_challenge(self, achall):
response, validation = achall.response_and_validation()

name: str = os.path.join(self.challenge_dir, achall.chall.encode("token"))
name = os.path.join(self.challenge_dir, achall.chall.encode("token"))

self.configurator.reverter.register_file_creation(True, name)
with open(name, 'wb') as f:
@@ -213,7 +188,7 @@ class ApacheHttp01(common.ChallengePerformer):

return response

def _set_up_include_directives(self, vhost: VirtualHost) -> None:
def _set_up_include_directives(self, vhost):
"""Includes override configuration to the beginning and to the end of
VirtualHost. Note that this include isn't added to Augeas search tree"""
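The umask-guarded directory creation in `_set_up_challenges` exists because `makedirs(path, 0o755)` is still filtered through the process umask; the web server must be able to read the challenge files. A stdlib sketch of the same pattern (the plugin itself goes through the `certbot.compat.filesystem` wrappers):

```
import os

def make_challenge_dir(path: str) -> None:
    old_umask = os.umask(0o022)  # ensure group/other read+execute survive
    try:
        os.makedirs(path, 0o755, exist_ok=True)
    finally:
        os.umask(old_umask)      # always restore the previous umask
```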
@@ -98,18 +98,15 @@ names and parameters in case insensitive manner. This does not apply to everythi
however, for example the parameters of a conditional statement may be case sensitive.
For this reason the internal representation of data should not ignore the case.
"""

import abc
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import TypeVar
import six

GenericParserNode = TypeVar("GenericParserNode", bound="ParserNode")
from acme.magic_typing import Any, Dict, Optional, Tuple # pylint: disable=unused-import, no-name-in-module


class ParserNode(metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class ParserNode(object):
"""
ParserNode is the basic building block of the tree of such nodes,
representing the structure of the configuration. It is largely meant to keep
@@ -151,13 +148,9 @@ class ParserNode(metaclass=abc.ABCMeta):
# for the ParserNode instance.
metadata: Dict[str, Any]
"""
ancestor: Optional["ParserNode"]
dirty: bool
filepath: Optional[str]
metadata: Dict[str, Any]

@abc.abstractmethod
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""
Initializes the ParserNode instance, and sets the ParserNode specific
instance variables. This is not meant to be used directly, but through
@@ -181,7 +174,7 @@ class ParserNode(metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def save(self, msg: str) -> None:
def save(self, msg):
"""
Save traverses the children, and attempts to write the AST to disk for
all the objects that are marked dirty. The actual operation of course
@@ -200,7 +193,7 @@ class ParserNode(metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def find_ancestors(self: GenericParserNode, name: str) -> List[GenericParserNode]:
def find_ancestors(self, name):
"""
Traverses the ancestor tree up, searching for BlockNodes with a specific
name.
@@ -212,7 +205,9 @@ class ParserNode(metaclass=abc.ABCMeta):
"""


class CommentNode(ParserNode, metaclass=abc.ABCMeta):
# Linter rule exclusion done because of https://github.com/PyCQA/pylint/issues/179
@six.add_metaclass(abc.ABCMeta) # pylint: disable=abstract-method
class CommentNode(ParserNode):
"""
CommentNode class is used for representation of comments within the parsed
configuration structure. Because of the nature of comments, it is not able
@@ -229,10 +224,9 @@ class CommentNode(ParserNode, metaclass=abc.ABCMeta):
comment: str

"""
comment: str

@abc.abstractmethod
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""
Initializes the CommentNode instance and sets its instance variables.

@@ -250,15 +244,14 @@ class CommentNode(ParserNode, metaclass=abc.ABCMeta):
created or changed after the last save. Default: False.
:type dirty: bool
"""
super().__init__( # pragma: no cover
ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {}),
)
super(CommentNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {})) # pragma: no cover


class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class DirectiveNode(ParserNode):
"""
DirectiveNode class represents a configuration directive within the configuration.
It can have zero or more parameters attached to it. Because of the nature of
@@ -284,12 +277,9 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
parameters: Tuple[str, ...]

"""
enabled: bool
name: Optional[str]
parameters: Tuple[str, ...]

@abc.abstractmethod
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""
Initializes the DirectiveNode instance and sets its instance variables.

@@ -319,26 +309,25 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
:type enabled: bool

"""
super().__init__( # pragma: no cover
ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {}),
)
super(DirectiveNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {})) # pragma: no cover

@abc.abstractmethod
def set_parameters(self, parameters: List[str]) -> None:
def set_parameters(self, parameters):
"""
Sets the sequence of parameters for this ParserNode object without
whitespaces. While the whitespaces for parameters are discarded when using
this method, the whitespacing preceding the ParserNode itself should be
this method, the whitespacing preceeding the ParserNode itself should be
kept intact.

:param list parameters: sequence of parameters
"""


class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class BlockNode(DirectiveNode):
"""
BlockNode class represents a block of nested configuration directives, comments
and other blocks as its children. A BlockNode can have zero or more parameters
@@ -378,14 +367,12 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
children: Tuple[ParserNode, ...]

"""
children: Tuple[ParserNode, ...]

@abc.abstractmethod
def add_child_block(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> "BlockNode":
def add_child_block(self, name, parameters=None, position=None):
"""
Adds a new BlockNode child node with provided values and marks the callee
BlockNode dirty. This is used to add new children to the AST. The preceding
BlockNode dirty. This is used to add new children to the AST. The preceeding
whitespaces should not be added based on the ancestor or siblings for the
newly created object. This is to match the current behavior of the legacy
parser implementation.
@@ -402,12 +389,11 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def add_child_directive(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> DirectiveNode:
def add_child_directive(self, name, parameters=None, position=None):
"""
Adds a new DirectiveNode child node with provided values and marks the
callee BlockNode dirty. This is used to add new children to the AST. The
preceding whitespaces should not be added based on the ancestor or siblings
preceeding whitespaces should not be added based on the ancestor or siblings
for the newly created object. This is to match the current behavior of the
legacy parser implementation.

@@ -424,11 +410,11 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def add_child_comment(self, comment: str = "", position: Optional[int] = None) -> CommentNode:
def add_child_comment(self, comment="", position=None):
"""
Adds a new CommentNode child node with provided value and marks the
callee BlockNode dirty. This is used to add new children to the AST. The
preceding whitespaces should not be added based on the ancestor or siblings
preceeding whitespaces should not be added based on the ancestor or siblings
for the newly created object. This is to match the current behavior of the
legacy parser implementation.

@@ -444,7 +430,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def find_blocks(self, name: str, exclude: bool = True) -> List["BlockNode"]:
def find_blocks(self, name, exclude=True):
"""
Find a configuration block by name. This method walks the child tree of
ParserNodes under the instance it was called from. This way it is possible
@@ -461,7 +447,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def find_directives(self, name: str, exclude: bool = True) -> List[DirectiveNode]:
def find_directives(self, name, exclude=True):
"""
Find a directive by name. This method walks the child tree of ParserNodes
under the instance it was called from. This way it is possible to search
@@ -479,7 +465,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def find_comments(self, comment: str) -> List[CommentNode]:
def find_comments(self, comment):
"""
Find comments with value containing the search term.

@@ -495,7 +481,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def delete_child(self, child: ParserNode) -> None:
def delete_child(self, child):
"""
Remove a specified child node from the list of children of the called
BlockNode object.
@@ -505,7 +491,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def unsaved_files(self) -> List[str]:
def unsaved_files(self):
"""
Returns a list of file paths that have been changed since the last save
(or the initial configuration parse). The intended use for this method
@@ -518,7 +504,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""

@abc.abstractmethod
def parsed_paths(self) -> List[str]:
def parsed_paths(self):
"""
Returns a list of file paths that have currently been parsed into the parser
tree. The returned list may include paths with wildcard characters, for
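The two ABC spellings exchanged throughout this file are equivalent; the `six` form merely predates Python 3-only syntax. A quick demonstration (requires the `six` package to be installed):

```
import abc
import six

class NodeA(metaclass=abc.ABCMeta):      # Python 3 syntax
    pass

@six.add_metaclass(abc.ABCMeta)          # six spelling, works on 2 and 3
class NodeB(object):
    pass

print(type(NodeA) is abc.ABCMeta, type(NodeB) is abc.ABCMeta)  # True True
```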
@@ -1,23 +1,14 @@
"""Module contains classes used by the Apache Configurator."""
import re
from typing import Any
from typing import Iterable
from typing import Optional
from typing import Pattern
from typing import Set
from typing import Union

from certbot_apache._internal.apacheparser import ApacheBlockNode
from certbot_apache._internal.augeasparser import AugeasBlockNode
from certbot_apache._internal.dualparser import DualBlockNode

from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
from certbot.plugins import common


class Addr(common.Addr):
"""Represents an Apache address."""

def __eq__(self, other: Any) -> bool:
def __eq__(self, other):
"""This is defined as equivalent within Apache.

ip_addr:* == ip_addr
@@ -29,20 +20,23 @@ class Addr(common.Addr):
self.is_wildcard() and other.is_wildcard()))
return False

def __repr__(self) -> str:
return f"certbot_apache._internal.obj.Addr({repr(self.tup)})"
def __ne__(self, other):
return not self.__eq__(other)

def __hash__(self) -> int: # pylint: disable=useless-super-delegation
def __repr__(self):
return "certbot_apache._internal.obj.Addr(" + repr(self.tup) + ")"

def __hash__(self): # pylint: disable=useless-super-delegation
# Python 3 requires explicit overridden for __hash__ if __eq__ or
# __cmp__ is overridden. See https://bugs.python.org/issue2235
return super().__hash__()
return super(Addr, self).__hash__()

def _addr_less_specific(self, addr: "Addr") -> bool:
def _addr_less_specific(self, addr):
"""Returns if addr.get_addr() is more specific than self.get_addr()."""
# pylint: disable=protected-access
return addr._rank_specific_addr() > self._rank_specific_addr()

def _rank_specific_addr(self) -> int:
def _rank_specific_addr(self):
"""Returns numerical rank for get_addr()

:returns: 2 - FQ, 1 - wildcard, 0 - _default_
@@ -55,7 +49,7 @@ class Addr(common.Addr):
return 1
return 2

def conflicts(self, addr: "Addr") -> bool:
def conflicts(self, addr):
r"""Returns if address could conflict with correct function of self.

Could addr take away service provided by self within Apache?
@@ -83,11 +77,11 @@ class Addr(common.Addr):
return True
return False

def is_wildcard(self) -> bool:
def is_wildcard(self):
"""Returns if address has a wildcard port."""
return self.tup[1] == "*" or not self.tup[1]

def get_sni_addr(self, port: str) -> common.Addr:
def get_sni_addr(self, port):
"""Returns the least specific address that resolves on the port.

Examples:
@@ -104,7 +98,7 @@ class Addr(common.Addr):
return self.get_addr_obj(port)


class VirtualHost:
class VirtualHost(object):
"""Represents an Apache Virtualhost.

:ivar str filep: file path of VH
@@ -127,16 +121,13 @@ class VirtualHost:

"""
# ?: is used for not returning enclosed characters
strip_name: Pattern = re.compile(r"^(?:.+://)?([^ :$]*)")
strip_name = re.compile(r"^(?:.+://)?([^ :$]*)")

def __init__(self, filepath: str, path: str, addrs: Set["Addr"], ssl: bool,
enabled: bool, name: Optional[str] = None, aliases: Optional[Set[str]] = None,
modmacro: bool = False, ancestor: Optional["VirtualHost"] = None,
node: Optional[Union[ApacheBlockNode, AugeasBlockNode, DualBlockNode]] = None
) -> None:
def __init__(self, filep, path, addrs, ssl, enabled, name=None,
aliases=None, modmacro=False, ancestor=None, node=None):

"""Initialize a VH."""
self.filep = filepath
self.filep = filep
self.path = path
self.addrs = addrs
self.name = name
@@ -147,9 +138,9 @@ class VirtualHost:
self.ancestor = ancestor
self.node = node

def get_names(self) -> Set[str]:
def get_names(self):
"""Return a set of all names."""
all_names: Set[str] = set()
all_names = set() # type: Set[str]
all_names.update(self.aliases)
# Strip out any scheme:// and <port> field from servername
if self.name is not None:
@@ -157,28 +148,39 @@ class VirtualHost:

return all_names

def __str__(self) -> str:
def __str__(self):
return (
f"File: {self.filep}\n"
f"Vhost path: {self.path}\n"
f"Addresses: {', '.join(str(addr) for addr in self.addrs)}\n"
f"Name: {self.name if self.name is not None else ''}\n"
f"Aliases: {', '.join(name for name in self.aliases)}\n"
f"TLS Enabled: {'Yes' if self.ssl else 'No'}\n"
f"Site Enabled: {'Yes' if self.enabled else 'No'}\n"
f"mod_macro Vhost: {'Yes' if self.modmacro else 'No'}"
)
"File: {filename}\n"
"Vhost path: {vhpath}\n"
"Addresses: {addrs}\n"
"Name: {name}\n"
"Aliases: {aliases}\n"
"TLS Enabled: {tls}\n"
"Site Enabled: {active}\n"
"mod_macro Vhost: {modmacro}".format(
filename=self.filep,
vhpath=self.path,
addrs=", ".join(str(addr) for addr in self.addrs),
name=self.name if self.name is not None else "",
aliases=", ".join(name for name in self.aliases),
tls="Yes" if self.ssl else "No",
active="Yes" if self.enabled else "No",
modmacro="Yes" if self.modmacro else "No"))

def display_repr(self) -> str:
def display_repr(self):
"""Return a representation of VHost to be used in dialog"""
return (
f"File: {self.filep}\n"
f"Addresses: {', '.join(str(addr) for addr in self.addrs)}\n"
f"Names: {', '.join(self.get_names())}\n"
f"HTTPS: {'Yes' if self.ssl else 'No'}\n"
)
"File: {filename}\n"
"Addresses: {addrs}\n"
"Names: {names}\n"
"HTTPS: {https}\n".format(
filename=self.filep,
addrs=", ".join(str(addr) for addr in self.addrs),
names=", ".join(self.get_names()),
https="Yes" if self.ssl else "No"))

def __eq__(self, other: Any) -> bool:

def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.filep == other.filep and self.path == other.path and
self.addrs == other.addrs and
@@ -189,12 +191,15 @@ class VirtualHost:

return False

def __hash__(self) -> int:
def __ne__(self, other):
return not self.__eq__(other)

def __hash__(self):
return hash((self.filep, self.path,
tuple(self.addrs), tuple(self.get_names()),
self.ssl, self.enabled, self.modmacro))

def conflicts(self, addrs: Iterable[Addr]) -> bool:
def conflicts(self, addrs):
"""See if vhost conflicts with any of the addrs.

This determines whether or not these addresses would/could overwrite
@@ -213,7 +218,7 @@ class VirtualHost:
return True
return False

def same_server(self, vhost: "VirtualHost", generic: bool = False) -> bool:
def same_server(self, vhost, generic=False):
"""Determines if the vhost is the same 'server'.

Used in redirection - indicates whether or not the two virtual hosts
@@ -246,7 +251,7 @@ class VirtualHost:

# already_found acts to keep everything very conservative.
# Don't allow multiple ip:ports in same set.
already_found: Set[str] = set()
already_found = set() # type: Set[str]

for addr in vhost.addrs:
for local_addr in self.addrs:
@@ -0,0 +1,26 @@
|
||||
# This file contains important security parameters. If you modify this file
|
||||
# manually, Certbot will be unable to automatically provide future security
|
||||
# updates. Instead, Certbot will print and log an error message with a path to
|
||||
# the up-to-date file that you will need to refer to when manually updating
|
||||
# this file.
|
||||
|
||||
SSLEngine on
|
||||
|
||||
# Intermediate configuration, tweak to your needs
|
||||
SSLProtocol all -SSLv2 -SSLv3
|
||||
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
|
||||
SSLHonorCipherOrder on
|
||||
SSLCompression off
|
||||
|
||||
SSLOptions +StrictRequire
|
||||
|
||||
# Add vhost name to log entries:
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
|
||||
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common
|
||||
|
||||
#CustomLog /var/log/apache2/access.log vhost_combined
|
||||
#LogLevel warn
|
||||
#ErrorLog /var/log/apache2/error.log
|
||||
|
||||
# Always ensure Cookies have "Secure" set (JAH 2012/1)
|
||||
#Header edit Set-Cookie (?i)^(.*)(;\s*secure)??((\s*;)?(.*)) "$1; Secure$3$4"
|
||||
@@ -1,12 +1,17 @@
|
||||
""" Distribution specific override class for Arch Linux """
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class ArchConfigurator(configurator.ApacheConfigurator):
|
||||
"""Arch Linux specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf",
|
||||
vhost_files="*.conf",
|
||||
@@ -15,5 +20,12 @@ class ArchConfigurator(configurator.ApacheConfigurator):
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
|
||||
)
|
||||
|
||||
@@ -1,25 +1,27 @@
|
||||
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import cast
|
||||
from typing import List
|
||||
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.compat import os
|
||||
from certbot.errors import MisconfigurationError
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
from certbot import errors
|
||||
from certbot import util
|
||||
from certbot.errors import MisconfigurationError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
"""CentOS specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf.d",
|
||||
vhost_files="*.conf",
|
||||
@@ -29,10 +31,17 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf.d",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
"certbot_apache", os.path.join("_internal", "centos-options-ssl-apache.conf"))
|
||||
)
|
||||
|
||||
def config_test(self) -> None:
|
||||
def config_test(self):
|
||||
"""
|
||||
Override config_test to mitigate configtest error in vanilla installation
|
||||
of mod_ssl in Fedora. The error is caused by non-existent self-signed
|
||||
@@ -44,16 +53,16 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
fedora = os_info[0].lower() == "fedora"
|
||||
|
||||
try:
|
||||
super().config_test()
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
except errors.MisconfigurationError:
|
||||
if fedora:
|
||||
self._try_restart_fedora()
|
||||
else:
|
||||
raise
|
||||
|
||||
def _try_restart_fedora(self) -> None:
|
||||
def _try_restart_fedora(self):
|
||||
"""
|
||||
Tries to restart httpd using systemctl to generate the self signed key pair.
|
||||
Tries to restart httpd using systemctl to generate the self signed keypair.
|
||||
"""
|
||||
|
||||
try:
|
||||
@@ -62,34 +71,33 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super().config_test()
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
|
||||
def _prepare_options(self) -> None:
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization in order to support
|
||||
alternative restart cmd used in CentOS.
|
||||
"""
|
||||
super()._prepare_options()
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for CentOS.")
|
||||
self.options.restart_cmd_alt[0] = self.options.ctl
|
||||
super(CentOSConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd_alt"][0] = self.option("ctl")
|
||||
|
||||
def get_parser(self) -> "CentOSParser":
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return CentOSParser(
|
||||
self.options.server_root, self, self.options.vhost_root, self.version)
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _deploy_cert(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=arguments-differ
|
||||
def _deploy_cert(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
"""
|
||||
Override _deploy_cert in order to ensure that the Apache configuration
|
||||
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
|
||||
that was created by Certbot
|
||||
"""
|
||||
super()._deploy_cert(*args, **kwargs)
|
||||
super(CentOSConfigurator, self)._deploy_cert(*args, **kwargs)
|
||||
if self.version < (2, 4, 0):
|
||||
self._deploy_loadmodule_ssl_if_needed()
|
||||
|
||||
def _deploy_loadmodule_ssl_if_needed(self) -> None:
|
||||
def _deploy_loadmodule_ssl_if_needed(self):
|
||||
"""
|
||||
Add "LoadModule ssl_module <pre-existing path>" to main httpd.conf if
|
||||
it doesn't exist there already.
|
||||
@@ -97,9 +105,9 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
|
||||
loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
correct_ifmods: List[str] = []
|
||||
loadmod_args: List[str] = []
|
||||
loadmod_paths: List[str] = []
|
||||
correct_ifmods = [] # type: List[str]
|
||||
loadmod_args = [] # type: List[str]
|
||||
loadmod_paths = [] # type: List[str]
|
||||
for m in loadmods:
|
||||
noarg_path = m.rpartition("/")[0]
|
||||
path_args = self.parser.get_all_args(noarg_path)
|
||||
@@ -111,13 +119,13 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
"use, and run Certbot again.")
|
||||
raise MisconfigurationError(msg)
|
||||
else:
|
||||
loadmod_args = [arg for arg in path_args if arg]
|
||||
loadmod_args = path_args
|
||||
|
||||
centos_parser: CentOSParser = cast(CentOSParser, self.parser)
|
||||
if centos_parser.not_modssl_ifmodule(noarg_path):
|
||||
if centos_parser.loc["default"] in noarg_path:
|
||||
if self.parser.not_modssl_ifmodule(noarg_path): # pylint: disable=no-member
|
||||
if self.parser.loc["default"] in noarg_path:
|
||||
# LoadModule already in the main configuration file
|
||||
if "ifmodule/" in noarg_path.lower() or "ifmodule[1]" in noarg_path.lower():
|
||||
if ("ifmodule/" in noarg_path.lower() or
|
||||
"ifmodule[1]" in noarg_path.lower()):
|
||||
# It's the first or only IfModule in the file
|
||||
return
|
||||
# Populate the list of known !mod_ssl.c IfModules
|
||||
@@ -148,7 +156,8 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
self.parser.aug.remove(loadmod_path)
|
||||
|
||||
# Create a new IfModule !mod_ssl.c if not already found on path
|
||||
ssl_ifmod = self.parser.get_ifmod(nodir_path, "!mod_ssl.c", beginning=True)[:-1]
|
||||
ssl_ifmod = self.parser.get_ifmod(nodir_path, "!mod_ssl.c",
|
||||
beginning=True)[:-1]
|
||||
if ssl_ifmod not in correct_ifmods:
|
||||
self.parser.add_dir(ssl_ifmod, "LoadModule", loadmod_args)
|
||||
correct_ifmods.append(ssl_ifmod)
|
||||
@@ -158,24 +167,24 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
|
||||
class CentOSParser(parser.ApacheParser):
|
||||
"""CentOS specific ApacheParser override class"""
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
def __init__(self, *args, **kwargs):
|
||||
# CentOS specific configuration file for Apache
|
||||
self.sysconfig_filep: str = "/etc/sysconfig/httpd"
|
||||
super().__init__(*args, **kwargs)
|
||||
self.sysconfig_filep = "/etc/sysconfig/httpd"
|
||||
super(CentOSParser, self).__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self) -> None:
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
# Opportunistic, works if SELinux not enforced
|
||||
super().update_runtime_variables()
|
||||
super(CentOSParser, self).update_runtime_variables()
|
||||
self.parse_sysconfig_var()
|
||||
|
||||
def parse_sysconfig_var(self) -> None:
|
||||
def parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from CentOS configuration file """
|
||||
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
|
||||
for k, v in defines.items():
|
||||
self.variables[k] = v
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
|
||||
def not_modssl_ifmodule(self, path: str) -> bool:
|
||||
def not_modssl_ifmodule(self, path):
|
||||
"""Checks if the provided Augeas path has argument !mod_ssl"""
|
||||
|
||||
if "ifmodule" not in path.lower():
|
||||
|
||||
@@ -1,17 +1,31 @@
|
||||
""" Distribution specific override class for macOS """
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class DarwinConfigurator(configurator.ApacheConfigurator):
|
||||
"""macOS specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/other",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apachectl",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2/other",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
|
||||
)
|
||||
|
||||
@@ -1,30 +1,44 @@
|
||||
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
|
||||
import logging
|
||||
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
from certbot_apache._internal.obj import VirtualHost
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
"""Debian specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/sites-available",
|
||||
vhost_files="*",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
enmod="a2enmod",
|
||||
dismod="a2dismod",
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=True,
|
||||
handle_sites=True,
|
||||
challenge_location="/etc/apache2",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
|
||||
)
|
||||
|
||||
def enable_site(self, vhost: VirtualHost) -> None:
|
||||
def enable_site(self, vhost):
|
||||
"""Enables an available site, Apache reload required.
|
||||
|
||||
.. note:: Does not make sure that the site correctly works or that all
|
||||
@@ -46,7 +60,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
if not os.path.isdir(os.path.dirname(enabled_path)):
|
||||
# For some reason, sites-enabled / sites-available do not exist
|
||||
# Call the parent method
|
||||
return super().enable_site(vhost)
|
||||
return super(DebianConfigurator, self).enable_site(vhost)
|
||||
self.reverter.register_file_creation(False, enabled_path)
|
||||
try:
|
||||
os.symlink(vhost.filep, enabled_path)
|
||||
@@ -56,7 +70,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
# Already in shape
|
||||
vhost.enabled = True
|
||||
return None
|
||||
logger.error(
|
||||
logger.warning(
|
||||
"Could not symlink %s to %s, got error: %s", enabled_path,
|
||||
vhost.filep, err.strerror)
|
||||
errstring = ("Encountered error while trying to enable a " +
|
||||
@@ -69,7 +83,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
self.save_notes += "Enabled site %s\n" % vhost.filep
|
||||
return None
|
||||
|
||||
def enable_mod(self, mod_name: str, temp: bool = False) -> None:
|
||||
def enable_mod(self, mod_name, temp=False):
|
||||
"""Enables module in Apache.
|
||||
|
||||
Both enables and reloads Apache so module is active.
|
||||
@@ -115,16 +129,16 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
# Reload is not necessary as DUMP_RUN_CFG uses latest config.
|
||||
self.parser.update_runtime_variables()
|
||||
|
||||
def _enable_mod_debian(self, mod_name: str, temp: bool) -> None:
|
||||
def _enable_mod_debian(self, mod_name, temp):
|
||||
"""Assumes mods-available, mods-enabled layout."""
|
||||
# Generate reversal command.
|
||||
# Try to be safe here... check that we can probably reverse before
|
||||
# applying enmod command
|
||||
if (self.options.dismod is None or self.options.enmod is None
|
||||
or not util.exe_exists(self.options.dismod)):
|
||||
if not util.exe_exists(self.option("dismod")):
|
||||
raise errors.MisconfigurationError(
|
||||
"Unable to find a2dismod, please make sure a2enmod and "
|
||||
"a2dismod are configured correctly for certbot.")
|
||||
|
||||
self.reverter.register_undo_command(temp, [self.options.dismod, "-f", mod_name])
|
||||
util.run_script([self.options.enmod, mod_name])
|
||||
self.reverter.register_undo_command(
|
||||
temp, [self.option("dismod"), "-f", mod_name])
|
||||
util.run_script([self.option("enmod"), mod_name])
|
||||
|
||||
@@ -1,19 +1,21 @@
|
||||
""" Distribution specific override class for Fedora 29+ """
|
||||
from typing import Any
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
from certbot import errors
|
||||
from certbot import util
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
"""Fedora 29+ specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf.d",
|
||||
vhost_files="*.conf",
|
||||
@@ -23,10 +25,18 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf.d",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
# TODO: eventually newest version of Fedora will need their own config
|
||||
"certbot_apache", os.path.join("_internal", "centos-options-ssl-apache.conf"))
|
||||
)
|
||||
|
||||
def config_test(self) -> None:
|
||||
def config_test(self):
|
||||
"""
|
||||
Override config_test to mitigate configtest error in vanilla installation
|
||||
of mod_ssl in Fedora. The error is caused by non-existent self-signed
|
||||
@@ -34,18 +44,19 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
during the first (re)start of httpd.
|
||||
"""
|
||||
try:
|
||||
super().config_test()
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
except errors.MisconfigurationError:
|
||||
self._try_restart_fedora()
|
||||
|
||||
def get_parser(self) -> "FedoraParser":
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return FedoraParser(
|
||||
self.options.server_root, self, self.options.vhost_root, self.version)
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _try_restart_fedora(self) -> None:
|
||||
def _try_restart_fedora(self):
|
||||
"""
|
||||
Tries to restart httpd using systemctl to generate the self signed key pair.
|
||||
Tries to restart httpd using systemctl to generate the self signed keypair.
|
||||
"""
|
||||
try:
|
||||
util.run_script(['systemctl', 'restart', 'httpd'])
|
||||
@@ -53,37 +64,35 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super().config_test()
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
|
||||
def _prepare_options(self) -> None:
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization to keep using apachectl
|
||||
instead of httpd and so take advantages of this new bash script in newer versions
|
||||
of Fedora to restart httpd.
|
||||
"""
|
||||
super()._prepare_options()
|
||||
self.options.restart_cmd[0] = 'apachectl'
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for Fedora.")
|
||||
self.options.restart_cmd_alt[0] = 'apachectl'
|
||||
self.options.conftest_cmd[0] = 'apachectl'
|
||||
super(FedoraConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd"][0] = 'apachectl'
|
||||
self.options["restart_cmd_alt"][0] = 'apachectl'
|
||||
self.options["conftest_cmd"][0] = 'apachectl'
|
||||
|
||||
|
||||
class FedoraParser(parser.ApacheParser):
|
||||
"""Fedora 29+ specific ApacheParser override class"""
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Fedora 29+ specific configuration file for Apache
|
||||
self.sysconfig_filep = "/etc/sysconfig/httpd"
|
||||
super().__init__(*args, **kwargs)
|
||||
super(FedoraParser, self).__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self) -> None:
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
# Opportunistic, works if SELinux not enforced
|
||||
super().update_runtime_variables()
|
||||
super(FedoraParser, self).update_runtime_variables()
|
||||
self._parse_sysconfig_var()
|
||||
|
||||
def _parse_sysconfig_var(self) -> None:
|
||||
def _parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from Fedora configuration file """
|
||||
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
|
||||
for k, v in defines.items():
|
||||
self.variables[k] = v
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
|
||||
@@ -1,61 +1,75 @@
|
||||
""" Distribution specific override class for Gentoo Linux """
|
||||
from typing import Any
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class GentooConfigurator(configurator.ApacheConfigurator):
|
||||
"""Gentoo specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/vhosts.d",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
restart_cmd_alt=['apache2ctl', 'restart'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2/vhosts.d",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
|
||||
)
|
||||
|
||||
def _prepare_options(self) -> None:
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization in order to support
|
||||
alternative restart cmd used in Gentoo.
|
||||
"""
|
||||
super()._prepare_options()
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for Gentoo.")
|
||||
self.options.restart_cmd_alt[0] = self.options.ctl
|
||||
super(GentooConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd_alt"][0] = self.option("ctl")
|
||||
|
||||
def get_parser(self) -> "GentooParser":
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return GentooParser(
|
||||
self.options.server_root, self, self.options.vhost_root, self.version)
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
|
||||
class GentooParser(parser.ApacheParser):
|
||||
"""Gentoo specific ApacheParser override class"""
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Gentoo specific configuration file for Apache2
|
||||
self.apacheconfig_filep = "/etc/conf.d/apache2"
|
||||
super().__init__(*args, **kwargs)
|
||||
super(GentooParser, self).__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self) -> None:
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
self.parse_sysconfig_var()
|
||||
self.update_modules()
|
||||
|
||||
def parse_sysconfig_var(self) -> None:
|
||||
def parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from Gentoo configuration file """
|
||||
defines = apache_util.parse_define_file(self.apacheconfig_filep,
|
||||
"APACHE2_OPTS")
|
||||
for k, v in defines.items():
|
||||
self.variables[k] = v
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
|
||||
def update_modules(self) -> None:
|
||||
def update_modules(self):
|
||||
"""Get loaded modules from httpd process, and add them to DOM"""
|
||||
mod_cmd = [self.configurator.options.ctl, "modules"]
|
||||
mod_cmd = [self.configurator.option("ctl"), "modules"]
|
||||
matches = apache_util.parse_from_subprocess(mod_cmd, r"(.*)_module")
|
||||
for mod in matches:
|
||||
self.add_mod(mod.strip())
|
||||
|
||||
@@ -1,19 +1,31 @@
|
||||
""" Distribution specific override class for OpenSUSE """
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class OpenSUSEConfigurator(configurator.ApacheConfigurator):
|
||||
"""OpenSUSE specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/vhosts.d",
|
||||
vhost_files="*.conf",
|
||||
ctl="apachectl",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
enmod="a2enmod",
|
||||
dismod="a2dismod",
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2/vhosts.d",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
|
||||
)
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
""" Distribution specific override class for Void Linux """
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
class VoidConfigurator(configurator.ApacheConfigurator):
|
||||
"""Void Linux specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
server_root="/etc/apache",
|
||||
vhost_root="/etc/apache/extra",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/httpd",
|
||||
ctl="apachectl",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
challenge_location="/etc/apache/extra",
|
||||
)
|
||||
@@ -3,36 +3,22 @@ import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import re
|
||||
from typing import Collection
|
||||
from typing import Dict
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Pattern
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Union
|
||||
import sys
|
||||
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import constants
|
||||
import six
|
||||
|
||||
from acme.magic_typing import Dict # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from certbot_apache._internal.configurator import ApacheConfigurator # pragma: no cover
|
||||
|
||||
try:
|
||||
from augeas import Augeas
|
||||
except ImportError: # pragma: no cover
|
||||
Augeas = None
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import constants
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApacheParser:
|
||||
class ApacheParser(object):
|
||||
"""Class handles the fine details of parsing the Apache Configuration.
|
||||
|
||||
.. todo:: Make parsing general... remove sites-available etc...
|
||||
@@ -44,11 +30,11 @@ class ApacheParser:
|
||||
default - user config file, name - NameVirtualHost,
|
||||
|
||||
"""
|
||||
arg_var_interpreter: Pattern = re.compile(r"\$\{[^ \}]*}")
|
||||
fnmatch_chars: Set[str] = {"*", "?", "\\", "[", "]"}
|
||||
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
|
||||
fnmatch_chars = set(["*", "?", "\\", "[", "]"])
|
||||
|
||||
def __init__(self, root: str, configurator: "ApacheConfigurator",
|
||||
vhostroot: str, version: Tuple[int, ...] = (2, 4)) -> None:
|
||||
def __init__(self, root, vhostroot=None, version=(2, 4),
|
||||
configurator=None):
|
||||
# Note: Order is important here.
|
||||
|
||||
# Needed for calling save() with reverter functionality that resides in
|
||||
@@ -57,7 +43,8 @@ class ApacheParser:
|
||||
self.configurator = configurator
|
||||
|
||||
# Initialize augeas
|
||||
self.aug: Augeas = init_augeas()
|
||||
self.aug = None
|
||||
self.init_augeas()
|
||||
|
||||
if not self.check_aug_version():
|
||||
raise errors.NotSupportedError(
|
||||
@@ -65,13 +52,13 @@ class ApacheParser:
|
||||
"version 1.2.0 or higher, please make sure you have you have "
|
||||
"those installed.")
|
||||
|
||||
self.modules: Dict[str, Optional[str]] = {}
|
||||
self.parser_paths: Dict[str, List[str]] = {}
|
||||
self.variables: Dict[str, str] = {}
|
||||
self.modules = set() # type: Set[str]
|
||||
self.parser_paths = {} # type: Dict[str, List[str]]
|
||||
self.variables = {} # type: Dict[str, str]
|
||||
|
||||
# Find configuration root and make sure augeas can parse it.
|
||||
self.root: str = os.path.abspath(root)
|
||||
self.loc: Dict[str, str] = {"root": self._find_config_root()}
|
||||
self.root = os.path.abspath(root)
|
||||
self.loc = {"root": self._find_config_root()}
|
||||
self.parse_file(self.loc["root"])
|
||||
|
||||
if version >= (2, 4):
|
||||
@@ -93,14 +80,31 @@ class ApacheParser:
|
||||
# Must also attempt to parse additional virtual host root
|
||||
if vhostroot:
|
||||
self.parse_file(os.path.abspath(vhostroot) + "/" +
|
||||
self.configurator.options.vhost_files)
|
||||
self.configurator.option("vhost_files"))
|
||||
|
||||
# check to see if there were unparsed define statements
|
||||
if version < (2, 4):
|
||||
if self.find_dir("Define", exclude=False):
|
||||
raise errors.PluginError("Error parsing runtime variables")
|
||||
|
||||
def check_parsing_errors(self, lens: str) -> None:
|
||||
def init_augeas(self):
|
||||
""" Initialize the actual Augeas instance """
|
||||
|
||||
try:
|
||||
import augeas
|
||||
except ImportError: # pragma: no cover
|
||||
raise errors.NoInstallationError("Problem in Augeas installation")
|
||||
|
||||
self.aug = augeas.Augeas(
|
||||
# specify a directory to load our preferred lens from
|
||||
loadpath=constants.AUGEAS_LENS_DIR,
|
||||
# Do not save backup (we do it ourselves), do not load
|
||||
# anything by default
|
||||
flags=(augeas.Augeas.NONE |
|
||||
augeas.Augeas.NO_MODL_AUTOLOAD |
|
||||
augeas.Augeas.ENABLE_SPAN))
|
||||
|
||||
def check_parsing_errors(self, lens):
|
||||
"""Verify Augeas can parse all of the lens files.
|
||||
|
||||
:param str lens: lens to check for errors
|
||||
@@ -126,7 +130,7 @@ class ApacheParser:
|
||||
self.aug.get(path + "/message")))
|
||||
raise errors.PluginError(msg)
|
||||
|
||||
def check_aug_version(self) -> Union[bool, List[str]]:
|
||||
def check_aug_version(self):
|
||||
""" Checks that we have recent enough version of libaugeas.
|
||||
If augeas version is recent enough, it will support case insensitive
|
||||
regexp matching"""
|
||||
@@ -141,7 +145,7 @@ class ApacheParser:
|
||||
self.aug.remove("/test/path")
|
||||
return matches
|
||||
|
||||
def unsaved_files(self) -> Set[str]:
|
||||
def unsaved_files(self):
|
||||
"""Lists files that have modified Augeas DOM but the changes have not
|
||||
been written to the filesystem yet, used by `self.save()` and
|
||||
ApacheConfigurator to check the file state.
|
||||
@@ -180,7 +184,7 @@ class ApacheParser:
|
||||
save_files.add(self.aug.get(path)[6:])
|
||||
return save_files
|
||||
|
||||
def ensure_augeas_state(self) -> None:
|
||||
def ensure_augeas_state(self):
|
||||
"""Makes sure that all Augeas dom changes are written to files to avoid
|
||||
loss of configuration directives when doing additional augeas parsing,
|
||||
causing a possible augeas.load() resulting dom reset
|
||||
@@ -190,7 +194,7 @@ class ApacheParser:
|
||||
self.configurator.save_notes += "(autosave)"
|
||||
self.configurator.save()
|
||||
|
||||
def save(self, save_files: Iterable[str]) -> None:
|
||||
def save(self, save_files):
|
||||
"""Saves all changes to the configuration files.
|
||||
|
||||
save() is called from ApacheConfigurator to handle the parser specific
|
||||
@@ -200,13 +204,7 @@ class ApacheParser:
|
||||
|
||||
"""
|
||||
self.configurator.save_notes = ""
|
||||
|
||||
ex_errs = self.aug.match("/augeas//error")
|
||||
try:
|
||||
self.aug.save()
|
||||
except IOError:
|
||||
self._log_save_errors(ex_errs)
|
||||
raise
|
||||
self.aug.save()
|
||||
|
||||
# Force reload if files were modified
|
||||
# This is needed to recalculate augeas directive span
|
||||
@@ -215,24 +213,21 @@ class ApacheParser:
|
||||
self.aug.remove("/files/"+sf)
|
||||
self.aug.load()
|
||||
|
||||
def _log_save_errors(self, ex_errs: Iterable[str]) -> None:
|
||||
def _log_save_errors(self, ex_errs):
|
||||
"""Log errors due to bad Augeas save.
|
||||
|
||||
:param list ex_errs: Existing errors before save
|
||||
|
||||
"""
|
||||
# Check for the root of save problems
|
||||
new_errs = [e for e in self.aug.match("/augeas//error") if e not in ex_errs]
|
||||
new_errs = self.aug.match("/augeas//error")
|
||||
# logger.error("During Save - %s", mod_conf)
|
||||
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
|
||||
", ".join(err[13:len(err) - 6] for err in new_errs
|
||||
# Only new errors caused by recent save
|
||||
if err not in ex_errs), self.configurator.save_notes)
|
||||
|
||||
for err in new_errs:
|
||||
logger.debug(
|
||||
"Error %s saving %s: %s", self.aug.get(err), err[13:len(err) - 6],
|
||||
self.aug.get(f"{err}/message"))
|
||||
logger.error(
|
||||
"Unable to save files: %s.%s", ", ".join(err[13:len(err) - 6] for err in new_errs),
|
||||
f" Save Notes: {self.configurator.save_notes}" if self.configurator.save_notes else "")
|
||||
|
||||
def add_include(self, main_config: str, inc_path: str) -> None:
|
||||
def add_include(self, main_config, inc_path):
|
||||
"""Add Include for a new configuration file if one does not exist
|
||||
|
||||
:param str main_config: file path to main Apache config file
|
||||
@@ -251,28 +246,28 @@ class ApacheParser:
|
||||
new_file = os.path.basename(inc_path)
|
||||
self.existing_paths.setdefault(new_dir, []).append(new_file)
|
||||
|
||||
def add_mod(self, mod_name: str) -> None:
|
||||
def add_mod(self, mod_name):
|
||||
"""Shortcut for updating parser modules."""
|
||||
if mod_name + "_module" not in self.modules:
|
||||
self.modules[mod_name + "_module"] = None
|
||||
self.modules.add(mod_name + "_module")
|
||||
if "mod_" + mod_name + ".c" not in self.modules:
|
||||
self.modules["mod_" + mod_name + ".c"] = None
|
||||
self.modules.add("mod_" + mod_name + ".c")
|
||||
|
||||
def reset_modules(self) -> None:
|
||||
def reset_modules(self):
|
||||
"""Reset the loaded modules list. This is called from cleanup to clear
|
||||
temporarily loaded modules."""
|
||||
self.modules = {}
|
||||
self.modules = set()
|
||||
self.update_modules()
|
||||
self.parse_modules()
|
||||
|
||||
def parse_modules(self) -> None:
|
||||
def parse_modules(self):
|
||||
"""Iterates on the configuration until no new modules are loaded.
|
||||
|
||||
..todo:: This should be attempted to be done with a binary to avoid
|
||||
the iteration issue. Else... parse and enable mods at same time.
|
||||
|
||||
"""
|
||||
mods: Dict[str, str] = {}
|
||||
mods = set() # type: Set[str]
|
||||
matches = self.find_dir("LoadModule")
|
||||
iterator = iter(matches)
|
||||
# Make sure prev_size != cur_size for do: while: iteration
|
||||
@@ -281,30 +276,31 @@ class ApacheParser:
|
||||
while len(mods) != prev_size:
|
||||
prev_size = len(mods)
|
||||
|
||||
for match_name, match_filename in zip(
|
||||
for match_name, match_filename in six.moves.zip(
|
||||
iterator, iterator):
|
||||
mod_name = self.get_arg(match_name)
|
||||
mod_filename = self.get_arg(match_filename)
|
||||
if mod_name and mod_filename:
|
||||
mods[mod_name] = mod_filename
|
||||
mods[os.path.basename(mod_filename)[:-2] + "c"] = mod_filename
|
||||
mods.add(mod_name)
|
||||
mods.add(os.path.basename(mod_filename)[:-2] + "c")
|
||||
else:
|
||||
logger.debug("Could not read LoadModule directive from Augeas path: %s",
|
||||
match_name[6:])
|
||||
self.modules.update(mods)
|
||||
|
||||
def update_runtime_variables(self) -> None:
|
||||
def update_runtime_variables(self):
|
||||
"""Update Includes, Defines and Includes from httpd config dump data"""
|
||||
|
||||
self.update_defines()
|
||||
self.update_includes()
|
||||
self.update_modules()
|
||||
|
||||
def update_defines(self) -> None:
|
||||
def update_defines(self):
|
||||
"""Updates the dictionary of known variables in the configuration"""
|
||||
self.variables = apache_util.parse_defines(self.configurator.options.ctl)
|
||||
|
||||
def update_includes(self) -> None:
|
||||
self.variables = apache_util.parse_defines(self.configurator.option("ctl"))
|
||||
|
||||
def update_includes(self):
|
||||
"""Get includes from httpd process, and add them to DOM if needed"""
|
||||
|
||||
# Find_dir iterates over configuration for Include and IncludeOptional
|
||||
@@ -312,34 +308,34 @@ class ApacheParser:
|
||||
# configuration files
|
||||
_ = self.find_dir("Include")
|
||||
|
||||
matches = apache_util.parse_includes(self.configurator.options.ctl)
|
||||
matches = apache_util.parse_includes(self.configurator.option("ctl"))
|
||||
if matches:
|
||||
for i in matches:
|
||||
if not self.parsed_in_current(i):
|
||||
self.parse_file(i)
|
||||
|
||||
def update_modules(self) -> None:
|
||||
def update_modules(self):
|
||||
"""Get loaded modules from httpd process, and add them to DOM"""
|
||||
|
||||
matches = apache_util.parse_modules(self.configurator.options.ctl)
|
||||
matches = apache_util.parse_modules(self.configurator.option("ctl"))
|
||||
for mod in matches:
|
||||
self.add_mod(mod.strip())
|
||||
|
||||
def filter_args_num(self, matches: str, args: int) -> List[str]:
|
||||
def filter_args_num(self, matches, args): # pylint: disable=no-self-use
|
||||
"""Filter out directives with specific number of arguments.
|
||||
|
||||
This function makes the assumption that all related arguments are given
|
||||
in order. Thus /files/apache/directive[5]/arg[2] must come immediately
|
||||
after /files/apache/directive[5]/arg[1]. Runs in 1 linear pass.
|
||||
|
||||
:param str matches: Matches of all directives with arg nodes
|
||||
:param string matches: Matches of all directives with arg nodes
|
||||
:param int args: Number of args you would like to filter
|
||||
|
||||
:returns: List of directives that contain # of arguments.
|
||||
(arg is stripped off)
|
||||
|
||||
"""
|
||||
filtered: List[str] = []
|
||||
filtered = []
|
||||
if args == 1:
|
||||
for i, match in enumerate(matches):
|
||||
if match.endswith("/arg"):
|
||||
@@ -356,7 +352,7 @@ class ApacheParser:
|
||||
|
||||
return filtered
|
||||
|
||||
def add_dir_to_ifmodssl(self, aug_conf_path: str, directive: str, args: List[str]) -> None:
|
||||
def add_dir_to_ifmodssl(self, aug_conf_path, directive, args):
|
||||
"""Adds directive and value to IfMod ssl block.
|
||||
|
||||
Adds given directive and value along configuration path within
|
||||
@@ -365,7 +361,7 @@ class ApacheParser:
|
||||
|
||||
:param str aug_conf_path: Desired Augeas config path to add directive
|
||||
:param str directive: Directive you would like to add, e.g. Listen
|
||||
:param args: Values of the directive; list of str (eg. ["443"])
|
||||
:param args: Values of the directive; str "443" or list of str
|
||||
:type args: list
|
||||
|
||||
"""
|
||||
@@ -382,7 +378,7 @@ class ApacheParser:
|
||||
for i, arg in enumerate(args):
|
||||
self.aug.set("%s/arg[%d]" % (nvh_path, i + 1), arg)
|
||||
|
||||
def get_ifmod(self, aug_conf_path: str, mod: str, beginning: bool = False) -> str:
|
||||
def get_ifmod(self, aug_conf_path, mod, beginning=False):
|
||||
"""Returns the path to <IfMod mod> and creates one if it doesn't exist.
|
||||
|
||||
:param str aug_conf_path: Augeas configuration path
|
||||
@@ -404,7 +400,7 @@ class ApacheParser:
|
||||
# Strip off "arg" at end of first ifmod path
|
||||
return if_mods[0].rpartition("arg")[0]
|
||||
|
||||
def create_ifmod(self, aug_conf_path: str, mod: str, beginning: bool = False) -> str:
|
||||
def create_ifmod(self, aug_conf_path, mod, beginning=False):
|
||||
"""Creates a new <IfMod mod> and returns its path.
|
||||
|
||||
:param str aug_conf_path: Augeas configuration path
|
||||
@@ -431,9 +427,7 @@ class ApacheParser:
|
||||
self.aug.set(c_path_arg, mod)
|
||||
return retpath
|
||||
|
||||
def add_dir(
|
||||
self, aug_conf_path: Optional[str], directive: Optional[str], args: Union[List[str], str]
|
||||
) -> None:
|
||||
def add_dir(self, aug_conf_path, directive, args):
|
||||
"""Appends directive to the end fo the file given by aug_conf_path.
|
||||
|
||||
.. note:: Not added to AugeasConfigurator because it may depend
|
||||
@@ -445,7 +439,6 @@ class ApacheParser:
|
||||
:type args: list or str
|
||||
|
||||
"""
|
||||
aug_conf_path = aug_conf_path if aug_conf_path else ""
|
||||
self.aug.set(aug_conf_path + "/directive[last() + 1]", directive)
|
||||
if isinstance(args, list):
|
||||
for i, value in enumerate(args, 1):
|
||||
@@ -454,8 +447,7 @@ class ApacheParser:
|
||||
else:
|
||||
self.aug.set(aug_conf_path + "/directive[last()]/arg", args)
|
||||
|
||||
def add_dir_beginning(self, aug_conf_path: Optional[str], dirname: str,
|
||||
args: Union[List[str], str]) -> None:
|
||||
def add_dir_beginning(self, aug_conf_path, dirname, args):
|
||||
"""Adds the directive to the beginning of defined aug_conf_path.
|
||||
|
||||
:param str aug_conf_path: Augeas configuration path to add directive
|
||||
@@ -463,13 +455,8 @@ class ApacheParser:
|
||||
:param args: Value of the directive. ie. Listen 443, 443 is arg
|
||||
:type args: list or str
|
||||
"""
|
||||
aug_conf_path = aug_conf_path if aug_conf_path else ""
|
||||
first_dir = aug_conf_path + "/directive[1]"
|
||||
if self.aug.get(first_dir):
|
||||
self.aug.insert(first_dir, "directive", True)
|
||||
else:
|
||||
self.aug.set(first_dir, "directive")
|
||||
|
||||
self.aug.insert(first_dir, "directive", True)
|
||||
self.aug.set(first_dir, dirname)
|
||||
if isinstance(args, list):
|
||||
for i, value in enumerate(args, 1):
|
||||
@@ -477,7 +464,7 @@ class ApacheParser:
|
||||
else:
|
||||
self.aug.set(first_dir + "/arg", args)
|
||||
|
||||
def add_comment(self, aug_conf_path: str, comment: str) -> None:
|
||||
def add_comment(self, aug_conf_path, comment):
|
||||
"""Adds the comment to the augeas path
|
||||
|
||||
:param str aug_conf_path: Augeas configuration path to add directive
|
||||
@@ -486,7 +473,7 @@ class ApacheParser:
|
||||
"""
|
||||
self.aug.set(aug_conf_path + "/#comment[last() + 1]", comment)
|
||||
|
||||
def find_comments(self, arg: str, start: Optional[str] = None) -> List[str]:
|
||||
def find_comments(self, arg, start=None):
|
||||
"""Finds a comment with specified content from the provided DOM path
|
||||
|
||||
:param str arg: Comment content to search
|
||||
@@ -508,8 +495,7 @@ class ApacheParser:
|
||||
results.append(comment)
|
||||
return results
|
||||
|
||||
def find_dir(self, directive: str, arg: Optional[str] = None,
|
||||
start: Optional[str] = None, exclude: bool = True) -> List[str]:
|
||||
def find_dir(self, directive, arg=None, start=None, exclude=True):
|
||||
"""Finds directive in the configuration.
|
||||
|
||||
Recursively searches through config files to find directives
|
||||
@@ -537,8 +523,6 @@ class ApacheParser:
|
||||
:param bool exclude: Whether or not to exclude directives based on
|
||||
variables and enabled modules
|
||||
|
||||
:rtype list
|
||||
|
||||
"""
|
||||
# Cannot place member variable in the definition of the function so...
|
||||
if not start:
|
||||
@@ -570,7 +554,7 @@ class ApacheParser:
|
||||
else:
|
||||
arg_suffix = "/*[self::arg=~regexp('%s')]" % case_i(arg)
|
||||
|
||||
ordered_matches: List[str] = []
|
||||
ordered_matches = [] # type: List[str]
|
||||
|
||||
# TODO: Wildcards should be included in alphabetical order
|
||||
# https://httpd.apache.org/docs/2.4/mod/core.html#include
|
||||
@@ -587,7 +571,7 @@ class ApacheParser:
|
||||
|
||||
return ordered_matches
|
||||
|
||||
def get_all_args(self, match: str) -> List[Optional[str]]:
|
||||
def get_all_args(self, match):
|
||||
"""
|
||||
Tries to fetch all arguments for a directive. See get_arg.
|
||||
|
||||
@@ -597,11 +581,11 @@ class ApacheParser:
|
||||
"""
|
||||
|
||||
if match[-1] != "/":
|
||||
match = match + "/"
|
||||
match = match+"/"
|
||||
allargs = self.aug.match(match + '*')
|
||||
return [self.get_arg(arg) for arg in allargs]
|
||||
|
||||
def get_arg(self, match: str) -> Optional[str]:
|
||||
def get_arg(self, match):
|
||||
"""Uses augeas.get to get argument value and interprets result.
|
||||
|
||||
This also converts all variables and parameters appropriately.
|
||||
@@ -616,7 +600,6 @@ class ApacheParser:
|
||||
# e.g. strip now, not later
|
||||
if not value:
|
||||
return None
|
||||
|
||||
value = value.strip("'\"")
|
||||
|
||||
variables = ApacheParser.arg_var_interpreter.findall(value)
|
||||
@@ -630,15 +613,15 @@ class ApacheParser:
|
||||
|
||||
return value
|
||||
|
||||
def get_root_augpath(self) -> str:
|
||||
def get_root_augpath(self):
|
||||
"""
|
||||
Returns the Augeas path of root configuration.
|
||||
"""
|
||||
return get_aug_path(self.loc["root"])
|
||||
|
||||
def exclude_dirs(self, matches: Iterable[str]) -> List[str]:
|
||||
def exclude_dirs(self, matches):
|
||||
"""Exclude directives that are not loaded into the configuration."""
|
||||
filters = [("ifmodule", self.modules.keys()), ("ifdefine", self.variables)]
|
||||
filters = [("ifmodule", self.modules), ("ifdefine", self.variables)]
|
||||
|
||||
valid_matches = []
|
||||
|
||||
@@ -650,7 +633,7 @@ class ApacheParser:
|
||||
valid_matches.append(match)
|
||||
return valid_matches
|
||||
|
||||
def _pass_filter(self, match: str, filter_: Tuple[str, Collection[str]]) -> bool:
|
||||
def _pass_filter(self, match, filter_):
|
||||
"""Determine if directive passes a filter.
|
||||
|
||||
:param str match: Augeas path
|
||||
@@ -679,26 +662,7 @@ class ApacheParser:
|
||||
|
||||
return True
|
||||
|
||||
def standard_path_from_server_root(self, arg: str) -> str:
|
||||
"""Ensure paths are consistent and absolute
|
||||
|
||||
:param str arg: Argument of directive
|
||||
|
||||
:returns: Standardized argument path
|
||||
:rtype: str
|
||||
"""
|
||||
# Remove beginning and ending quotes
|
||||
arg = arg.strip("'\"")
|
||||
|
||||
# Standardize the include argument based on server root
|
||||
if not arg.startswith("/"):
|
||||
# Normpath will condense ../
|
||||
arg = os.path.normpath(os.path.join(self.root, arg))
|
||||
else:
|
||||
arg = os.path.normpath(arg)
|
||||
return arg
|
||||
|
||||
def _get_include_path(self, arg: Optional[str]) -> Optional[str]:
|
||||
def _get_include_path(self, arg):
|
||||
"""Converts an Apache Include directive into Augeas path.
|
||||
|
||||
Converts an Apache Include directive argument into an Augeas
|
||||
@@ -718,9 +682,16 @@ class ApacheParser:
|
||||
# if matchObj.group() != arg:
|
||||
# logger.error("Error: Invalid regexp characters in %s", arg)
|
||||
# return []
|
||||
if arg is None:
|
||||
return None # pragma: no cover
|
||||
arg = self.standard_path_from_server_root(arg)
|
||||
|
||||
# Remove beginning and ending quotes
|
||||
arg = arg.strip("'\"")
|
||||
|
||||
# Standardize the include argument based on server root
|
||||
if not arg.startswith("/"):
|
||||
# Normpath will condense ../
|
||||
arg = os.path.normpath(os.path.join(self.root, arg))
|
||||
else:
|
||||
arg = os.path.normpath(arg)
|
||||
|
||||
# Attempts to add a transform to the file if one does not already exist
|
||||
if os.path.isdir(arg):
|
||||
@@ -734,7 +705,7 @@ class ApacheParser:
|
||||
split_arg = arg.split("/")
|
||||
for idx, split in enumerate(split_arg):
|
||||
if any(char in ApacheParser.fnmatch_chars for char in split):
|
||||
# Turn it into an augeas regex
|
||||
# Turn it into a augeas regex
|
||||
# TODO: Can this instead be an augeas glob instead of regex
|
||||
split_arg[idx] = ("* [label()=~regexp('%s')]" %
|
||||
self.fnmatch_to_re(split))
|
||||
@@ -744,13 +715,14 @@ class ApacheParser:
|
||||
|
||||
return get_aug_path(arg)
|
||||
|
||||
def fnmatch_to_re(self, clean_fn_match: str) -> str:
|
||||
def fnmatch_to_re(self, clean_fn_match): # pylint: disable=no-self-use
|
||||
"""Method converts Apache's basic fnmatch to regular expression.
|
||||
|
||||
Assumption - Configs are assumed to be well-formed and only writable by
|
||||
privileged users.
|
||||
|
||||
https://apr.apache.org/docs/apr/2.0/apr__fnmatch_8h_source.html
|
||||
http://apache2.sourcearchive.com/documentation/2.2.16-6/apr__fnmatch_8h_source.html
|
||||
|
||||
:param str clean_fn_match: Apache style filename match, like globs
|
||||
|
||||
@@ -758,10 +730,13 @@ class ApacheParser:
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
if sys.version_info < (3, 6):
|
||||
# This strips off final /Z(?ms)
|
||||
return fnmatch.translate(clean_fn_match)[:-7]
|
||||
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
|
||||
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
|
||||
|
||||
def parse_file(self, filepath: str) -> None:
|
||||
def parse_file(self, filepath):
|
||||
"""Parse file with Augeas
|
||||
|
||||
Checks to see if file_path is parsed by Augeas
|
||||
@@ -788,7 +763,7 @@ class ApacheParser:
|
||||
self._add_httpd_transform(filepath)
|
||||
self.aug.load()
|
||||
|
||||
def parsed_in_current(self, filep: Optional[str]) -> bool:
|
||||
def parsed_in_current(self, filep):
|
||||
"""Checks if the file path is parsed by current Augeas parser config
|
||||
ie. returns True if the file is found on a path that's found in live
|
||||
Augeas configuration.
|
||||
@@ -798,11 +773,9 @@ class ApacheParser:
|
||||
:returns: True if file is parsed in existing configuration tree
|
||||
:rtype: bool
|
||||
"""
|
||||
if not filep:
|
||||
return False # pragma: no cover
|
||||
return self._parsed_by_parser_paths(filep, self.parser_paths)
|
||||
|
||||
def parsed_in_original(self, filep: Optional[str]) -> bool:
|
||||
def parsed_in_original(self, filep):
|
||||
"""Checks if the file path is parsed by existing Apache config.
|
||||
ie. returns True if the file is found on a path that matches Include or
|
||||
IncludeOptional statement in the Apache configuration.
|
||||
@@ -812,20 +785,18 @@ class ApacheParser:
|
||||
:returns: True if file is parsed in existing configuration tree
|
||||
:rtype: bool
|
||||
"""
|
||||
if not filep:
|
||||
return False # pragma: no cover
|
||||
return self._parsed_by_parser_paths(filep, self.existing_paths)
|
||||
|
||||
def _parsed_by_parser_paths(self, filep: str, paths: Mapping[str, List[str]]) -> bool:
|
||||
def _parsed_by_parser_paths(self, filep, paths):
|
||||
"""Helper function that searches through provided paths and returns
|
||||
True if file path is found in the set"""
|
||||
for directory in paths:
|
||||
for directory in paths.keys():
|
||||
for filename in paths[directory]:
|
||||
if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
|
||||
return True
|
||||
return False
|
||||
|
-    def _check_path_actions(self, filepath: str) -> Tuple[bool, bool]:
+    def _check_path_actions(self, filepath):
         """Determine actions to take with a new augeas path

         This helper function will return a tuple that defines
@@ -850,7 +821,7 @@ class ApacheParser:
             remove_old = False
         return use_new, remove_old

-    def _remove_httpd_transform(self, filepath: str) -> None:
+    def _remove_httpd_transform(self, filepath):
         """Remove path from Augeas transform

         :param str filepath: filepath to remove
@@ -865,7 +836,7 @@ class ApacheParser:
             self.aug.remove(remove_inc[0])
         self.parser_paths.pop(remove_dirname)

-    def _add_httpd_transform(self, incl: str) -> None:
+    def _add_httpd_transform(self, incl):
         """Add a transform to Augeas.

         This function will correctly add a transform to augeas
@@ -875,7 +846,7 @@ class ApacheParser:
         :param str incl: filepath to include for transform

         """
-        last_include: str = self.aug.match("/augeas/load/Httpd/incl [last()]")
+        last_include = self.aug.match("/augeas/load/Httpd/incl [last()]")
         if last_include:
             # Insert a new node immediately after the last incl
             self.aug.insert(last_include[0], "incl", False)
@@ -893,7 +864,7 @@ class ApacheParser:
             self.parser_paths[os.path.dirname(incl)] = [
                 os.path.basename(incl)]

-    def standardize_excl(self) -> None:
+    def standardize_excl(self):
         """Standardize the excl arguments for the Httpd lens in Augeas.

         Note: Hack!
@@ -925,16 +896,16 @@ class ApacheParser:

         self.aug.load()

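The `/augeas/load/Httpd/incl` nodes manipulated here are how Augeas learns which files the Httpd lens should parse. A rough equivalent against the raw python-augeas API, illustrative only (it assumes the `augeas` bindings and an Apache configuration are installed):

```
import augeas

aug = augeas.Augeas(flags=augeas.Augeas.NO_MODL_AUTOLOAD)

# Declare the lens once, then append an incl node per file to parse;
# "[last()+1]" creates a new node after the current last one.
aug.set("/augeas/load/Httpd/lens", "Httpd.lns")
aug.set("/augeas/load/Httpd/incl[last()+1]", "/etc/apache2/apache2.conf")
aug.load()

# Files matched by incl nodes become visible under the /files tree:
print(aug.match("/files/etc/apache2/apache2.conf/*"))
```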
-    def _set_locations(self) -> Dict[str, str]:
+    def _set_locations(self):
         """Set default location for directives.

         Locations are given as file_paths
         .. todo:: Make sure that files are included

         """
-        default: str = self.loc["root"]
+        default = self.loc["root"]

-        temp: str = os.path.join(self.root, "ports.conf")
+        temp = os.path.join(self.root, "ports.conf")
         if os.path.isfile(temp):
             listen = temp
             name = temp
@@ -944,7 +915,7 @@ class ApacheParser:

         return {"default": default, "listen": listen, "name": name}

-    def _find_config_root(self) -> str:
+    def _find_config_root(self):
         """Find the Apache Configuration Root file."""
         location = ["apache2.conf", "httpd.conf", "conf/httpd.conf"]
         for name in location:
@@ -953,7 +924,7 @@ class ApacheParser:
         raise errors.NoInstallationError("Could not find configuration root")

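`_find_config_root` just probes a few well-known filenames under the server root. A self-contained sketch of the same probe order, with a generic exception standing in for `errors.NoInstallationError`:

```
import os

def find_config_root(server_root):
    # Probe the candidate names in order and return the first hit.
    for name in ("apache2.conf", "httpd.conf", "conf/httpd.conf"):
        candidate = os.path.join(server_root, name)
        if os.path.isfile(candidate):
            return candidate
    raise RuntimeError("Could not find configuration root")

# On Debian-style systems this resolves to /etc/apache2/apache2.conf,
# on RHEL-style systems to /etc/httpd/conf/httpd.conf.
```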
-def case_i(string: str) -> str:
+def case_i(string):
     """Returns case insensitive regex.

     Returns a sloppy, but necessary version of a case insensitive regex.
@@ -965,30 +936,14 @@ def case_i(string: str) -> str:
     :param str string: string to make case i regex

     """
-    return "".join("[" + c.upper() + c.lower() + "]"
-                   if c.isalpha() else c for c in re.escape(string))
+    return "".join(["[" + c.upper() + c.lower() + "]"
+                    if c.isalpha() else c for c in re.escape(string)])

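The generator and list-comprehension forms of `case_i` produce identical output; only the list variant materializes an intermediate list. For example:

```
import re

def case_i(string):
    # Each alphabetic character becomes a two-character class like [Ii];
    # everything else passes through (escaped for regex safety).
    return "".join(["[" + c.upper() + c.lower() + "]"
                    if c.isalpha() else c for c in re.escape(string)])

assert case_i("Include") == "[Ii][Nn][Cc][Ll][Uu][Dd][Ee]"
assert re.search(case_i("servername"), "ServerName example.org")
```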
-def get_aug_path(file_path: str) -> str:
+def get_aug_path(file_path):
     """Return augeas path for full filepath.

     :param str file_path: Full filepath

     """
     return "/files%s" % file_path
-
-
-def init_augeas() -> Augeas:
-    """ Initialize the actual Augeas instance """
-
-    if not Augeas:  # pragma: no cover
-        raise errors.NoInstallationError("Problem in Augeas installation")
-
-    return Augeas(
-        # specify a directory to load our preferred lens from
-        loadpath=constants.AUGEAS_LENS_DIR,
-        # Do not save backup (we do it ourselves), do not load
-        # anything by default
-        flags=(Augeas.NONE |
-               Augeas.NO_MODL_AUTOLOAD |
-               Augeas.ENABLE_SPAN))
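`get_aug_path` is trivial but load-bearing: Augeas exposes every parsed file under the `/files` prefix, so the helper maps filesystem paths into that tree:

```
def get_aug_path(file_path):
    return "/files%s" % file_path

assert get_aug_path("/etc/apache2/apache2.conf") == "/files/etc/apache2/apache2.conf"
```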
@@ -1,14 +1,7 @@
 """ParserNode utils"""
-from typing import Any
-from typing import Dict
-from typing import Iterable
-from typing import Optional
-from typing import Tuple
-
 from certbot_apache._internal.interfaces import ParserNode

-def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Dict[str, Any]:
+def validate_kwargs(kwargs, required_names):
     """
     Ensures that the kwargs dict has all the expected values. This function modifies
     the kwargs dictionary, and hence the returned dictionary should be used instead
@@ -18,7 +11,7 @@ def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Di
     :param list required_names: List of required parameter names.
     """

-    validated_kwargs: Dict[str, Any] = {}
+    validated_kwargs = dict()
     for name in required_names:
         try:
             validated_kwargs[name] = kwargs.pop(name)
@@ -32,8 +25,7 @@ def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Di
     return validated_kwargs

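A sketch of the pop-and-verify contract shown above (the exception raised for a missing or leftover key is outside this hunk, so a plain `ValueError` stands in for the real error type):

```
def validate_kwargs(kwargs, required_names):
    validated_kwargs = dict()
    for name in required_names:
        try:
            validated_kwargs[name] = kwargs.pop(name)
        except KeyError:
            raise ValueError("Required argument missing: %s" % name)
    if kwargs:  # anything left over was not expected
        raise ValueError("Unknown argument(s): %s" % ", ".join(kwargs))
    return validated_kwargs

ok = validate_kwargs({"ancestor": None, "dirty": False}, ["ancestor", "dirty"])
assert ok == {"ancestor": None, "dirty": False}
```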
-def parsernode_kwargs(kwargs: Dict[str, Any]
-                      ) -> Tuple[Optional[ParserNode], bool, Optional[str], Dict[str, Any]]:
+def parsernode_kwargs(kwargs):
     """
     Validates keyword arguments for ParserNode. This function modifies the kwargs
     dictionary, and hence the returned dictionary should be used instead in the
@@ -63,7 +55,7 @@ def parsernode_kwargs(kwargs: Dict[str, Any]
     return kwargs["ancestor"], kwargs["dirty"], kwargs["filepath"], kwargs["metadata"]


-def commentnode_kwargs(kwargs: Dict[str, Any]) -> Tuple[Optional[str], Dict[str, str]]:
+def commentnode_kwargs(kwargs):
     """
     Validates keyword arguments for CommentNode and sets the default values for
     optional kwargs. This function modifies the kwargs dictionary, and hence the
@@ -98,8 +90,7 @@ def commentnode_kwargs(kwargs: Dict[str, Any]) -> Tuple[Optional[str],
     return comment, kwargs


-def directivenode_kwargs(kwargs: Dict[str, Any]
-                         ) -> Tuple[Optional[str], Tuple[str, ...], bool, Dict[str, Any]]:
+def directivenode_kwargs(kwargs):
     """
     Validates keyword arguments for DirectiveNode and BlockNode and sets the
     default values for optional kwargs. This function modifies the kwargs

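These docstrings all stress that the helpers modify the kwargs dictionary in place and that callers must use the returned values. A minimal illustration of why, given that validation is built on `dict.pop`:

```
def consume(kwargs, required):
    # pop() mutates the caller's dict as a side effect of validation
    return {name: kwargs.pop(name) for name in required}

original = {"ancestor": None, "dirty": False}
validated = consume(original, ["ancestor", "dirty"])
assert original == {}                # the input mapping was emptied in place
assert validated["dirty"] is False   # so use the returned dict instead
```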
Some files were not shown because too many files have changed in this diff