Compare commits

3 Commits: test-remov ... travis-tes

| Author | SHA1 | Date |
|---|---|---|
|  | fcb85becb9 |  |
|  | ca1573ea32 |  |
|  | 8af28d83a6 |  |
@@ -2,13 +2,12 @@
trigger:
# When changing these triggers, please ensure the documentation under
# "Running tests in CI" is still correct.
- azure-test-*
- test-*
pr: none

variables:
# We don't publish our Docker images in this pipeline, but when building them
# for testing, let's use the nightly tag.
dockerTag: nightly

stages:
- template: templates/stages/test-and-package-stage.yml
jobs:
# Any addition here should be reflected in the advanced and release pipelines.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml
.azure-pipelines/advanced.yml (Normal file, 18 lines changed)
@@ -0,0 +1,18 @@
# Advanced pipeline for running our full test suite on protected branches.
trigger:
- '*.x'
pr: none
# This pipeline is also nightly run on master
schedules:
- cron: "0 4 * * *"
displayName: Nightly build
branches:
include:
- master
always: true

jobs:
# Any addition here should be reflected in the advanced-test and release pipelines.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml
@@ -1,8 +1,8 @@
trigger: none
trigger:
- master
pr:
- master
- '*.x'

jobs:
- template: templates/jobs/standard-tests-jobs.yml

- template: templates/tests-suite.yml
@@ -1,18 +0,0 @@
# Nightly pipeline running each day for master.
trigger: none
pr: none
schedules:
- cron: "30 4 * * *"
displayName: Nightly build
branches:
include:
- master
always: true

variables:
dockerTag: nightly

stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/deploy-stage.yml
- template: templates/stages/notify-failure-stage.yml
@@ -1,18 +1,13 @@
# Release pipeline to run our full test suite, build artifacts, and deploy them
# for GitHub release tags.
# Release pipeline to build and deploy Certbot for Windows for GitHub release tags
trigger:
tags:
include:
- v*
pr: none

variables:
dockerTag: ${{variables['Build.SourceBranchName']}}

stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/changelog-stage.yml
- template: templates/stages/deploy-stage.yml
parameters:
snapReleaseChannel: beta
- template: templates/stages/notify-failure-stage.yml
jobs:
# Any addition here should be reflected in the advanced and advanced-test pipelines.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml
- template: templates/changelog.yml
.azure-pipelines/templates/changelog.yml (Normal file, 14 lines changed)
@@ -0,0 +1,14 @@
jobs:
- job: changelog
pool:
vmImage: vs2017-win2016
steps:
- bash: |
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
displayName: Prepare changelog
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: changelog
displayName: Publish changelog
.azure-pipelines/templates/installer-tests.yml (Normal file, 61 lines changed)
@@ -0,0 +1,61 @@
jobs:
- job: installer_build
pool:
vmImage: vs2017-win2016
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.7
architecture: x86
addToPath: true
- script: python windows-installer/construct.py
displayName: Build Certbot installer
- task: CopyFiles@2
inputs:
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
contents: '*.exe'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: windows-installer
displayName: Publish Windows installer
- job: installer_run
dependsOn: installer_build
strategy:
matrix:
win2019:
imageName: windows-2019
win2016:
imageName: vs2017-win2016
pool:
vmImage: $(imageName)
steps:
- powershell: |
$currentVersion = $PSVersionTable.PSVersion
if ($currentVersion.Major -ne 5) {
throw "Powershell version is not 5.x"
}
condition: eq(variables['imageName'], 'vs2017-win2016')
displayName: Check Powershell 5.x is used in vs2017-win2016
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: windows-installer
path: $(Build.SourcesDirectory)/bin
displayName: Retrieve Windows installer
- script: |
py -3 -m venv venv
venv\Scripts\python tools\pip_install.py -e certbot-ci
displayName: Prepare Certbot-CI
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
displayName: Run windows installer integration tests
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
displayName: Run certbot integration tests
@@ -1,94 +0,0 @@
jobs:
- job: extended_test
variables:
- name: IMAGE_NAME
value: ubuntu-18.04
- name: PYTHON_VERSION
value: 3.9
- group: certbot-common
strategy:
matrix:
linux-py36:
PYTHON_VERSION: 3.6
TOXENV: py36
linux-py37:
PYTHON_VERSION: 3.7
TOXENV: py37
linux-py38:
PYTHON_VERSION: 3.8
TOXENV: py38
linux-py37-nopin:
PYTHON_VERSION: 3.7
TOXENV: py37
CERTBOT_NO_PIN: 1
linux-external-mock:
TOXENV: external-mock
linux-boulder-v1-integration-certbot-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-certbot-oldest
ACME_SERVER: boulder-v1
linux-boulder-v2-integration-certbot-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-certbot-oldest
ACME_SERVER: boulder-v2
linux-boulder-v1-integration-nginx-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-nginx-oldest
ACME_SERVER: boulder-v1
linux-boulder-v2-integration-nginx-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-nginx-oldest
ACME_SERVER: boulder-v2
linux-boulder-v1-py36-integration:
PYTHON_VERSION: 3.6
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py36-integration:
PYTHON_VERSION: 3.6
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py37-integration:
PYTHON_VERSION: 3.7
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py37-integration:
PYTHON_VERSION: 3.7
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py38-integration:
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py38-integration:
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py39-integration:
PYTHON_VERSION: 3.9
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py39-integration:
PYTHON_VERSION: 3.9
TOXENV: integration
ACME_SERVER: boulder-v2
nginx-compat:
TOXENV: nginx_compat
linux-integration-rfc2136:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.8
TOXENV: integration-dns-rfc2136
docker-dev:
TOXENV: docker_dev
macos-farmtest-apache2:
# We run one of these test farm tests on macOS to help ensure the
# tests continue to work on the platform.
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.8
TOXENV: test-farm-apache2
farmtest-sdists:
PYTHON_VERSION: 3.7
TOXENV: test-farm-sdists
pool:
vmImage: $(IMAGE_NAME)
steps:
- template: ../steps/tox-steps.yml
@@ -1,77 +0,0 @@
jobs:
- job: snaps_build
pool:
vmImage: ubuntu-18.04
strategy:
matrix:
amd64:
SNAP_ARCH: amd64
# Do not run the heavy non-amd64 builds for test branches
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
armhf:
SNAP_ARCH: armhf
arm64:
SNAP_ARCH: arm64
timeoutInMinutes: 0
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
sudo snap install --classic snapcraft
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadSecureFile@1
name: credentials
inputs:
secureFile: launchpad-credentials
- script: |
set -e
git config --global user.email "$(Build.RequestedForEmail)"
git config --global user.name "$(Build.RequestedFor)"
mkdir -p ~/.local/share/snapcraft/provider/launchpad
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
python3 tools/snap/build_remote.py ALL --archs ${SNAP_ARCH} --timeout 19800
displayName: Build snaps
- script: |
set -e
mv *.snap $(Build.ArtifactStagingDirectory)
mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
displayName: Prepare artifacts
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: snaps_$(SNAP_ARCH)
displayName: Store snaps artifacts
- job: snap_dns_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-18.04
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_amd64
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- script: |
set -e
python3 -m venv venv
venv/bin/python tools/pipstrap.py
venv/bin/python tools/pip_install.py -e certbot-ci
displayName: Prepare Certbot-CI
- script: |
set -e
sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
displayName: Test DNS plugins snaps
@@ -1,80 +0,0 @@
jobs:
- job: test
variables:
PYTHON_VERSION: 3.9
strategy:
matrix:
macos-py36:
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.6
TOXENV: py36
macos-py39:
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.9
TOXENV: py39
windows-py36:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.6
TOXENV: py36
windows-py38-cover:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.8
TOXENV: py38-cover
windows-integration-certbot:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.8
TOXENV: integration-certbot
linux-oldest-tests-1:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
linux-oldest-tests-2:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: '{dns,nginx}-oldest'
linux-py36:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: py36
linux-py39-cover:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.9
TOXENV: py39-cover
linux-py39-lint:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.9
TOXENV: lint
linux-py39-mypy:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.9
TOXENV: mypy
linux-integration:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: pebble
apache-compat:
IMAGE_NAME: ubuntu-18.04
TOXENV: apache_compat
# le-modification can be moved to the extended test suite once
# https://github.com/certbot/certbot/issues/8742 is resolved.
le-modification:
IMAGE_NAME: ubuntu-18.04
TOXENV: modification
apacheconftest:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: apacheconftest-with-pebble
nginxroundtrip:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: nginxroundtrip
pool:
vmImage: $(IMAGE_NAME)
steps:
- template: ../steps/tox-steps.yml
- job: test_sphinx_builds
pool:
vmImage: ubuntu-20.04
steps:
- template: ../steps/sphinx-steps.yml
@@ -1,19 +0,0 @@
stages:
- stage: Changelog
jobs:
- job: prepare
pool:
vmImage: vs2017-win2016
steps:
# If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
- bash: |
set -e
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
displayName: Prepare changelog
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
artifact: changelog
displayName: Publish changelog
@@ -1,106 +0,0 @@
parameters:
- name: snapReleaseChannel
type: string
default: edge
values:
- edge
- beta

stages:
- stage: Deploy
jobs:
# This job relies on credentials used to publish the Certbot snaps. This
# credential file was created by running:
#
# snapcraft logout
# snapcraft login (provide the shared snapcraft credentials when prompted)
# snapcraft export-login --channels=beta,edge snapcraft.cfg
#
# Then the file was added as a secure file in Azure pipelines
# with the name snapcraft.cfg by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
# including authorizing the file in all pipelines as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#how-do-i-authorize-a-secure-file-for-use-in-all-pipelines.
#
# This file has a maximum lifetime of one year and the current
# file will expire on 2021-07-28 which is also tracked by
# https://github.com/certbot/certbot/issues/7931. The file will
# need to be updated before then to prevent automated deploys
# from breaking.
#
# Revoking these credentials can be done by changing the password of the
# account used to generate the credentials. See
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
# more info.
- job: publish_snap
pool:
vmImage: ubuntu-18.04
variables:
- group: certbot-common
strategy:
matrix:
amd64:
SNAP_ARCH: amd64
arm32v6:
SNAP_ARCH: armhf
arm64v8:
SNAP_ARCH: arm64
steps:
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
sudo snap install --classic snapcraft
displayName: Install dependencies
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_$(SNAP_ARCH)
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- task: DownloadSecureFile@1
name: snapcraftCfg
inputs:
secureFile: snapcraft.cfg
- bash: |
set -e
snapcraft login --with $(snapcraftCfg.secureFilePath)
for SNAP_FILE in snap/*.snap; do
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
done
displayName: Publish to Snap store
- job: publish_docker
pool:
vmImage: ubuntu-18.04
strategy:
matrix:
amd64:
DOCKER_ARCH: amd64
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker_$(DOCKER_ARCH)
path: $(Build.SourcesDirectory)
displayName: Retrieve Docker images
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
displayName: Load Docker images
- task: Docker@2
inputs:
command: login
# The credentials used here are for the shared certbotbot account
# on Docker Hub. The credentials are stored in a service account
# which was created by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
# The name given to this service account must match the value
# given to containerRegistry below. "Grant access to all
# pipelines" should also be checked. To revoke these
# credentials, we can change the password on the certbotbot
# Docker Hub account or remove the account from the
# Certbot organization on Docker Hub.
containerRegistry: docker-hub
displayName: Login to Docker Hub
- bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
displayName: Deploy the Docker images
@@ -1,19 +0,0 @@
stages:
- stage: On_Failure
jobs:
- job: notify_mattermost
variables:
- group: certbot-common
pool:
vmImage: ubuntu-20.04
steps:
- bash: |
set -e
MESSAGE="\
---\n\
##### Azure Pipeline
*Repo* $(Build.Repository.ID) - *Pipeline* $(Build.DefinitionName) #$(Build.BuildNumber) - *Branch/PR* $(Build.SourceBranchName)\n\
:warning: __Pipeline has failed__: [Link to the build](https://dev.azure.com/$(Build.Repository.ID)/_build/results?buildId=$(Build.BuildId)&view=results)\n\n\
---"
curl -i -X POST --data-urlencode "payload={\"text\":\"${MESSAGE}\"}" "$(MATTERMOST_URL)"
condition: failed()
@@ -1,4 +0,0 @@
stages:
- stage: TestAndPackage
jobs:
- template: ../jobs/packaging-jobs.yml
@@ -1,23 +0,0 @@
steps:
- bash: |
FINAL_STATUS=0
declare -a FAILED_BUILDS
python3 -m venv .venv
source .venv/bin/activate
python tools/pipstrap.py
for doc_path in */docs
do
echo ""
echo "##[group]Building $doc_path"
pip install -q -e $doc_path/..[docs]
if ! sphinx-build -W --keep-going -b html $doc_path $doc_path/_build/html; then
FINAL_STATUS=1
FAILED_BUILDS[${#FAILED_BUILDS[@]}]="${doc_path%/docs}"
fi
echo "##[endgroup]"
done
if [[ $FINAL_STATUS -ne 0 ]]; then
echo "##[error]The following builds failed: ${FAILED_BUILDS[*]}"
exit 1
fi
displayName: Build Sphinx Documentation
@@ -1,57 +0,0 @@
steps:
# We run brew update because we've seen attempts to install an older version
# of a package fail. See
# https://github.com/actions/virtual-environments/issues/3165.
- bash: |
set -e
brew update
brew install augeas
condition: startswith(variables['IMAGE_NAME'], 'macOS')
displayName: Install MacOS dependencies
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
python-dev \
gcc \
libaugeas0 \
libssl-dev \
libffi-dev \
ca-certificates \
nginx-light \
openssl
sudo systemctl stop nginx
condition: startswith(variables['IMAGE_NAME'], 'ubuntu')
displayName: Install Linux dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: $(PYTHON_VERSION)
addToPath: true
# tools/pip_install.py is used to pin packages to a known working version
# except in tests where the environment variable CERTBOT_NO_PIN is set.
# virtualenv is listed here explicitly to make sure it is upgraded when
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
# set, pip updates dependencies it thinks are already satisfied to avoid some
# problems with its lack of real dependency resolution.
- bash: |
set -e
python tools/pipstrap.py
python tools/pip_install.py -I tox virtualenv
displayName: Install runtime dependencies
- task: DownloadSecureFile@1
name: testFarmPem
inputs:
secureFile: azure-test-farm.pem
condition: contains(variables['TOXENV'], 'test-farm')
- bash: |
set -e
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
env
python -m tox
env:
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
AWS_EC2_PEM_FILE: $(testFarmPem.secureFilePath)
displayName: Run tox
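For readers less fluent in sed, the TARGET_BRANCH logic in the tox step above can be sketched in Python. This is only an illustration of the same transformation: the environment variable names mirror the Azure Pipelines predefined variables used in the script, and the sample values in the comments are hypothetical.

```python
import os
import re


def target_branch() -> str:
    # Mirror of the sed expression: drop a leading refs/heads/ or refs/tags/.
    branch = re.sub(r"refs/(heads|tags)/", "", os.environ.get("BUILD_SOURCEBRANCH", ""))
    # A PR build overrides this with the branch the PR targets.
    return os.environ.get("SYSTEM_PULLREQUEST_TARGETBRANCH") or branch


# Hypothetical examples:
#   BUILD_SOURCEBRANCH=refs/heads/test-foo            -> "test-foo"
#   BUILD_SOURCEBRANCH=refs/tags/v1.14.0              -> "v1.14.0"
#   SYSTEM_PULLREQUEST_TARGETBRANCH=master (PR build) -> "master"
```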
.azure-pipelines/templates/tests-suite.yml (Normal file, 39 lines changed)
@@ -0,0 +1,39 @@
jobs:
- job: test
strategy:
matrix:
macos-py27:
IMAGE_NAME: macOS-10.14
PYTHON_VERSION: 2.7
TOXENV: py27
macos-py38:
IMAGE_NAME: macOS-10.14
PYTHON_VERSION: 3.8
TOXENV: py38
windows-py35:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.5
TOXENV: py35
windows-py37-cover:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.7
TOXENV: py37-cover
windows-integration-certbot:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.7
TOXENV: integration-certbot
PYTEST_ADDOPTS: --numprocesses 4
pool:
vmImage: $(IMAGE_NAME)
steps:
- bash: brew install augeas
condition: startswith(variables['IMAGE_NAME'], 'macOS')
displayName: Install Augeas
- task: UsePythonVersion@0
inputs:
versionSpec: $(PYTHON_VERSION)
addToPath: true
- script: python tools/pip_install.py -U tox coverage
displayName: Install dependencies
- script: python -m tox
displayName: Run tox
@@ -8,4 +8,5 @@
.git
.tox
venv
venv3
docs
@@ -1,18 +0,0 @@
# https://editorconfig.org/

root = true

[*]
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf

[*.py]
indent_style = space
indent_size = 4
charset = utf-8
max_line_length = 100

[*.yaml]
indent_style = space
indent_size = 2
.envrc (12 lines changed)
@@ -1,12 +0,0 @@
# This file is just a nicety for developers who use direnv. When you cd under
# the Certbot repo, Certbot's virtual environment will be automatically
# activated and then deactivated when you cd elsewhere. Developers have to have
# direnv set up and run `direnv allow` to allow this file to execute on their
# system. You can find more information at https://direnv.net/.
. venv/bin/activate
# direnv doesn't support modifying PS1 so we unset it to squelch the error
# it'll otherwise print about this being done in the activate script. See
# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
# prompt to change like it normally does, see
# https://github.com/direnv/direnv/wiki/Python#restoring-the-ps1.
unset PS1
.gitignore (vendored, 16 lines changed)
@@ -4,12 +4,13 @@
build/
dist*/
/venv*/
/kgs/
/.tox/
/releases*/
/log*
letsencrypt.log
certbot.log
poetry.lock
letsencrypt-auto-source/letsencrypt-auto.sig.lzma.base64

# coverage
.coverage
@@ -30,6 +31,12 @@ tags
# auth --cert-path --chain-path
/*.pem

# letstest
tests/letstest/letest-*/
tests/letstest/*.pem
tests/letstest/venv/
tests/letstest/venv3/

.venv

# pytest cache
@@ -51,10 +58,3 @@ parts
prime
stage
*.snap
snap-constraints.txt
qemu-*
certbot-dns*/certbot-dns*_amd64*.txt
certbot-dns*/certbot-dns*_arm*.txt
/certbot_amd64*.txt
/certbot_arm*.txt
certbot-dns*/snap
@@ -1,5 +1,6 @@
[settings]
skip_glob=venv*
skip=letsencrypt-auto-source
force_sort_within_sections=True
force_single_line=True
order_by_type=False
@@ -8,10 +8,7 @@ jobs=0

# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
# CERTBOT COMMENT
# This is needed for pylint to import linter_plugin.py since
# https://github.com/PyCQA/pylint/pull/3396.
init-hook="import pylint.config, os, sys; sys.path.append(os.path.dirname(pylint.config.PYLINTRC))"
#init-hook=

# Profiled execution.
profile=no
@@ -257,7 +254,7 @@ ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis
ignored-modules=pkg_resources,confargparse,argparse
ignored-modules=pkg_resources,confargparse,argparse,six.moves,six.moves.urllib
# import errors ignored only in 1.4.4
# https://bitbucket.org/logilab/pylint/commits/cd000904c9e2
.travis.yml (Normal file, 92 lines changed)
@@ -0,0 +1,92 @@
language: python
dist: xenial

cache:
directories:
- $HOME/.cache/pip

before_script:
# On Travis, the fastest parallelization for integration tests has proved to be 4.
- 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
# Use Travis retry feature for farm tests since they are flaky
- 'if [[ "$TOXENV" == "travis-test-farm"* ]]; then export TRAVIS_RETRY=travis_retry; fi'
- export TOX_TESTENV_PASSENV=TRAVIS
- 'if [[ "$SNAP" == true ]]; then snap/local/build_and_install.sh; fi'

# Only build pushes to the master branch, PRs, and branches beginning with
# `test-`, `travis-test-`, or of the form `digit(s).digit(s).x` or
# `vdigit(s).digit(s).digit(s)`. As documented at
# https://docs.travis-ci.com/user/customizing-the-build/#safelisting-or-blocklisting-branches,
# this includes tags so pushing tags of the form `vdigit(s).digit(s).digit(s)`
# will also trigger tests. This reduces the number of simultaneous Travis runs,
# which speeds turnaround time on review since there is a cap on the number
# of simultaneous runs.
branches:
# When changing these branches, please ensure the documentation under
# "Running tests in CI" is still correct.
only:
- master
- /^\d+\.\d+\.x$/ # this matches our point release branches
- /^v\d+\.\d+\.\d+$/ # this matches our release tags
- /^(travis-)?test-.*$/
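As a quick illustration of the safelist above, the following sketch (plain Python `re`; the branch and tag names are hypothetical) shows which pushes would trigger a build:

```python
import re

# The three regex entries from the Travis "branches: only" safelist.
patterns = [r"^\d+\.\d+\.x$", r"^v\d+\.\d+\.\d+$", r"^(travis-)?test-.*$"]
names = ["master", "1.14.x", "v1.14.0", "test-lint-fix", "travis-test-farm", "feature/foo"]

for name in names:
    matched = name == "master" or any(re.match(p, name) for p in patterns)
    print(f"{name}: {'built' if matched else 'skipped'}")

# master, 1.14.x, v1.14.0, test-lint-fix and travis-test-farm are built;
# feature/foo is skipped.
```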
# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
not-on-master: &not-on-master
if: NOT (type = push AND branch = master)

# Jobs for the extended test suite are executed for cron jobs and pushes to
# non-development branches.
extended-test-suite: &extended-test-suite
if: type = cron OR (type = push AND branch != master)

matrix:
include:
- python: "3.7"
env:
- TOXENV=travis-test-farm-apache2
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
<<: *extended-test-suite

# container-based infrastructure
sudo: false

addons:
apt:
packages: # Keep in sync with letsencrypt-auto-source/pieces/bootstrappers/deb_common.sh and Boulder.
- python-dev
- gcc
- libaugeas0
- libssl-dev
- libffi-dev
- ca-certificates
# For certbot-nginx integration testing
- nginx-light
- openssl

# tools/pip_install.py is used to pin packages to a known working version
# except in tests where the environment variable CERTBOT_NO_PIN is set.
# virtualenv is listed here explicitly to make sure it is upgraded when
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
# set, pip updates dependencies it thinks are already satisfied to avoid some
# problems with its lack of real dependency resolution.
install: 'tools/pip_install.py -I tox virtualenv'
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
# script command. It is set only to `travis_retry` during farm tests, in
# order to trigger the Travis retry feature, and compensate the inherent
# flakiness of these specific tests.
script: '$TRAVIS_RETRY tox'

notifications:
email: false
irc:
if: NOT branch =~ ^(travis-)?test-.*$
channels:
# This is set to a secure variable to prevent forks from sending
# notifications. This value was created by installing
# https://github.com/travis-ci/travis.rb and running
# `travis encrypt "chat.freenode.net#certbot-devel"`.
- secure: "EWW66E2+KVPZyIPR8ViENZwfcup4Gx3/dlimmAZE0WuLwxDCshBBOd3O8Rf6pBokEoZlXM5eDT6XdyJj8n0DLslgjO62pExdunXpbcMwdY7l1ELxX2/UbnDTE6UnPYa09qVBHNG7156Z6yE0x2lH4M9Ykvp0G0cubjPQHylAwo0="
on_cancel: never
on_success: never
on_failure: always
AUTHORS.md (12 lines changed)
@@ -1,7 +1,6 @@
Authors
=======

* [Aaron Gable](https://github.com/aarongable)
* [Aaron Zirbes](https://github.com/aaronzirbes)
* Aaron Zuehlke
* Ada Lovelace
@@ -37,8 +36,7 @@ Authors
* [Blake Griffith](https://github.com/cowlicks)
* [Brad Warren](https://github.com/bmw)
* [Brandon Kraft](https://github.com/kraftbj)
* [Brandon Kreisel](https://github.com/BKreisel)
* [Brian Heim](https://github.com/brianlheim)
* [Brandon Kreisel](https://github.com/kraftbj)
* [Cameron Steel](https://github.com/Tugzrida)
* [Ceesjan Luiten](https://github.com/quinox)
* [Chad Whitacre](https://github.com/whit537)
@@ -61,9 +59,7 @@ Authors
* [DanCld](https://github.com/DanCld)
* [Daniel Albers](https://github.com/AID)
* [Daniel Aleksandersen](https://github.com/da2x)
* [Daniel Almasi](https://github.com/almasen)
* [Daniel Convissor](https://github.com/convissor)
* [Daniel "Drex" Drexler](https://github.com/aeturnum)
* [Daniel Huang](https://github.com/dhuang)
* [Dave Guarino](https://github.com/daguar)
* [David cz](https://github.com/dave-cz)
@@ -88,7 +84,6 @@ Authors
* [Felix Schwarz](https://github.com/FelixSchwarz)
* [Felix Yan](https://github.com/felixonmars)
* [Filip Ochnik](https://github.com/filipochnik)
* [Florian Klink](https://github.com/flokli)
* [Francois Marier](https://github.com/fmarier)
* [Frank](https://github.com/Frankkkkk)
* [Frederic BLANC](https://github.com/fblanc)
@@ -151,13 +146,11 @@ Authors
* [Lior Sabag](https://github.com/liorsbg)
* [Lipis](https://github.com/lipis)
* [lord63](https://github.com/lord63)
* [Lorenzo Fundaró](https://github.com/lfundaro)
* [Luca Beltrame](https://github.com/lbeltrame)
* [Luca Ebach](https://github.com/lucebac)
* [Luca Olivetti](https://github.com/olivluca)
* [Luke Rogers](https://github.com/lukeroge)
* [Maarten](https://github.com/mrtndwrd)
* [Mads Jensen](https://github.com/atombrella)
* [Maikel Martens](https://github.com/krukas)
* [Malte Janduda](https://github.com/MalteJ)
* [Mantas Mikulėnas](https://github.com/grawity)
@@ -209,7 +202,6 @@ Authors
* [Pierre Jaury](https://github.com/kaiyou)
* [Piotr Kasprzyk](https://github.com/kwadrat)
* [Prayag Verma](https://github.com/pra85)
* [Rasesh Patel](https://github.com/raspat1)
* [Reinaldo de Souza Jr](https://github.com/juniorz)
* [Remi Rampin](https://github.com/remram44)
* [Rémy HUBSCHER](https://github.com/Natim)
@@ -217,7 +209,6 @@ Authors
* [Richard Barnes](https://github.com/r-barnes)
* [Richard Panek](https://github.com/kernelpanek)
* [Robert Buchholz](https://github.com/rbu)
* [Robert Dailey](https://github.com/pahrohfit)
* [Robert Habermann](https://github.com/frennkie)
* [Robert Xiao](https://github.com/nneonneo)
* [Roland Shoemaker](https://github.com/rolandshoemaker)
@@ -243,7 +234,6 @@ Authors
* [Spencer Bliven](https://github.com/sbliven)
* [Stacey Sheldon](https://github.com/solidgoldbomb)
* [Stavros Korokithakis](https://github.com/skorokithakis)
* [Ștefan Talpalaru](https://github.com/stefantalpalaru)
* [Stefan Weil](https://github.com/stweil)
* [Steve Desmond](https://github.com/stevedesmond-ca)
* [sydneyli](https://github.com/sydneyli)
@@ -11,7 +11,7 @@ to the Sphinx generated docs is provided below.

[1] https://github.com/blog/1184-contributing-guidelines
[2] https://docutils.sourceforge.io/docs/user/rst/quickref.html#hyperlink-targets
[2] http://docutils.sourceforge.net/docs/user/rst/quickref.html#hyperlink-targets

-->
@@ -1,5 +1,5 @@
# This Dockerfile builds an image for development.
FROM ubuntu:focal
FROM debian:buster

# Note: this only exposes the port to other docker containers.
EXPOSE 80 443
@@ -8,14 +8,13 @@ WORKDIR /opt/certbot/src

COPY . .
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install apache2 git python3-dev \
python3-venv gcc libaugeas0 libssl-dev libffi-dev ca-certificates \
openssl nginx-light -y --no-install-recommends && \
apt-get install apache2 git python3-dev python3-venv gcc libaugeas0 \
libssl-dev libffi-dev ca-certificates openssl nginx-light -y && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* \
/tmp/* \
/var/tmp/*

RUN VENV_NAME="../venv" python3 tools/venv.py
RUN VENV_NAME="../venv3" python3 tools/venv3.py

ENV PATH /opt/certbot/venv/bin:$PATH
ENV PATH /opt/certbot/venv3/bin:$PATH
@@ -7,7 +7,7 @@ questions.
## My operating system is (include version):

## I installed Certbot with (snap, OS package manager, pip, certbot-auto, etc):
## I installed Certbot with (certbot-auto, OS package manager, pip, etc):

## I ran this command and it produced this output:
@@ -6,6 +6,7 @@ This module is an implementation of the `ACME protocol`_.

"""
import sys
import warnings

# This code exists to keep backwards compatibility with people using acme.jose
# before it became the standalone josepy package.
@@ -5,19 +5,18 @@ import functools
import hashlib
import logging
import socket
from typing import Type

from cryptography.hazmat.primitives import hashes  # type: ignore
import josepy as jose
from OpenSSL import crypto
from OpenSSL import SSL  # type: ignore # https://github.com/python/typeshed/issues/2052
import requests
import six
from OpenSSL import SSL  # type: ignore # https://github.com/python/typeshed/issues/2052
from OpenSSL import crypto

from acme import crypto_util
from acme import errors
from acme import fields
from acme.mixins import ResourceMixin
from acme.mixins import TypeMixin
from acme.mixins import ResourceMixin, TypeMixin

logger = logging.getLogger(__name__)
@@ -25,12 +24,12 @@ logger = logging.getLogger(__name__)
class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json
    """ACME challenge."""
    TYPES: dict = {}
    TYPES = {}  # type: dict

    @classmethod
    def from_json(cls, jobj):
        try:
            return super().from_json(jobj)
            return super(Challenge, cls).from_json(jobj)
        except jose.UnrecognizedTypeError as error:
            logger.debug(error)
            return UnrecognizedChallenge.from_json(jobj)
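The pairs of lines in this hunk are the Python 3 and Python 2 compatible spellings of the same constructs: a PEP 526 variable annotation versus a `# type:` comment, and zero-argument `super()` versus the explicit two-argument form. A minimal sketch with hypothetical class names:

```python
class Base:
    @classmethod
    def from_json(cls, jobj):
        return cls(jobj)


class Py3Style(Base):
    # PEP 526 variable annotation (Python 3 only).
    TYPES: dict = {}

    def __init__(self, jobj):
        self.jobj = jobj

    @classmethod
    def from_json(cls, jobj):
        # Zero-argument super(): Python 3 only.
        return super().from_json(jobj)


class Py2CompatStyle(Base):
    # Type comment form, also valid under Python 2.
    TYPES = {}  # type: dict

    def __init__(self, jobj):
        self.jobj = jobj

    @classmethod
    def from_json(cls, jobj):
        # Explicit class and cls arguments: works on Python 2 and 3.
        return super(Py2CompatStyle, cls).from_json(jobj)
```

Both classes behave identically at runtime on Python 3; only the syntax compatibility differs.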
@@ -39,7 +38,7 @@ class Challenge(jose.TypedJSONObjectWithFields):
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json
    """ACME challenge response."""
    TYPES: dict = {}
    TYPES = {}  # type: dict
    resource_type = 'challenge'
    resource = fields.Resource(resource_type)
@@ -58,7 +57,7 @@ class UnrecognizedChallenge(Challenge):
    """

    def __init__(self, jobj):
        super().__init__()
        super(UnrecognizedChallenge, self).__init__()
        object.__setattr__(self, "jobj", jobj)

    def to_partial_json(self):
@@ -141,20 +140,21 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
        return True

    def to_partial_json(self):
        jobj = super().to_partial_json()
        jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
        jobj.pop('keyAuthorization', None)
        return jobj


class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class KeyAuthorizationChallenge(_TokenChallenge):
    """Challenge based on Key Authorization.

    :param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
        that will be used to generate ``response``.
        that will be used to generate `response`.
    :param str typ: type of the challenge
    """
    typ: str = NotImplemented
    response_cls: Type[KeyAuthorizationChallengeResponse] = NotImplemented
    typ = NotImplemented
    response_cls = NotImplemented
    thumbprint_hash_function = (
        KeyAuthorizationChallengeResponse.thumbprint_hash_function)
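This hunk swaps between the class-statement `metaclass=` keyword and the `six.add_metaclass` decorator. Both produce a class whose metaclass is `abc.ABCMeta`; a minimal sketch with hypothetical names:

```python
import abc

import six  # only needed for the Python 2 compatible spelling


class ChallengePy3(metaclass=abc.ABCMeta):
    """Python 3 syntax: metaclass keyword in the class statement."""

    @abc.abstractmethod
    def validation(self, account_key):
        ...


@six.add_metaclass(abc.ABCMeta)
class ChallengePy2Compat(object):
    """six decorator: works on Python 2 and 3, same resulting metaclass."""

    @abc.abstractmethod
    def validation(self, account_key):
        ...


assert type(ChallengePy3) is abc.ABCMeta
assert type(ChallengePy2Compat) is abc.ABCMeta
```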
@@ -4,38 +4,48 @@ import collections
import datetime
from email.utils import parsedate_tz
import heapq
import http.client as http_client
import logging
import re
import sys
import time
from typing import cast
from typing import Dict
from typing import List
from typing import Set
from typing import Text
from typing import Union

import josepy as jose
import OpenSSL
import requests
from requests.adapters import HTTPAdapter
from requests.utils import parse_header_links
from requests_toolbelt.adapters.source import SourceAddressAdapter
import six
from six.moves import http_client

from acme import crypto_util
from acme import errors
from acme import jws
from acme import messages
from acme.magic_typing import Dict
from acme.magic_typing import List
from acme.magic_typing import Set
from acme.magic_typing import Text
from acme.mixins import VersionedLEACMEMixin

logger = logging.getLogger(__name__)

# Prior to Python 2.7.9 the stdlib SSL module did not allow a user to configure
# many important security related options. On these platforms we use PyOpenSSL
# for SSL, which does allow these options to be configured.
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
if sys.version_info < (2, 7, 9):  # pragma: no cover
    try:
        requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()  # type: ignore
    except AttributeError:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()

DEFAULT_NETWORK_TIMEOUT = 45

DER_CONTENT_TYPE = 'application/pkix-cert'


class ClientBase:
class ClientBase(object):
    """ACME client base object.

    :ivar messages.Directory directory:
@@ -114,9 +124,8 @@
        """
        return self.update_registration(regr, update={'status': 'deactivated'})

    def deactivate_authorization(self,
                                 authzr: messages.AuthorizationResource
                                 ) -> messages.AuthorizationResource:
    def deactivate_authorization(self, authzr):
        # type: (messages.AuthorizationResource) -> messages.AuthorizationResource
        """Deactivate authorization.

        :param messages.AuthorizationResource authzr: The Authorization resource
@@ -191,7 +200,7 @@
        when = parsedate_tz(retry_after)
        if when is not None:
            try:
                tz_secs = datetime.timedelta(when[-1] if when[-1] is not None else 0)
                tz_secs = datetime.timedelta(when[-1] if when[-1] else 0)
                return datetime.datetime(*when[:7]) - tz_secs
            except (ValueError, OverflowError):
                pass
@@ -250,10 +259,10 @@ class Client(ClientBase):
        if net is None:
            net = ClientNetwork(key, alg=alg, verify_ssl=verify_ssl)

        if isinstance(directory, str):
        if isinstance(directory, six.string_types):
            directory = messages.Directory.from_json(
                net.get(directory).json())
        super().__init__(directory=directory,
        super(Client, self).__init__(directory=directory,
                         net=net, acme_version=1)

    def register(self, new_reg=None):
@@ -426,7 +435,7 @@ class Client(ClientBase):

        """
        assert max_attempts > 0
        attempts: Dict[messages.AuthorizationResource, int] = collections.defaultdict(int)
        attempts = collections.defaultdict(int)  # type: Dict[messages.AuthorizationResource, int]
        exhausted = set()

        # priority queue with datetime.datetime (based on Retry-After) as key,
@@ -438,7 +447,7 @@ class Client(ClientBase):
        heapq.heapify(waiting)
        # mapping between original Authorization Resource and the most
        # recently updated one
        updated = {authzr: authzr for authzr in authzrs}
        updated = dict((authzr, authzr) for authzr in authzrs)

        while waiting:
            # find the smallest Retry-After, and sleep if necessary
@@ -465,7 +474,7 @@ class Client(ClientBase):
                    exhausted.add(authzr)

        if exhausted or any(authzr.body.status == messages.STATUS_INVALID
                            for authzr in updated.values()):
                            for authzr in six.itervalues(updated)):
            raise errors.PollError(exhausted, updated)

        updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
@@ -539,7 +548,7 @@ class Client(ClientBase):
        :rtype: `list` of `OpenSSL.crypto.X509` wrapped in `.ComparableX509`

        """
        chain: List[jose.ComparableX509] = []
        chain = []  # type: List[jose.ComparableX509]
        uri = certr.cert_chain_uri
        while uri is not None and len(chain) < max_length:
            response, cert = self._get_cert(uri)
@@ -577,7 +586,7 @@ class ClientV2(ClientBase):
        :param .messages.Directory directory: Directory Resource
        :param .ClientNetwork net: Client network.
        """
        super().__init__(directory=directory,
        super(ClientV2, self).__init__(directory=directory,
                         net=net, acme_version=2)

    def new_account(self, new_account):
@@ -627,7 +636,7 @@ class ClientV2(ClientBase):
        """
        # https://github.com/certbot/certbot/issues/6155
        new_regr = self._get_v2_account(regr)
        return super().update_registration(new_regr, update)
        return super(ClientV2, self).update_registration(new_regr, update)

    def _get_v2_account(self, regr):
        self.net.account = None
@@ -724,13 +733,11 @@ class ClientV2(ClientBase):
            raise errors.ValidationError(failed)
        return orderr.update(authorizations=responses)

    def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
    def finalize_order(self, orderr, deadline):
        """Finalize an order and obtain a certificate.

        :param messages.OrderResource orderr: order to finalize
        :param datetime.datetime deadline: when to stop polling and timeout
        :param bool fetch_alternative_chains: whether to also fetch alternative
            certificate chains

        :returns: finalized order
        :rtype: messages.OrderResource
@@ -747,13 +754,8 @@ class ClientV2(ClientBase):
            if body.error is not None:
                raise errors.IssuanceError(body.error)
            if body.certificate is not None:
                certificate_response = self._post_as_get(body.certificate)
                orderr = orderr.update(body=body, fullchain_pem=certificate_response.text)
                if fetch_alternative_chains:
                    alt_chains_urls = self._get_links(certificate_response, 'alternate')
                    alt_chains = [self._post_as_get(url).text for url in alt_chains_urls]
                    orderr = orderr.update(alternative_fullchains_pem=alt_chains)
                return orderr
                certificate_response = self._post_as_get(body.certificate).text
                return orderr.update(body=body, fullchain_pem=certificate_response)
        raise errors.TimeoutError()

    def revoke(self, cert, rsn):
@@ -783,22 +785,8 @@
        new_args = args[:1] + (None,) + args[1:]
        return self._post(*new_args, **kwargs)

    def _get_links(self, response, relation_type):
        """
        Retrieves all Link URIs of relation_type from the response.
        :param requests.Response response: The requests HTTP response.
        :param str relation_type: The relation type to filter by.
        """
        # Can't use response.links directly because it drops multiple links
        # of the same relation type, which is possible in RFC8555 responses.
        if 'Link' not in response.headers:
            return []
        links = parse_header_links(response.headers['Link'])
        return [l['url'] for l in links
                if 'rel' in l and 'url' in l and l['rel'] == relation_type]

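The comment in the removed `_get_links` helper notes that `requests`' `Response.links` keys links by relation type and therefore drops duplicates. A small sketch of the underlying behaviour, using `requests.utils.parse_header_links` on a hypothetical `Link` header carrying two alternate chains:

```python
from requests.utils import parse_header_links

# Hypothetical Link header as an RFC 8555 certificate response might send it:
# two alternate chains plus one other relation.
header = ('<https://example.test/chain/1>;rel="alternate", '
          '<https://example.test/chain/2>;rel="alternate", '
          '<https://example.test/dir>;rel="index"')

links = parse_header_links(header)
alternates = [l["url"] for l in links if l.get("rel") == "alternate"]
print(alternates)  # ['https://example.test/chain/1', 'https://example.test/chain/2']

# requests' Response.links would key this by rel and keep only one "alternate"
# entry, which is why the helper parsed the raw header itself.
```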
class BackwardsCompatibleClientV2:
class BackwardsCompatibleClientV2(object):
    """ACME client wrapper that tends towards V2-style calls, but
    supports V1 servers.

@@ -820,7 +808,6 @@ class BackwardsCompatibleClientV2:
    def __init__(self, net, key, server):
        directory = messages.Directory.from_json(net.get(server).json())
        self.acme_version = self._acme_version_from_directory(directory)
        self.client: Union[Client, ClientV2]
        if self.acme_version == 1:
            self.client = Client(directory, key=key, net=net)
        else:
@@ -840,18 +827,16 @@ class BackwardsCompatibleClientV2:
            if check_tos_cb is not None:
                check_tos_cb(tos)
        if self.acme_version == 1:
            client_v1 = cast(Client, self.client)
            regr = client_v1.register(regr)
            regr = self.client.register(regr)
            if regr.terms_of_service is not None:
                _assess_tos(regr.terms_of_service)
                return client_v1.agree_to_tos(regr)
                return self.client.agree_to_tos(regr)
            return regr
        else:
            client_v2 = cast(ClientV2, self.client)
            if "terms_of_service" in client_v2.directory.meta:
                _assess_tos(client_v2.directory.meta.terms_of_service)
            if "terms_of_service" in self.client.directory.meta:
                _assess_tos(self.client.directory.meta.terms_of_service)
                regr = regr.update(terms_of_service_agreed=True)
            return client_v2.new_account(regr)
            return self.client.new_account(regr)

    def new_order(self, csr_pem):
        """Request a new Order object from the server.
@@ -869,32 +854,28 @@ class BackwardsCompatibleClientV2:

        """
        if self.acme_version == 1:
            client_v1 = cast(Client, self.client)
            csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
            # pylint: disable=protected-access
            dnsNames = crypto_util._pyopenssl_cert_or_req_all_names(csr)
            authorizations = []
            for domain in dnsNames:
                authorizations.append(client_v1.request_domain_challenges(domain))
                authorizations.append(self.client.request_domain_challenges(domain))
            return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
        return cast(ClientV2, self.client).new_order(csr_pem)
        return self.client.new_order(csr_pem)

    def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
    def finalize_order(self, orderr, deadline):
        """Finalize an order and obtain a certificate.

        :param messages.OrderResource orderr: order to finalize
        :param datetime.datetime deadline: when to stop polling and timeout
        :param bool fetch_alternative_chains: whether to also fetch alternative
            certificate chains

        :returns: finalized order
        :rtype: messages.OrderResource

        """
        if self.acme_version == 1:
            client_v1 = cast(Client, self.client)
            csr_pem = orderr.csr_pem
            certr = client_v1.request_issuance(
            certr = self.client.request_issuance(
                jose.ComparableX509(
                    OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)),
                orderr.authorizations)
@@ -902,7 +883,7 @@ class BackwardsCompatibleClientV2:
            chain = None
            while datetime.datetime.now() < deadline:
                try:
                    chain = client_v1.fetch_chain(certr)
                    chain = self.client.fetch_chain(certr)
                    break
                except errors.Error:
                    time.sleep(1)
@@ -917,8 +898,7 @@ class BackwardsCompatibleClientV2:
            chain = crypto_util.dump_pyopenssl_chain(chain).decode()

            return orderr.update(fullchain_pem=(cert + chain))
        return cast(ClientV2, self.client).finalize_order(
            orderr, deadline, fetch_alternative_chains)
        return self.client.finalize_order(orderr, deadline)

    def revoke(self, cert, rsn):
        """Revoke certificate.
@@ -944,10 +924,10 @@ class BackwardsCompatibleClientV2:
        Always return False for ACMEv1 servers, as it doesn't use External Account Binding."""
        if self.acme_version == 1:
            return False
        return cast(ClientV2, self.client).external_account_required()
        return self.client.external_account_required()


class ClientNetwork:
class ClientNetwork(object):
    """Wrapper around requests that signs POSTs for authentication.

    Also adds user agent, and handles Content-Type.
@@ -977,7 +957,7 @@ class ClientNetwork:
        self.account = account
        self.alg = alg
        self.verify_ssl = verify_ssl
        self._nonces: Set[Text] = set()
        self._nonces = set()  # type: Set[Text]
        self.user_agent = user_agent
        self.session = requests.Session()
        self._default_timeout = timeout
@@ -1137,7 +1117,6 @@ class ClientNetwork:

        # If content is DER, log the base64 of it instead of raw bytes, to keep
        # binary data out of the logs.
        debug_content: Union[bytes, str]
        if response.headers.get("Content-Type") == DER_CONTENT_TYPE:
            debug_content = base64.b64encode(response.content)
        else:
@@ -5,15 +5,15 @@ import logging
import os
import re
import socket
from typing import Callable
from typing import Tuple
from typing import Union

import josepy as jose
from OpenSSL import crypto
from OpenSSL import SSL  # type: ignore # https://github.com/python/typeshed/issues/2052

from acme import errors
from acme.magic_typing import Callable
from acme.magic_typing import Tuple
from acme.magic_typing import Union

logger = logging.getLogger(__name__)
@@ -27,7 +27,7 @@ logger = logging.getLogger(__name__)
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD  # type: ignore


class _DefaultCertSelection:
class _DefaultCertSelection(object):
    def __init__(self, certs):
        self.certs = certs
@@ -36,7 +36,7 @@ class _DefaultCertSelection:
        return self.certs.get(server_name, None)


class SSLSocket:  # pylint: disable=too-few-public-methods
class SSLSocket(object):  # pylint: disable=too-few-public-methods
    """SSL wrapper for sockets.

    :ivar socket sock: Original wrapped socket.
@@ -93,7 +93,7 @@ class SSLSocket: # pylint: disable=too-few-public-methods
            new_context.set_alpn_select_callback(self.alpn_selection)
            connection.set_context(new_context)

    class FakeConnection:
    class FakeConnection(object):
        """Fake OpenSSL.SSL.Connection."""

        # pylint: disable=missing-function-docstring
@@ -166,9 +166,9 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
            " from {0}:{1}".format(
                source_address[0],
                source_address[1]
            ) if any(source_address) else ""
            ) if socket_kwargs else ""
        )
        socket_tuple: Tuple[str, int] = (host, port)
        socket_tuple = (host, port)  # type: Tuple[str, int]
        sock = socket.create_connection(socket_tuple, **socket_kwargs)  # type: ignore
    except socket.error as error:
        raise errors.Error(error)
@@ -186,7 +186,6 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
        raise errors.Error(error)
    return client_ssl.get_peer_certificate()


def make_csr(private_key_pem, domains, must_staple=False):
    """Generate a CSR containing a list of domains as subjectAltNames.

@@ -218,7 +217,6 @@ def make_csr(private_key_pem, domains, must_staple=False):
    return crypto.dump_certificate_request(
        crypto.FILETYPE_PEM, csr)


def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
    common_name = loaded_cert_or_req.get_subject().CN
    sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)
@@ -227,7 +225,6 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
        return sans
    return [common_name] + [d for d in sans if d != common_name]


def _pyopenssl_cert_or_req_san(cert_or_req):
    """Get Subject Alternative Names from certificate or CSR using pyOpenSSL.

@@ -256,7 +253,7 @@ def _pyopenssl_cert_or_req_san(cert_or_req):

    if isinstance(cert_or_req, crypto.X509):
        # pylint: disable=line-too-long
        func: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]] = crypto.dump_certificate
        func = crypto.dump_certificate  # type: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]]
    else:
        func = crypto.dump_certificate_request
    text = func(crypto.FILETYPE_TEXT, cert_or_req).decode("utf-8")
@@ -320,7 +317,6 @@ def gen_ss_cert(key, domains, not_before=None,
    cert.sign(key, "sha256")
    return cert


def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
    """Dump certificate chain into a bundle.

@@ -28,8 +28,13 @@ class NonceError(ClientError):

class BadNonce(NonceError):
"""Bad nonce error."""
def __init__(self, nonce, error, *args):
super().__init__(*args)
def __init__(self, nonce, error, *args, **kwargs):
# MyPy complains here that there is too many arguments for BaseException constructor.
# This is an error fixed in typeshed, see https://github.com/python/mypy/issues/4183
# The fix is included in MyPy>=0.740, but upgrading it would bring dozen of errors due to
# new types definitions. So we ignore the error until the code base is fixed to match
# with MyPy>=0.740 referential.
super(BadNonce, self).__init__(*args, **kwargs)  # type: ignore
self.nonce = nonce
self.error = error

@@ -44,11 +49,12 @@ class MissingNonce(NonceError):
Replay-Nonce header field in each successful response to a POST it
provides to a client (...)".

:ivar requests.Response ~.response: HTTP Response
:ivar requests.Response response: HTTP Response

"""
def __init__(self, response, *args):
super().__init__(*args)
def __init__(self, response, *args, **kwargs):
# See comment in BadNonce constructor above for an explanation of type: ignore here.
super(MissingNonce, self).__init__(*args, **kwargs)  # type: ignore
self.response = response

def __str__(self):
@@ -72,7 +78,7 @@ class PollError(ClientError):
def __init__(self, exhausted, updated):
self.exhausted = exhausted
self.updated = updated
super().__init__()
super(PollError, self).__init__()

@property
def timeout(self):
@@ -90,7 +96,7 @@ class ValidationError(Error):
"""
def __init__(self, failed_authzrs):
self.failed_authzrs = failed_authzrs
super().__init__()
super(ValidationError, self).__init__()

class TimeoutError(Error):  # pylint: disable=redefined-builtin
@@ -106,7 +112,7 @@ class IssuanceError(Error):
:param messages.Error error: The error provided by the server.
"""
self.error = error
super().__init__()
super(IssuanceError, self).__init__()

class ConflictError(ClientError):
@@ -119,7 +125,7 @@ class ConflictError(ClientError):
"""
def __init__(self, location):
self.location = location
super().__init__()
super(ConflictError, self).__init__()

class WildcardUnsupportedError(Error):
@@ -12,7 +12,7 @@ class Fixed(jose.Field):

def __init__(self, json_name, value):
self.value = value
super().__init__(
super(Fixed, self).__init__(
json_name=json_name, default=value, omitempty=False)

def decode(self, value):
@@ -53,7 +53,7 @@ class Resource(jose.Field):

def __init__(self, resource_type, *args, **kwargs):
self.resource_type = resource_type
super().__init__(
super(Resource, self).__init__(
'resource', default=resource_type, *args, **kwargs)

def decode(self, value):
@@ -14,9 +14,7 @@ class Header(jose.Header):
kid = jose.Field('kid', omitempty=True)
url = jose.Field('url', omitempty=True)

# Mypy does not understand the josepy magic happening here, and falsely claims
# that nonce is redefined. Let's ignore the type check here.
@nonce.decoder  # type: ignore
@nonce.decoder
def nonce(value):  # pylint: disable=no-self-argument,missing-function-docstring
try:
return jose.decode_b64jose(value)
@@ -50,7 +48,7 @@ class JWS(jose.JWS):
# Per ACME spec, jwk and kid are mutually exclusive, so only include a
# jwk field if kid is not provided.
include_jwk = kid is None
return super().sign(payload, key=key, alg=alg,
return super(JWS, cls).sign(payload, key=key, alg=alg,
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
nonce=nonce, url=url, kid=kid,
include_jwk=include_jwk)
@@ -1,17 +1,15 @@
"""Simple shim around the typing module.
"""Shim class to not have to depend on typing module in prod."""
import sys

This was useful when this code supported Python 2 and typing wasn't always
available. This code is being kept for now for backwards compatibility.

"""
import warnings
from typing import *  # pylint: disable=wildcard-import, unused-wildcard-import
from typing import Collection, IO  # type: ignore

warnings.warn("acme.magic_typing is deprecated and will be removed in a future release.",
DeprecationWarning)

class TypingClass:
class TypingClass(object):
"""Ignore import errors by getting anything"""
def __getattr__(self, name):
return None  # pragma: no cover
return None

try:
# mypy doesn't respect modifying sys.modules
from typing import *  # pylint: disable=wildcard-import, unused-wildcard-import
from typing import Collection, IO  # type: ignore
except ImportError:
sys.modules[__name__] = TypingClass()
@@ -1,11 +1,8 @@
"""ACME protocol messages."""
from collections.abc import Hashable
import json
from typing import Any
from typing import Dict
from typing import Type

import josepy as jose
import six

from acme import challenges
from acme import errors
@@ -14,6 +11,13 @@ from acme import jws
from acme import util
from acme.mixins import ResourceMixin

try:
from collections.abc import Hashable
except ImportError:  # pragma: no cover
from collections import Hashable

OLD_ERROR_PREFIX = "urn:acme:error:"
ERROR_PREFIX = "urn:ietf:params:acme:error:"

@@ -64,6 +68,7 @@ def is_acme_error(err):
return False

@six.python_2_unicode_compatible
class Error(jose.JSONObjectWithFields, errors.Error):
"""ACME error.

@@ -90,9 +95,7 @@ class Error(jose.JSONObjectWithFields, errors.Error):
raise ValueError("The supplied code: %s is not a known ACME error"
" code" % code)
typ = ERROR_PREFIX + code
# Mypy will not understand that the Error constructor accepts a named argument
# "typ" because of josepy magic. Let's ignore the type check here.
return cls(typ=typ, **kwargs)  # type: ignore
return cls(typ=typ, **kwargs)

@property
def description(self):
@@ -129,10 +132,10 @@ class Error(jose.JSONObjectWithFields, errors.Error):
class _Constant(jose.JSONDeSerializable, Hashable):  # type: ignore
"""ACME constant."""
__slots__ = ('name',)
POSSIBLE_NAMES: Dict[str, '_Constant'] = NotImplemented
POSSIBLE_NAMES = NotImplemented

def __init__(self, name):
super().__init__()
super(_Constant, self).__init__()
self.POSSIBLE_NAMES[name] = self  # pylint: disable=unsupported-assignment-operation
self.name = name

@@ -155,10 +158,13 @@ class _Constant(jose.JSONDeSerializable, Hashable):  # type: ignore
def __hash__(self):
return hash((self.__class__, self.name))

def __ne__(self, other):
return not self == other

class Status(_Constant):
"""ACME "status" field."""
POSSIBLE_NAMES: dict = {}
POSSIBLE_NAMES = {}  # type: dict
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
@@ -171,7 +177,7 @@ STATUS_DEACTIVATED = Status('deactivated')

class IdentifierType(_Constant):
"""ACME identifier type."""
POSSIBLE_NAMES: Dict[str, 'IdentifierType'] = {}
POSSIBLE_NAMES = {}  # type: dict
IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder

@@ -189,7 +195,7 @@ class Identifier(jose.JSONObjectWithFields):
class Directory(jose.JSONDeSerializable):
"""Directory."""

_REGISTERED_TYPES: Dict[str, Type[Any]] = {}
_REGISTERED_TYPES = {}  # type: dict

class Meta(jose.JSONObjectWithFields):
"""Directory Meta."""
@@ -200,8 +206,8 @@ class Directory(jose.JSONDeSerializable):
external_account_required = jose.Field('externalAccountRequired', omitempty=True)

def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super().__init__(**kwargs)
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
super(Directory.Meta, self).__init__(**kwargs)

@property
def terms_of_service(self):
@@ -211,7 +217,7 @@ class Directory(jose.JSONDeSerializable):
def __iter__(self):
# When iterating over fields, use the external name 'terms_of_service' instead of
# the internal '_terms_of_service'.
for name in super().__iter__():
for name in super(Directory.Meta, self).__iter__():
yield name[1:] if name == '_terms_of_service' else name

def _internal_name(self, name):
@@ -223,7 +229,7 @@ class Directory(jose.JSONDeSerializable):
return getattr(key, 'resource_type', key)

@classmethod
def register(cls, resource_body_cls: Type[Any]) -> Type[Any]:
def register(cls, resource_body_cls):
"""Register resource."""
resource_type = resource_body_cls.resource_type
assert resource_type not in cls._REGISTERED_TYPES
@@ -269,7 +275,7 @@ class Resource(jose.JSONObjectWithFields):
class ResourceWithURI(Resource):
"""ACME Resource with URI.

:ivar unicode ~.uri: Location of the resource.
:ivar unicode uri: Location of the resource.

"""
uri = jose.Field('uri')  # no ChallengeResource.uri
@@ -279,7 +285,7 @@ class ResourceBody(jose.JSONObjectWithFields):
"""ACME Resource Body."""

class ExternalAccountBinding:
class ExternalAccountBinding(object):
"""ACME External Account Binding"""

@classmethod
@@ -309,9 +315,6 @@ class Registration(ResourceBody):
# on new-reg key server ignores 'key' and populates it based on
# JWS.signature.combined.jwk
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
# Contact field implements special behavior to allow messages that clear existing
# contacts while not expecting the `contact` field when loading from json.
# This is implemented in the constructor and *_json methods.
contact = jose.Field('contact', omitempty=True, default=())
agreement = jose.Field('agreement', omitempty=True)
status = jose.Field('status', omitempty=True)
@@ -324,73 +327,24 @@ class Registration(ResourceBody):

@classmethod
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
"""
Create registration resource from contact details.

The `contact` keyword being passed to a Registration object is meaningful, so
this function represents empty iterables in its kwargs by passing on an empty
`tuple`.
"""

# Note if `contact` was in kwargs.
contact_provided = 'contact' in kwargs

# Pop `contact` from kwargs and add formatted email or phone numbers
"""Create registration resource from contact details."""
details = list(kwargs.pop('contact', ()))
if phone is not None:
details.append(cls.phone_prefix + phone)
if email is not None:
details.extend([cls.email_prefix + mail for mail in email.split(',')])

# Insert formatted contact information back into kwargs
# or insert an empty tuple if `contact` provided.
if details or contact_provided:
kwargs['contact'] = tuple(details)
kwargs['contact'] = tuple(details)

if external_account_binding:
kwargs['external_account_binding'] = external_account_binding

return cls(**kwargs)

def __init__(self, **kwargs):
"""Note if the user provides a value for the `contact` member."""
if 'contact' in kwargs:
# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
object.__setattr__(self, '_add_contact', True)
super().__init__(**kwargs)

def _filter_contact(self, prefix):
return tuple(
detail[len(prefix):] for detail in self.contact  # pylint: disable=not-an-iterable
if detail.startswith(prefix))

def _add_contact_if_appropriate(self, jobj):
"""
The `contact` member of Registration objects should not be required when
de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
it should be included in serializations if it was provided.

:param jobj: Dictionary containing this Registrations' data
:type jobj: dict

:returns: Dictionary containing Registrations data to transmit to the server
:rtype: dict
"""
if getattr(self, '_add_contact', False):
jobj['contact'] = self.encode('contact')

return jobj

def to_partial_json(self):
"""Modify josepy.JSONDeserializable.to_partial_json()"""
jobj = super().to_partial_json()
return self._add_contact_if_appropriate(jobj)

def fields_to_partial_json(self):
"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
jobj = super().fields_to_partial_json()
return self._add_contact_if_appropriate(jobj)

@property
def phones(self):
"""All phones found in the ``contact`` field."""
@@ -459,20 +413,20 @@ class ChallengeBody(ResourceBody):
omitempty=True, default=None)

def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super().__init__(**kwargs)
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
super(ChallengeBody, self).__init__(**kwargs)

def encode(self, name):
return super().encode(self._internal_name(name))
return super(ChallengeBody, self).encode(self._internal_name(name))

def to_partial_json(self):
jobj = super().to_partial_json()
jobj = super(ChallengeBody, self).to_partial_json()
jobj.update(self.chall.to_partial_json())
return jobj

@classmethod
def fields_from_json(cls, jobj):
jobj_fields = super().fields_from_json(jobj)
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
return jobj_fields

@@ -487,7 +441,7 @@ class ChallengeBody(ResourceBody):
def __iter__(self):
# When iterating over fields, use the external name 'uri' instead of
# the internal '_uri'.
for name in super().__iter__():
for name in super(ChallengeBody, self).__iter__():
yield name[1:] if name == '_uri' else name

def _internal_name(self, name):
@@ -533,9 +487,7 @@ class Authorization(ResourceBody):
expires = fields.RFC3339Field('expires', omitempty=True)
wildcard = jose.Field('wildcard', omitempty=True)

# Mypy does not understand the josepy magic happening here, and falsely claims
# that challenge is redefined. Let's ignore the type check here.
@challenges.decoder  # type: ignore
@challenges.decoder
def challenges(value):  # pylint: disable=no-self-argument,missing-function-docstring
return tuple(ChallengeBody.from_json(chall) for chall in value)

@@ -614,16 +566,14 @@ class Revocation(ResourceMixin, jose.JSONObjectWithFields):
class Order(ResourceBody):
"""Order Resource Body.

:ivar identifiers: List of identifiers for the certificate.
:vartype identifiers: `list` of `.Identifier`
:ivar list of .Identifier: List of identifiers for the certificate.
:ivar acme.messages.Status status:
:ivar authorizations: URLs of authorizations.
:vartype authorizations: `list` of `str`
:ivar list of str authorizations: URLs of authorizations.
:ivar str certificate: URL to download certificate as a fullchain PEM.
:ivar str finalize: URL to POST to to request issuance once all
authorizations have "valid" status.
:ivar datetime.datetime expires: When the order expires.
:ivar ~.Error error: Any error that occurred during finalization, if applicable.
:ivar .Error error: Any error that occurred during finalization, if applicable.
"""
identifiers = jose.Field('identifiers', omitempty=True)
status = jose.Field('status', decoder=Status.from_json,
@@ -634,9 +584,7 @@ class Order(ResourceBody):
expires = fields.RFC3339Field('expires', omitempty=True)
error = jose.Field('error', omitempty=True, decoder=Error.from_json)

# Mypy does not understand the josepy magic happening here, and falsely claims
# that identifiers is redefined. Let's ignore the type check here.
@identifiers.decoder  # type: ignore
@identifiers.decoder
def identifiers(value):  # pylint: disable=no-self-argument,missing-function-docstring
return tuple(Identifier.from_json(identifier) for identifier in value)

@@ -645,20 +593,15 @@ class OrderResource(ResourceWithURI):

:ivar acme.messages.Order body:
:ivar str csr_pem: The CSR this Order will be finalized with.
:ivar authorizations: Fully-fetched AuthorizationResource objects.
:vartype authorizations: `list` of `acme.messages.AuthorizationResource`
:ivar list of acme.messages.AuthorizationResource authorizations:
Fully-fetched AuthorizationResource objects.
:ivar str fullchain_pem: The fetched contents of the certificate URL
produced once the order was finalized, if it's present.
:ivar alternative_fullchains_pem: The fetched contents of alternative certificate
chain URLs produced once the order was finalized, if present and requested during
finalization.
:vartype alternative_fullchains_pem: `list` of `str`
"""
body = jose.Field('body', decoder=Order.from_json)
csr_pem = jose.Field('csr_pem', omitempty=True)
authorizations = jose.Field('authorizations')
fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
alternative_fullchains_pem = jose.Field('alternative_fullchains_pem', omitempty=True)

@Directory.register
class NewOrder(Order):
@@ -1,7 +1,7 @@
"""Useful mixins for Challenge and Resource objects"""

class VersionedLEACMEMixin:
class VersionedLEACMEMixin(object):
"""This mixin stores the version of Let's Encrypt's endpoint being used."""
@property
def le_acme_version(self):
@@ -20,7 +20,7 @@ class VersionedLEACMEMixin:
# Required for @property to operate properly. See comment above.
object.__setattr__(self, key, value)
else:
super().__setattr__(key, value)  # pragma: no cover
super(VersionedLEACMEMixin, self).__setattr__(key, value)  # pragma: no cover

class ResourceMixin(VersionedLEACMEMixin):
@@ -30,12 +30,12 @@ class ResourceMixin(VersionedLEACMEMixin):
"""
def to_partial_json(self):
"""See josepy.JSONDeserializable.to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(ResourceMixin, self),
'to_partial_json', 'resource')

def fields_to_partial_json(self):
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(ResourceMixin, self),
'fields_to_partial_json', 'resource')

@@ -46,12 +46,12 @@ class TypeMixin(VersionedLEACMEMixin):
"""
def to_partial_json(self):
"""See josepy.JSONDeserializable.to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(TypeMixin, self),
'to_partial_json', 'type')

def fields_to_partial_json(self):
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(TypeMixin, self),
'fields_to_partial_json', 'type')
@@ -1,16 +1,17 @@
"""Support for standalone client challenge solvers. """
import collections
import functools
import http.client as http_client
import http.server as BaseHTTPServer
import logging
import socket
import socketserver
import threading
from typing import List

from six.moves import BaseHTTPServer  # type: ignore
from six.moves import http_client
from six.moves import socketserver  # type: ignore

from acme import challenges
from acme import crypto_util
from acme.magic_typing import List

logger = logging.getLogger(__name__)

@@ -53,7 +54,7 @@ class ACMEServerMixin:
allow_reuse_address = True

class BaseDualNetworkedServers:
class BaseDualNetworkedServers(object):
"""Base class for a pair of IPv6 and IPv4 servers that tries to do everything
it's asked for both servers, but where failures in one server don't
affect the other.
@@ -63,8 +64,8 @@ class BaseDualNetworkedServers:

def __init__(self, ServerClass, server_address, *remaining_args, **kwargs):
port = server_address[1]
self.threads: List[threading.Thread] = []
self.servers: List[socketserver.BaseServer] = []
self.threads = []  # type: List[threading.Thread]
self.servers = []  # type: List[ACMEServerMixin]

# Must try True first.
# Ubuntu, for example, will fail to bind to IPv4 if we've already bound
@@ -203,24 +204,8 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):

def __init__(self, *args, **kwargs):
self.simple_http_resources = kwargs.pop("simple_http_resources", set())
self._timeout = kwargs.pop('timeout', 30)
self.timeout = kwargs.pop('timeout', 30)
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
self.server: HTTP01Server

# In parent class BaseHTTPRequestHandler, 'timeout' is a class-level property but we
# need to define its value during the initialization phase in HTTP01RequestHandler.
# However MyPy does not appreciate that we dynamically shadow a class-level property
# with an instance-level property (eg. self.timeout = ... in __init__()). So to make
# everyone happy, we statically redefine 'timeout' as a method property, and set the
# timeout value in a new internal instance-level property _timeout.
@property
def timeout(self):
"""
The default timeout this server should apply to requests.
:return: timeout to apply
:rtype: int
"""
return self._timeout

def log_message(self, format, *args):  # pylint: disable=redefined-builtin
"""Log arbitrary message."""
@@ -1,6 +1,7 @@
"""ACME utilities."""
import six

def map_keys(dikt, func):
"""Map dictionary keys."""
return {func(key): value for key, value in dikt.items()}
return dict((func(key), value) for key, value in six.iteritems(dikt))
@@ -9,7 +9,7 @@ BUILDDIR = _build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.

@@ -85,9 +85,7 @@ language = 'en'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
'_build',
]
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -122,7 +120,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.

# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:  # only import and set the theme if we're building docs locally

@@ -65,7 +65,7 @@ if errorlevel 9009 (
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
echo.http://sphinx-doc.org/
exit /b 1
)

@@ -1,3 +1 @@
:orphan:

.. literalinclude:: ../jws-help.txt
@@ -1,25 +1,41 @@
from distutils.version import StrictVersion
import sys

from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand

version = '1.15.0.dev0'
version = '1.6.0.dev0'

# Please update tox.ini when modifying dependency version requirements
install_requires = [
'cryptography>=2.1.4',
# load_pem_private/public_key (>=0.6)
# rsa_recover_prime_factors (>=0.8)
'cryptography>=1.2.3',
# formerly known as acme.jose:
# 1.1.0+ is required to avoid the warnings described at
# https://github.com/certbot/josepy/issues/13.
'josepy>=1.1.0',
'PyOpenSSL>=17.3.0',
# Connection.set_tlsext_host_name (>=0.13)
'PyOpenSSL>=0.13.1',
'pyrfc3339',
'pytz',
'requests>=2.6.0',
'requests[security]>=2.6.0',  # security extras added in 2.4.1
'requests-toolbelt>=0.3.0',
'setuptools>=39.0.1',
'setuptools',
'six>=1.9.0',  # needed for python_2_unicode_compatible
]

setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
'of setuptools. Version 36.2+ of setuptools is required.')
elif sys.version_info < (3,3):
install_requires.append('mock')

dev_extras = [
'pytest',
'pytest-xdist',
@@ -31,25 +47,43 @@ docs_extras = [
'sphinx_rtd_theme',
]

class PyTest(TestCommand):
user_options = []

def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''

def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)

setup(
name='acme',
version=version,
description='ACME protocol implementation in Python',
url='https://github.com/letsencrypt/letsencrypt',
author="Certbot Project",
author_email='certbot-dev@eff.org',
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=3.6',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],
@@ -61,4 +95,7 @@ setup(
'dev': dev_extras,
'docs': docs_extras,
},
test_suite='acme',
tests_require=["pytest"],
cmdclass={"test": PyTest},
)
@@ -1,11 +1,14 @@
"""Tests for acme.challenges."""
import urllib.parse as urllib_parse
import unittest
from unittest import mock

import josepy as jose
import OpenSSL
try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore
import requests
from six.moves.urllib import parse as urllib_parse

from acme import errors

@@ -292,7 +295,7 @@ class TLSALPN01ResponseTest(unittest.TestCase):

def test_gen_verify_cert_gen_key(self):
cert, key = self.response.gen_cert(self.domain)
self.assertIsInstance(key, OpenSSL.crypto.PKey)
self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
self.assertTrue(self.response.verify_cert(self.domain, cert))

def test_verify_bad_cert(self):
@@ -431,7 +434,7 @@ class DNSTest(unittest.TestCase):
mock_gen.return_value = mock.sentinel.validation
response = self.msg.gen_response(KEY)
from acme.challenges import DNSResponse
self.assertIsInstance(response, DNSResponse)
self.assertTrue(isinstance(response, DNSResponse))
self.assertEqual(response.validation, mock.sentinel.validation)

def test_validation_domain_name(self):
@@ -2,15 +2,17 @@
# pylint: disable=too-many-lines
import copy
import datetime
import http.client as http_client
import json
import unittest
from typing import Dict
from unittest import mock

import josepy as jose
try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore
import OpenSSL
import requests
from six.moves import http_client  # pylint: disable=import-error

from acme import challenges
from acme import errors
@@ -62,7 +64,7 @@ class ClientTestBase(unittest.TestCase):
self.contact = ('mailto:cert-admin@example.com', 'tel:+12025551212')
reg = messages.Registration(
contact=self.contact, key=KEY.public_key())
the_arg: Dict = dict(reg)
the_arg = dict(reg)  # type: Dict
self.new_reg = messages.NewRegistration(**the_arg)
self.regr = messages.RegistrationResource(
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
@@ -90,7 +92,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
"""Tests for acme.client.BackwardsCompatibleClientV2."""

def setUp(self):
super().setUp()
super(BackwardsCompatibleClientV2Test, self).setUp()
# contains a loaded cert
self.certr = messages.CertificateResource(
body=messages_test.CERT)
@@ -261,7 +263,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
with mock.patch('acme.client.ClientV2') as mock_client:
client = self._init()
client.finalize_order(mock_orderr, mock_deadline)
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline, False)
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline)

def test_revoke(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
@@ -319,7 +321,7 @@ class ClientTest(ClientTestBase):
"""Tests for acme.client.Client."""

def setUp(self):
super().setUp()
super(ClientTest, self).setUp()

self.directory = DIRECTORY_V1

@@ -604,8 +606,8 @@ class ClientTest(ClientTestBase):
# make sure that max_attempts is per-authorization, rather
# than global
max_attempts=max(len(authzrs[0].retries), len(authzrs[1].retries)))
self.assertIs(cert[0], csr)
self.assertIs(cert[1], updated_authzrs)
self.assertTrue(cert[0] is csr)
self.assertTrue(cert[1] is updated_authzrs)
self.assertEqual(updated_authzrs[0].uri, 'a...')
self.assertEqual(updated_authzrs[1].uri, 'b.')
self.assertEqual(updated_authzrs[0].times, [
@@ -641,7 +643,7 @@ class ClientTest(ClientTestBase):
authzr = self.client.deactivate_authorization(self.authzr)
self.assertEqual(authzb, authzr.body)
self.assertEqual(self.client.net.post.call_count, 1)
self.assertIn(self.authzr.uri, self.net.post.call_args_list[0][0])
self.assertTrue(self.authzr.uri in self.net.post.call_args_list[0][0])

def test_check_cert(self):
self.response.headers['Location'] = self.certr.uri
@@ -700,7 +702,7 @@ class ClientTest(ClientTestBase):

def test_revocation_payload(self):
obj = messages.Revocation(certificate=self.certr.body, reason=self.rsn)
self.assertIn('reason', obj.to_partial_json().keys())
self.assertTrue('reason' in obj.to_partial_json().keys())
self.assertEqual(self.rsn, obj.to_partial_json()['reason'])

def test_revoke_bad_status_raises_error(self):
@@ -716,7 +718,7 @@ class ClientV2Test(ClientTestBase):
"""Tests for acme.client.ClientV2."""

def setUp(self):
super().setUp()
super(ClientV2Test, self).setUp()

self.directory = DIRECTORY_V2
@@ -840,32 +842,6 @@ class ClientV2Test(ClientTestBase):
deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
self.assertRaises(errors.TimeoutError, self.client.finalize_order, self.orderr, deadline)

def test_finalize_order_alt_chains(self):
updated_order = self.order.update(
certificate='https://www.letsencrypt-demo.org/acme/cert/',
)
updated_orderr = self.orderr.update(body=updated_order,
fullchain_pem=CERT_SAN_PEM,
alternative_fullchains_pem=[CERT_SAN_PEM,
CERT_SAN_PEM])
self.response.json.return_value = updated_order.to_json()
self.response.text = CERT_SAN_PEM
self.response.headers['Link'] ='<https://example.com/acme/cert/1>;rel="alternate", ' + \
'<https://example.com/dir>;rel="index", ' + \
'<https://example.com/acme/cert/2>;title="foo";rel="alternate"'

deadline = datetime.datetime(9999, 9, 9)
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
self.net.post.assert_any_call('https://example.com/acme/cert/1',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
self.net.post.assert_any_call('https://example.com/acme/cert/2',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
self.assertEqual(resp, updated_orderr)

del self.response.headers['Link']
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
self.assertEqual(resp, updated_orderr.update(alternative_fullchains_pem=[]))

def test_revoke(self):
self.client.revoke(messages_test.CERT, self.rsn)
self.net.post.assert_called_once_with(
@@ -877,9 +853,9 @@ class ClientV2Test(ClientTestBase):
self.response.headers['Location'] = self.regr.uri
self.response.json.return_value = self.regr.body.to_json()
self.assertEqual(self.regr, self.client.update_registration(self.regr))
self.assertIsNotNone(self.client.net.account)
self.assertNotEqual(self.client.net.account, None)
self.assertEqual(self.client.net.post.call_count, 2)
self.assertIn(DIRECTORY_V2.newAccount, self.net.post.call_args_list[0][0])
self.assertTrue(DIRECTORY_V2.newAccount in self.net.post.call_args_list[0][0])

self.response.json.return_value = self.regr.body.update(
contact=()).to_json()
@@ -943,7 +919,7 @@ class ClientNetworkTest(unittest.TestCase):
self.response.links = {}

def test_init(self):
self.assertIs(self.net.verify_ssl, self.verify_ssl)
self.assertTrue(self.net.verify_ssl is self.verify_ssl)

def test_wrap_in_jws(self):
# pylint: disable=protected-access
@@ -1185,7 +1161,7 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):

def send_request(*args, **kwargs):
# pylint: disable=unused-argument,missing-docstring
self.assertNotIn("new_nonce_url", kwargs)
self.assertFalse("new_nonce_url" in kwargs)
method = args[0]
uri = args[1]
if method == 'HEAD' and uri != "new_nonce_uri":
@@ -1330,7 +1306,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
from acme.client import ClientNetwork
net = ClientNetwork(key=None, alg=None, source_address=self.source_address)
for adapter in net.session.adapters.values():
self.assertIn(self.source_address, adapter.source_address)
self.assertTrue(self.source_address in adapter.source_address)

def test_behavior_assumption(self):
"""This is a test that guardrails the HTTPAdapter behavior so that if the default for
@@ -1340,7 +1316,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
# test should fail if the default adapter type is changed by requests
net = ClientNetwork(key=None, alg=None)
session = requests.Session()
for scheme in session.adapters:
for scheme in session.adapters.keys():
client_network_adapter = net.session.adapters.get(scheme)
default_adapter = session.adapters.get(scheme)
self.assertEqual(client_network_adapter.__class__, default_adapter.__class__)
@@ -1,14 +1,14 @@
"""Tests for acme.crypto_util."""
import itertools
import socket
import socketserver
import threading
import time
import unittest
from typing import List

import josepy as jose
import OpenSSL
import six
from six.moves import socketserver  # type: ignore  # pylint: disable=import-error

from acme import errors
import test_util
@@ -27,6 +27,8 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):

class _TestServer(socketserver.TCPServer):

# six.moves.* | pylint: disable=attribute-defined-outside-init,no-init

def server_bind(self):  # pylint: disable=missing-docstring
self.socket = SSLSocket(socket.socket(),
certs)
@@ -60,6 +62,7 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
self.assertRaises(errors.Error, self._probe, b'bar')

def test_probe_connection_error(self):
# pylint has a hard time with six
self.server.server_close()
original_timeout = socket.getdefaulttimeout()
try:
@@ -118,9 +121,9 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
@classmethod
def _get_idn_names(cls):
"""Returns expected names from '{cert,csr}-idnsans.pem'."""
chars = [chr(i) for i in itertools.chain(range(0x3c3, 0x400),
range(0x641, 0x6fc),
range(0x1820, 0x1877))]
chars = [six.unichr(i) for i in itertools.chain(range(0x3c3, 0x400),
range(0x641, 0x6fc),
range(0x1820, 0x1877))]
return [''.join(chars[i: i + 45]) + '.invalid'
for i in range(0, len(chars), 45)]

@@ -181,7 +184,7 @@ class RandomSnTest(unittest.TestCase):

def setUp(self):
self.cert_count = 5
self.serial_num: List[int] = []
self.serial_num = []  # type: List[int]
self.key = OpenSSL.crypto.PKey()
self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)

@@ -191,7 +194,7 @@ class RandomSnTest(unittest.TestCase):
for _ in range(self.cert_count):
cert = gen_ss_cert(self.key, ['dummy'], force_san=True)
self.serial_num.append(cert.get_serial_number())
self.assertGreater(len(set(self.serial_num)), 1)
self.assertTrue(len(set(self.serial_num)) > 1)

class MakeCSRTest(unittest.TestCase):
"""Test for standalone functions."""
@@ -206,8 +209,8 @@ class MakeCSRTest(unittest.TestCase):

def test_make_csr(self):
csr_pem = self._call_with_key(["a.example", "b.example"])
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--', csr_pem)
self.assertIn(b'--END CERTIFICATE REQUEST--', csr_pem)
self.assertTrue(b'--BEGIN CERTIFICATE REQUEST--' in csr_pem)
self.assertTrue(b'--END CERTIFICATE REQUEST--' in csr_pem)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
@@ -1,6 +1,10 @@
"""Tests for acme.errors."""
import unittest
from unittest import mock

try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore

class BadNonceTest(unittest.TestCase):
@@ -24,8 +28,8 @@ class MissingNonceTest(unittest.TestCase):
self.error = MissingNonce(self.response)

def test_str(self):
self.assertIn("FOO", str(self.error))
self.assertIn("{}", str(self.error))
self.assertTrue("FOO" in str(self.error))
self.assertTrue("{}" in str(self.error))

class PollErrorTest(unittest.TestCase):
@@ -48,7 +48,7 @@ class JWSTest(unittest.TestCase):
self.assertEqual(jws.signature.combined.nonce, self.nonce)
self.assertEqual(jws.signature.combined.url, self.url)
self.assertEqual(jws.signature.combined.kid, self.kid)
self.assertIsNone(jws.signature.combined.jwk)
self.assertEqual(jws.signature.combined.jwk, None)
# TODO: check that nonce is in protected header

self.assertEqual(jws, JWS.from_json(jws.to_json()))
@@ -58,7 +58,7 @@ class JWSTest(unittest.TestCase):
jws = JWS.sign(payload=b'foo', key=self.privkey,
alg=jose.RS256, nonce=self.nonce,
url=self.url)
self.assertIsNone(jws.signature.combined.kid)
self.assertEqual(jws.signature.combined.kid, None)
self.assertEqual(jws.signature.combined.jwk, self.pubkey)
@@ -1,8 +1,11 @@
"""Tests for acme.magic_typing."""
import sys
import unittest
import warnings
from unittest import mock

try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore

class MagicTypingTest(unittest.TestCase):
@@ -10,21 +13,32 @@ class MagicTypingTest(unittest.TestCase):
def test_import_success(self):
try:
import typing as temp_typing
except ImportError:  # pragma: no cover
temp_typing = None  # pragma: no cover
except ImportError:  # pragma: no cover
temp_typing = None  # pragma: no cover
typing_class_mock = mock.MagicMock()
text_mock = mock.MagicMock()
typing_class_mock.Text = text_mock
sys.modules['typing'] = typing_class_mock
if 'acme.magic_typing' in sys.modules:
del sys.modules['acme.magic_typing']  # pragma: no cover
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
from acme.magic_typing import Text
del sys.modules['acme.magic_typing']  # pragma: no cover
from acme.magic_typing import Text
self.assertEqual(Text, text_mock)
del sys.modules['acme.magic_typing']
sys.modules['typing'] = temp_typing

def test_import_failure(self):
try:
import typing as temp_typing
except ImportError:  # pragma: no cover
temp_typing = None  # pragma: no cover
sys.modules['typing'] = None
if 'acme.magic_typing' in sys.modules:
del sys.modules['acme.magic_typing']  # pragma: no cover
from acme.magic_typing import Text
self.assertTrue(Text is None)
del sys.modules['acme.magic_typing']
sys.modules['typing'] = temp_typing

if __name__ == '__main__':
unittest.main()  # pragma: no cover
@@ -1,9 +1,11 @@
"""Tests for acme.messages."""
from typing import Dict
import unittest
from unittest import mock

import josepy as jose
try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore

from acme import challenges
import test_util
@@ -41,13 +43,13 @@ class ErrorTest(unittest.TestCase):

def test_description(self):
self.assertEqual('The request message was malformed', self.error.description)
self.assertIsNone(self.error_custom.description)
self.assertTrue(self.error_custom.description is None)

def test_code(self):
from acme.messages import Error
self.assertEqual('malformed', self.error.code)
self.assertIsNone(self.error_custom.code)
self.assertIsNone(Error().code)
self.assertEqual(None, self.error_custom.code)
self.assertEqual(None, Error().code)

def test_is_acme_error(self):
from acme.messages import is_acme_error, Error
@@ -82,7 +84,7 @@ class ConstantTest(unittest.TestCase):
from acme.messages import _Constant

class MockConstant(_Constant):  # pylint: disable=missing-docstring
POSSIBLE_NAMES: Dict = {}
POSSIBLE_NAMES = {}  # type: Dict

self.MockConstant = MockConstant  # pylint: disable=invalid-name
self.const_a = MockConstant('a')
@@ -106,11 +108,11 @@ class ConstantTest(unittest.TestCase):

def test_equality(self):
const_a_prime = self.MockConstant('a')
self.assertNotEqual(self.const_a, self.const_b)
self.assertEqual(self.const_a, const_a_prime)
self.assertFalse(self.const_a == self.const_b)
self.assertTrue(self.const_a == const_a_prime)

self.assertNotEqual(self.const_a, self.const_b)
self.assertEqual(self.const_a, const_a_prime)
self.assertTrue(self.const_a != self.const_b)
self.assertFalse(self.const_a != const_a_prime)

class DirectoryTest(unittest.TestCase):
@@ -252,19 +254,6 @@ class RegistrationTest(unittest.TestCase):
from acme.messages import Registration
hash(Registration.from_json(self.jobj_from))

def test_default_not_transmitted(self):
from acme.messages import NewRegistration
empty_new_reg = NewRegistration()
new_reg_with_contact = NewRegistration(contact=())

self.assertEqual(empty_new_reg.contact, ())
self.assertEqual(new_reg_with_contact.contact, ())

self.assertNotIn('contact', empty_new_reg.to_partial_json())
self.assertNotIn('contact', empty_new_reg.fields_to_partial_json())
self.assertIn('contact', new_reg_with_contact.to_partial_json())
self.assertIn('contact', new_reg_with_contact.fields_to_partial_json())

class UpdateRegistrationTest(unittest.TestCase):
"""Tests for acme.messages.UpdateRegistration."""
@@ -406,7 +395,7 @@ class AuthorizationResourceTest(unittest.TestCase):
authzr = AuthorizationResource(
uri=mock.sentinel.uri,
body=mock.sentinel.body)
self.assertIsInstance(authzr, jose.JSONDeSerializable)
self.assertTrue(isinstance(authzr, jose.JSONDeSerializable))

class CertificateRequestTest(unittest.TestCase):
@@ -417,7 +406,7 @@ class CertificateRequestTest(unittest.TestCase):
self.req = CertificateRequest(csr=CSR)

def test_json_de_serializable(self):
self.assertIsInstance(self.req, jose.JSONDeSerializable)
self.assertTrue(isinstance(self.req, jose.JSONDeSerializable))
from acme.messages import CertificateRequest
self.assertEqual(
self.req, CertificateRequest.from_json(self.req.to_json()))
@@ -433,7 +422,7 @@ class CertificateResourceTest(unittest.TestCase):
cert_chain_uri=mock.sentinel.cert_chain_uri)

def test_json_de_serializable(self):
self.assertIsInstance(self.certr, jose.JSONDeSerializable)
self.assertTrue(isinstance(self.certr, jose.JSONDeSerializable))
from acme.messages import CertificateResource
self.assertEqual(
self.certr, CertificateResource.from_json(self.certr.to_json()))
@@ -1,14 +1,16 @@
"""Tests for acme.standalone."""
import http.client as http_client
import socket
import socketserver
import threading
import unittest
from typing import Set
from unittest import mock

import josepy as jose
try:
import mock
except ImportError:  # pragma: no cover
from unittest import mock  # type: ignore
import requests
from six.moves import http_client  # pylint: disable=import-error
from six.moves import socketserver  # type: ignore  # pylint: disable=import-error

from acme import challenges
from acme import crypto_util
@@ -42,7 +44,7 @@ class HTTP01ServerTest(unittest.TestCase):
def setUp(self):
self.account_key = jose.JWK.load(
test_util.load_vector('rsa1024_key.pem'))
self.resources: Set = set()
self.resources = set()  # type: Set

from acme.standalone import HTTP01Server
self.server = HTTP01Server(('', 0), resources=self.resources)
@@ -219,7 +221,7 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
def setUp(self):
self.account_key = jose.JWK.load(
test_util.load_vector('rsa1024_key.pem'))
self.resources: Set = set()
self.resources = set()  # type: Set

from acme.standalone import HTTP01DualNetworkedServers
self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources)
@@ -9,6 +9,7 @@ import pkg_resources

from certbot import errors
from certbot import util

from certbot.compat import os

logger = logging.getLogger(__name__)
@@ -224,8 +225,7 @@ def _get_runtime_cfg(command):
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
env=util.env_no_snap_for_external_calls())
universal_newlines=True)
stdout, stderr = proc.communicate()

except (OSError, ValueError):
@@ -1,5 +1,4 @@
""" apacheconfig implementation of the ParserNode interfaces """
from typing import Tuple

from certbot_apache._internal import assertions
from certbot_apache._internal import interfaces
@@ -15,14 +14,14 @@ class ApacheParserNode(interfaces.ParserNode):

def __init__(self, **kwargs):
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)  # pylint: disable=unused-variable
super().__init__(**kwargs)
super(ApacheParserNode, self).__init__(**kwargs)
self.ancestor = ancestor
self.filepath = filepath
self.dirty = dirty
self.metadata = metadata
self._raw = self.metadata["ac_ast"]

def save(self, msg): # pragma: no cover
def save(self, msg):  # pragma: no cover
pass

def find_ancestors(self, name):  # pylint: disable=unused-variable
@@ -39,7 +38,7 @@ class ApacheCommentNode(ApacheParserNode):

def __init__(self, **kwargs):
comment, kwargs = util.commentnode_kwargs(kwargs)  # pylint: disable=unused-variable
super().__init__(**kwargs)
super(ApacheCommentNode, self).__init__(**kwargs)
self.comment = comment

def __eq__(self, other):  # pragma: no cover
@@ -57,7 +56,7 @@ class ApacheDirectiveNode(ApacheParserNode):

def __init__(self, **kwargs):
name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
super().__init__(**kwargs)
super(ApacheDirectiveNode, self).__init__(**kwargs)
self.name = name
self.parameters = parameters
self.enabled = enabled
@@ -83,8 +82,8 @@ class ApacheBlockNode(ApacheDirectiveNode):
""" apacheconfig implementation of BlockNode interface """

def __init__(self, **kwargs):
super().__init__(**kwargs)
self.children: Tuple[ApacheParserNode, ...] = ()
super(ApacheBlockNode, self).__init__(**kwargs)
self.children = ()

def __eq__(self, other):  # pragma: no cover
if isinstance(other, self.__class__):
@@ -3,6 +3,7 @@ import fnmatch

from certbot_apache._internal import interfaces

PASS = "CERTBOT_PASS_ASSERT"

@@ -6,7 +6,7 @@ Authors:
Raphael Pinson <raphink@gmail.com>

About: Reference
Online Apache configuration manual: https://httpd.apache.org/docs/trunk/
Online Apache configuration manual: http://httpd.apache.org/docs/trunk/

About: License
This file is licensed under the LGPL v2+.
@@ -64,10 +64,10 @@ Translates over to:
|
||||
"/files/etc/apache2/apache2.conf/bLoCk[1]",
|
||||
]
|
||||
"""
|
||||
from typing import Set
|
||||
|
||||
from acme.magic_typing import Set
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import assertions
|
||||
from certbot_apache._internal import interfaces
|
||||
@@ -80,7 +80,7 @@ class AugeasParserNode(interfaces.ParserNode):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs) # pylint: disable=unused-variable
|
||||
super().__init__(**kwargs)
|
||||
super(AugeasParserNode, self).__init__(**kwargs)
|
||||
self.ancestor = ancestor
|
||||
self.filepath = filepath
|
||||
self.dirty = dirty
|
||||
@@ -169,7 +169,7 @@ class AugeasCommentNode(AugeasParserNode):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
comment, kwargs = util.commentnode_kwargs(kwargs) # pylint: disable=unused-variable
|
||||
super().__init__(**kwargs)
|
||||
super(AugeasCommentNode, self).__init__(**kwargs)
|
||||
# self.comment = comment
|
||||
self.comment = comment
|
||||
|
||||
@@ -188,7 +188,7 @@ class AugeasDirectiveNode(AugeasParserNode):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
|
||||
super().__init__(**kwargs)
|
||||
super(AugeasDirectiveNode, self).__init__(**kwargs)
|
||||
self.name = name
|
||||
self.enabled = enabled
|
||||
if parameters:
|
||||
@@ -245,7 +245,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
""" Augeas implementation of BlockNode interface """
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
super(AugeasBlockNode, self).__init__(**kwargs)
|
||||
self.children = ()
|
||||
|
||||
def __eq__(self, other):
|
||||
@@ -355,7 +355,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
|
||||
ownpath = self.metadata.get("augeaspath")
|
||||
|
||||
directives = self.parser.find_dir(name, start=ownpath, exclude=exclude)
|
||||
already_parsed: Set[str] = set()
|
||||
already_parsed = set() # type: Set[str]
|
||||
for directive in directives:
|
||||
# Remove the /arg part from the Augeas path
|
||||
directive = directive.partition("/arg")[0]
|
||||
|
||||
@@ -1,24 +1,29 @@
"""Apache Configurator."""
# pylint: disable=too-many-lines
from collections import defaultdict
import copy
from distutils.version import LooseVersion
import copy
import fnmatch
import logging
import re
import socket
import time
from typing import DefaultDict
from typing import Dict
from typing import List
from typing import Optional
from typing import Set
from typing import Union

import six
import zope.component
import zope.interface
try:
import apacheconfig
HAS_APACHECONFIG = True
except ImportError: # pragma: no cover
HAS_APACHECONFIG = False

from acme import challenges
from acme.magic_typing import DefaultDict
from acme.magic_typing import Dict
from acme.magic_typing import List
from acme.magic_typing import Set
from acme.magic_typing import Union
from certbot import errors
from certbot import interfaces
from certbot import util
@@ -36,61 +41,10 @@ from certbot_apache._internal import dualparser
from certbot_apache._internal import http_01
from certbot_apache._internal import obj
from certbot_apache._internal import parser
from certbot_apache._internal.dualparser import DualBlockNode
from certbot_apache._internal.obj import VirtualHost
from certbot_apache._internal.parser import ApacheParser

try:
import apacheconfig
HAS_APACHECONFIG = True
except ImportError: # pragma: no cover
HAS_APACHECONFIG = False


logger = logging.getLogger(__name__)


class OsOptions:
"""
Dedicated class to describe the OS specificities (eg. paths, binary names)
that the Apache configurator needs to be aware to operate properly.
"""
def __init__(self,
server_root="/etc/apache2",
vhost_root="/etc/apache2/sites-available",
vhost_files="*",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd: Optional[List[str]] = None,
restart_cmd: Optional[List[str]] = None,
restart_cmd_alt: Optional[List[str]] = None,
conftest_cmd: Optional[List[str]] = None,
enmod: Optional[str] = None,
dismod: Optional[str] = None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2",
apache_bin: Optional[str] = None,
):
self.server_root = server_root
self.vhost_root = vhost_root
self.vhost_files = vhost_files
self.logs_root = logs_root
self.ctl = ctl
self.version_cmd = ['apache2ctl', '-v'] if not version_cmd else version_cmd
self.restart_cmd = ['apache2ctl', 'graceful'] if not restart_cmd else restart_cmd
self.restart_cmd_alt = restart_cmd_alt
self.conftest_cmd = ['apache2ctl', 'configtest'] if not conftest_cmd else conftest_cmd
self.enmod = enmod
self.dismod = dismod
self.le_vhost_ext = le_vhost_ext
self.handle_modules = handle_modules
self.handle_sites = handle_sites
self.challenge_location = challenge_location
self.bin = apache_bin


# TODO: Augeas sections ie. <VirtualHost>, <IfModule> beginning and closing
# tags need to be the same case, otherwise Augeas doesn't recognize them.
# This is not able to be completely remedied by regular expressions because
@@ -146,7 +100,27 @@ class ApacheConfigurator(common.Installer):
" change depending on the operating system Certbot is run on.)"
)

OS_DEFAULTS = OsOptions()
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/sites-available",
vhost_files="*",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
restart_cmd=['apache2ctl', 'graceful'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2",
bin=None
)

def option(self, key):
"""Get a value from options"""
return self.options.get(key)

def pick_apache_config(self, warn_on_no_mod_ssl=True):
"""
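This hunk is the heart of the change in this file: one side defines `OS_DEFAULTS` as an `OsOptions` instance (attribute access, per-distro overrides passed as constructor arguments), the other as a plain `dict` read back through `self.option(key)`. A simplified, standalone sketch of the two styles side by side; the class below is a cut-down stand-in, not the full `OsOptions` shown earlier in the diff:

```python
from typing import List, Optional


class DemoOsOptions:
    """Attribute-style defaults, as on one side of the hunk."""

    def __init__(self, ctl: str = "apache2ctl",
                 version_cmd: Optional[List[str]] = None) -> None:
        self.ctl = ctl
        # Mutable defaults are built inside __init__ so instances never share a list.
        self.version_cmd = ["apache2ctl", "-v"] if not version_cmd else version_cmd


# Dict-style defaults, as on the other side of the hunk.
DEMO_OS_DEFAULTS = dict(ctl="apache2ctl", version_cmd=["apache2ctl", "-v"])

opts = DemoOsOptions(ctl="apachectl")
print(opts.ctl, opts.version_cmd)   # attribute access
print(DEMO_OS_DEFAULTS["ctl"])      # key access through the dict
```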
@@ -176,14 +150,14 @@ class ApacheConfigurator(common.Installer):
for o in opts:
# Config options use dashes instead of underscores
if self.conf(o.replace("_", "-")) is not None:
setattr(self.options, o, self.conf(o.replace("_", "-")))
self.options[o] = self.conf(o.replace("_", "-"))
else:
setattr(self.options, o, getattr(self.OS_DEFAULTS, o))
self.options[o] = self.OS_DEFAULTS[o]

# Special cases
self.options.version_cmd[0] = self.options.ctl
self.options.restart_cmd[0] = self.options.ctl
self.options.conftest_cmd[0] = self.options.ctl
self.options["version_cmd"][0] = self.option("ctl")
self.options["restart_cmd"][0] = self.option("ctl")
self.options["conftest_cmd"][0] = self.option("ctl")

@classmethod
def add_parser_arguments(cls, add):
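The `_prepare_options` hunk shows the reconciliation step in both flavors: CLI flags use dashes while option names use underscores, and the chosen `ctl` binary is copied into the first element of every command list. A hedged, standalone sketch of the dict-flavored variant, with the `conf()` lookup faked by a plain dict of CLI overrides:

```python
OS_DEFAULTS = {
    "ctl": "apache2ctl",
    "version_cmd": ["apache2ctl", "-v"],
    "restart_cmd": ["apache2ctl", "graceful"],
    "conftest_cmd": ["apache2ctl", "configtest"],
}


def prepare_options(cli_values):
    """Merge CLI overrides (dash-separated keys) over the OS defaults."""
    options = {}
    for name, default in OS_DEFAULTS.items():
        cli_value = cli_values.get(name.replace("_", "-"))
        options[name] = cli_value if cli_value is not None else default
    # Special case: every command list should start with the chosen ctl binary.
    for cmd in ("version_cmd", "restart_cmd", "conftest_cmd"):
        options[cmd] = [options["ctl"]] + list(options[cmd][1:])
    return options


print(prepare_options({"ctl": "httpd"}))
```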
@@ -198,30 +172,30 @@ class ApacheConfigurator(common.Installer):
|
||||
else:
|
||||
# cls.OS_DEFAULTS can be distribution specific, see override classes
|
||||
DEFAULTS = cls.OS_DEFAULTS
|
||||
add("enmod", default=DEFAULTS.enmod,
|
||||
add("enmod", default=DEFAULTS["enmod"],
|
||||
help="Path to the Apache 'a2enmod' binary")
|
||||
add("dismod", default=DEFAULTS.dismod,
|
||||
add("dismod", default=DEFAULTS["dismod"],
|
||||
help="Path to the Apache 'a2dismod' binary")
|
||||
add("le-vhost-ext", default=DEFAULTS.le_vhost_ext,
|
||||
add("le-vhost-ext", default=DEFAULTS["le_vhost_ext"],
|
||||
help="SSL vhost configuration extension")
|
||||
add("server-root", default=DEFAULTS.server_root,
|
||||
add("server-root", default=DEFAULTS["server_root"],
|
||||
help="Apache server root directory")
|
||||
add("vhost-root", default=None,
|
||||
help="Apache server VirtualHost configuration root")
|
||||
add("logs-root", default=DEFAULTS.logs_root,
|
||||
add("logs-root", default=DEFAULTS["logs_root"],
|
||||
help="Apache server logs directory")
|
||||
add("challenge-location",
|
||||
default=DEFAULTS.challenge_location,
|
||||
default=DEFAULTS["challenge_location"],
|
||||
help="Directory path for challenge configuration")
|
||||
add("handle-modules", default=DEFAULTS.handle_modules,
|
||||
add("handle-modules", default=DEFAULTS["handle_modules"],
|
||||
help="Let installer handle enabling required modules for you " +
|
||||
"(Only Ubuntu/Debian currently)")
|
||||
add("handle-sites", default=DEFAULTS.handle_sites,
|
||||
add("handle-sites", default=DEFAULTS["handle_sites"],
|
||||
help="Let installer handle enabling sites for you " +
|
||||
"(Only Ubuntu/Debian currently)")
|
||||
add("ctl", default=DEFAULTS.ctl,
|
||||
add("ctl", default=DEFAULTS["ctl"],
|
||||
help="Full path to Apache control script")
|
||||
add("bin", default=DEFAULTS.bin,
|
||||
add("bin", default=DEFAULTS["bin"],
|
||||
help="Full path to apache2/httpd binary")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@@ -234,33 +208,33 @@ class ApacheConfigurator(common.Installer):
|
||||
version = kwargs.pop("version", None)
|
||||
use_parsernode = kwargs.pop("use_parsernode", False)
|
||||
openssl_version = kwargs.pop("openssl_version", None)
|
||||
super().__init__(*args, **kwargs)
|
||||
super(ApacheConfigurator, self).__init__(*args, **kwargs)
|
||||
|
||||
# Add name_server association dict
|
||||
self.assoc: Dict[str, obj.VirtualHost] = {}
|
||||
self.assoc = {} # type: Dict[str, obj.VirtualHost]
|
||||
# Outstanding challenges
|
||||
self._chall_out: Set[KeyAuthorizationAnnotatedChallenge] = set()
|
||||
self._chall_out = set() # type: Set[KeyAuthorizationAnnotatedChallenge]
|
||||
# List of vhosts configured per wildcard domain on this run.
|
||||
# used by deploy_cert() and enhance()
|
||||
self._wildcard_vhosts: Dict[str, List[obj.VirtualHost]] = {}
|
||||
self._wildcard_vhosts = {} # type: Dict[str, List[obj.VirtualHost]]
|
||||
# Maps enhancements to vhosts we've enabled the enhancement for
|
||||
self._enhanced_vhosts: DefaultDict[str, Set[obj.VirtualHost]] = defaultdict(set)
|
||||
self._enhanced_vhosts = defaultdict(set) # type: DefaultDict[str, Set[obj.VirtualHost]]
|
||||
# Temporary state for AutoHSTS enhancement
|
||||
self._autohsts: Dict[str, Dict[str, Union[int, float]]] = {}
|
||||
self._autohsts = {} # type: Dict[str, Dict[str, Union[int, float]]]
|
||||
# Reverter save notes
|
||||
self.save_notes = ""
|
||||
# Should we use ParserNode implementation instead of the old behavior
|
||||
self.USE_PARSERNODE = use_parsernode
|
||||
# Saves the list of file paths that were parsed initially, and
|
||||
# not added to parser tree by self.conf("vhost-root") for example.
|
||||
self.parsed_paths: List[str] = []
|
||||
self.parsed_paths = [] # type: List[str]
|
||||
# These will be set in the prepare function
|
||||
self._prepared = False
|
||||
self.parser: ApacheParser
|
||||
self.parser_root: Optional[DualBlockNode] = None
|
||||
self.parser = None
|
||||
self.parser_root = None
|
||||
self.version = version
|
||||
self._openssl_version = openssl_version
|
||||
self.vhosts: List[VirtualHost]
|
||||
self.vhosts = None
|
||||
self.options = copy.deepcopy(self.OS_DEFAULTS)
|
||||
self._enhance_func = {"redirect": self._enable_redirect,
|
||||
"ensure-http-header": self._set_http_header,
|
||||
@@ -310,8 +284,8 @@ class ApacheConfigurator(common.Installer):
|
||||
ssl_module_location = self.parser.standard_path_from_server_root(ssl_module_location)
|
||||
else:
|
||||
# Possibility B: ssl_module is statically linked into Apache
|
||||
if self.options.bin:
|
||||
ssl_module_location = self.options.bin
|
||||
if self.option("bin"):
|
||||
ssl_module_location = self.option("bin")
|
||||
else:
|
||||
logger.warning("ssl_module is statically linked but --apache-bin is "
|
||||
"missing; not disabling session tickets.")
|
||||
@@ -341,7 +315,7 @@ class ApacheConfigurator(common.Installer):
|
||||
self._prepare_options()
|
||||
|
||||
# Verify Apache is installed
|
||||
self._verify_exe_availability(self.options.ctl)
|
||||
self._verify_exe_availability(self.option("ctl"))
|
||||
|
||||
# Make sure configuration is valid
|
||||
self.config_test()
|
||||
@@ -354,9 +328,6 @@ class ApacheConfigurator(common.Installer):
|
||||
if self.version < (2, 2):
|
||||
raise errors.NotSupportedError(
|
||||
"Apache Version {0} not supported.".format(str(self.version)))
|
||||
elif self.version < (2, 4):
|
||||
logger.warning('Support for Apache 2.2 is deprecated and will be removed in a '
|
||||
'future release.')
|
||||
|
||||
# Recover from previous crash before Augeas initialization to have the
|
||||
# correct parse tree from the get go.
|
||||
@@ -369,9 +340,8 @@ class ApacheConfigurator(common.Installer):
|
||||
"augeaspath": self.parser.get_root_augpath(),
|
||||
"ac_ast": None}
|
||||
if self.USE_PARSERNODE:
|
||||
parser_root = self.get_parsernode_root(pn_meta)
|
||||
self.parser_root = parser_root
|
||||
self.parsed_paths = parser_root.parsed_paths()
|
||||
self.parser_root = self.get_parsernode_root(pn_meta)
|
||||
self.parsed_paths = self.parser_root.parsed_paths()
|
||||
|
||||
# Check for errors in parsing files with Augeas
|
||||
self.parser.check_parsing_errors("httpd.aug")
|
||||
@@ -381,20 +351,20 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
# We may try to enable mod_ssl later. If so, we shouldn't warn if we can't find it now.
|
||||
# This is currently only true for debian/ubuntu.
|
||||
warn_on_no_mod_ssl = not self.options.handle_modules
|
||||
warn_on_no_mod_ssl = not self.option("handle_modules")
|
||||
self.install_ssl_options_conf(self.mod_ssl_conf,
|
||||
self.updated_mod_ssl_conf_digest,
|
||||
warn_on_no_mod_ssl)
|
||||
|
||||
# Prevent two Apache plugins from modifying a config at once
|
||||
try:
|
||||
util.lock_dir_until_exit(self.options.server_root)
|
||||
util.lock_dir_until_exit(self.option("server_root"))
|
||||
except (OSError, errors.LockError):
|
||||
logger.debug("Encountered error:", exc_info=True)
|
||||
raise errors.PluginError(
|
||||
"Unable to create a lock file in {0}. Are you running"
|
||||
" Certbot with sufficient privileges to modify your"
|
||||
" Apache configuration?".format(self.options.server_root))
|
||||
" Apache configuration?".format(self.option("server_root")))
|
||||
self._prepared = True
|
||||
|
||||
def save(self, title=None, temporary=False):
|
||||
@@ -430,10 +400,10 @@ class ApacheConfigurator(common.Installer):
|
||||
:raises .errors.PluginError: If unable to recover the configuration
|
||||
|
||||
"""
|
||||
super().recovery_routine()
|
||||
super(ApacheConfigurator, self).recovery_routine()
|
||||
# Reload configuration after these changes take effect if needed
|
||||
# ie. ApacheParser has been initialized.
|
||||
if hasattr(self, "parser"):
|
||||
if self.parser:
|
||||
# TODO: wrap into non-implementation specific parser interface
|
||||
self.parser.aug.load()
|
||||
|
||||
@@ -455,7 +425,7 @@ class ApacheConfigurator(common.Installer):
|
||||
the function is unable to correctly revert the configuration
|
||||
|
||||
"""
|
||||
super().rollback_checkpoints(rollback)
|
||||
super(ApacheConfigurator, self).rollback_checkpoints(rollback)
|
||||
self.parser.aug.load()
|
||||
|
||||
def _verify_exe_availability(self, exe):
|
||||
@@ -469,7 +439,7 @@ class ApacheConfigurator(common.Installer):
|
||||
"""Initializes the ApacheParser"""
|
||||
# If user provided vhost_root value in command line, use it
|
||||
return parser.ApacheParser(
|
||||
self.options.server_root, self.conf("vhost-root"),
|
||||
self.option("server_root"), self.conf("vhost-root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def get_parsernode_root(self, metadata):
|
||||
@@ -477,9 +447,9 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
if HAS_APACHECONFIG:
|
||||
apache_vars = {}
|
||||
apache_vars["defines"] = apache_util.parse_defines(self.options.ctl)
|
||||
apache_vars["includes"] = apache_util.parse_includes(self.options.ctl)
|
||||
apache_vars["modules"] = apache_util.parse_modules(self.options.ctl)
|
||||
apache_vars["defines"] = apache_util.parse_defines(self.option("ctl"))
|
||||
apache_vars["includes"] = apache_util.parse_includes(self.option("ctl"))
|
||||
apache_vars["modules"] = apache_util.parse_modules(self.option("ctl"))
|
||||
metadata["apache_vars"] = apache_vars
|
||||
|
||||
with open(self.parser.loc["root"]) as f:
|
||||
@@ -494,6 +464,21 @@ class ApacheConfigurator(common.Installer):
|
||||
metadata=metadata
|
||||
)
|
||||
|
||||
def _wildcard_domain(self, domain):
|
||||
"""
|
||||
Checks if domain is a wildcard domain
|
||||
|
||||
:param str domain: Domain to check
|
||||
|
||||
:returns: If the domain is wildcard domain
|
||||
:rtype: bool
|
||||
"""
|
||||
if isinstance(domain, six.text_type):
|
||||
wildcard_marker = u"*."
|
||||
else:
|
||||
wildcard_marker = b"*."
|
||||
return domain.startswith(wildcard_marker)
|
||||
|
||||
def deploy_cert(self, domain, cert_path, key_path,
|
||||
chain_path=None, fullchain_path=None):
|
||||
"""Deploys certificate to specified virtual host.
|
||||
@@ -528,7 +513,7 @@ class ApacheConfigurator(common.Installer):
|
||||
:rtype: `list` of :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||
"""
|
||||
|
||||
if util.is_wildcard_domain(domain):
|
||||
if self._wildcard_domain(domain):
|
||||
if domain in self._wildcard_vhosts:
|
||||
# Vhosts for a wildcard domain were already selected
|
||||
return self._wildcard_vhosts[domain]
|
||||
@@ -860,7 +845,7 @@ class ApacheConfigurator(common.Installer):
|
||||
:rtype: set
|
||||
|
||||
"""
|
||||
all_names: Set[str] = set()
|
||||
all_names = set() # type: Set[str]
|
||||
|
||||
vhost_macro = []
|
||||
|
||||
@@ -1024,8 +1009,8 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
"""
|
||||
# Search base config, and all included paths for VirtualHosts
|
||||
file_paths: Dict[str, str] = {}
|
||||
internal_paths: DefaultDict[str, Set[str]] = defaultdict(set)
|
||||
file_paths = {} # type: Dict[str, str]
|
||||
internal_paths = defaultdict(set) # type: DefaultDict[str, Set[str]]
|
||||
vhs = []
|
||||
# Make a list of parser paths because the parser_paths
|
||||
# dictionary may be modified during the loop.
|
||||
@@ -1076,9 +1061,6 @@ class ApacheConfigurator(common.Installer):
|
||||
:rtype: list
|
||||
"""
|
||||
|
||||
if not self.parser_root:
|
||||
raise errors.Error("This ApacheConfigurator instance is not" # pragma: no cover
|
||||
" configured to use a node parser.")
|
||||
vhs = []
|
||||
vhosts = self.parser_root.find_blocks("VirtualHost", exclude=False)
|
||||
for vhblock in vhosts:
|
||||
@@ -1331,7 +1313,7 @@ class ApacheConfigurator(common.Installer):
|
||||
:param boolean temp: If the change is temporary
|
||||
"""
|
||||
|
||||
if self.options.handle_modules:
|
||||
if self.option("handle_modules"):
|
||||
if self.version >= (2, 4) and ("socache_shmcb_module" not in
|
||||
self.parser.modules):
|
||||
self.enable_mod("socache_shmcb", temp=temp)
|
||||
@@ -1351,7 +1333,7 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
Duplicates vhost and adds default ssl options
|
||||
New vhost will reside as (nonssl_vhost.path) +
|
||||
``self.options.le_vhost_ext``
|
||||
``self.option("le_vhost_ext")``
|
||||
|
||||
.. note:: This function saves the configuration
|
||||
|
||||
@@ -1450,15 +1432,15 @@ class ApacheConfigurator(common.Installer):
|
||||
"""
|
||||
|
||||
if self.conf("vhost-root") and os.path.exists(self.conf("vhost-root")):
|
||||
fp = os.path.join(filesystem.realpath(self.options.vhost_root),
|
||||
fp = os.path.join(filesystem.realpath(self.option("vhost_root")),
|
||||
os.path.basename(non_ssl_vh_fp))
|
||||
else:
|
||||
# Use non-ssl filepath
|
||||
fp = filesystem.realpath(non_ssl_vh_fp)
|
||||
|
||||
if fp.endswith(".conf"):
|
||||
return fp[:-(len(".conf"))] + self.options.le_vhost_ext
|
||||
return fp + self.options.le_vhost_ext
|
||||
return fp[:-(len(".conf"))] + self.option("le_vhost_ext")
|
||||
return fp + self.option("le_vhost_ext")
|
||||
|
||||
def _sift_rewrite_rule(self, line):
|
||||
"""Decides whether a line should be copied to a SSL vhost.
|
||||
@@ -1480,7 +1462,7 @@ class ApacheConfigurator(common.Installer):
|
||||
if not line.lower().lstrip().startswith("rewriterule"):
|
||||
return False
|
||||
|
||||
# According to: https://httpd.apache.org/docs/2.4/rewrite/flags.html
|
||||
# According to: http://httpd.apache.org/docs/2.4/rewrite/flags.html
|
||||
# The syntax of a RewriteRule is:
|
||||
# RewriteRule pattern target [Flag1,Flag2,Flag3]
|
||||
# i.e. target is required, so it must exist.
|
||||
@@ -2187,7 +2169,7 @@ class ApacheConfigurator(common.Installer):
|
||||
# There can be other RewriteRule directive lines in vhost config.
|
||||
# rewrite_args_dict keys are directive ids and the corresponding value
|
||||
# for each is a list of arguments to that directive.
|
||||
rewrite_args_dict: DefaultDict[str, List[str]] = defaultdict(list)
|
||||
rewrite_args_dict = defaultdict(list) # type: DefaultDict[str, List[str]]
|
||||
pat = r'(.*directive\[\d+\]).*'
|
||||
for match in rewrite_path:
|
||||
m = re.match(pat, match)
|
||||
@@ -2281,7 +2263,7 @@ class ApacheConfigurator(common.Installer):
|
||||
if ssl_vhost.aliases:
|
||||
serveralias = "ServerAlias " + " ".join(ssl_vhost.aliases)
|
||||
|
||||
rewrite_rule_args: List[str] = []
|
||||
rewrite_rule_args = [] # type: List[str]
|
||||
if self.get_version() >= (2, 3, 9):
|
||||
rewrite_rule_args = constants.REWRITE_HTTPS_ARGS_WITH_END
|
||||
else:
|
||||
@@ -2302,7 +2284,7 @@ class ApacheConfigurator(common.Installer):
|
||||
addr in self._get_proposed_addrs(ssl_vhost)),
|
||||
servername, serveralias,
|
||||
" ".join(rewrite_rule_args),
|
||||
self.options.logs_root))
|
||||
self.option("logs_root")))
|
||||
|
||||
def _write_out_redirect(self, ssl_vhost, text):
|
||||
# This is the default name
|
||||
@@ -2314,7 +2296,7 @@ class ApacheConfigurator(common.Installer):
|
||||
if len(ssl_vhost.name) < (255 - (len(redirect_filename) + 1)):
|
||||
redirect_filename = "le-redirect-%s.conf" % ssl_vhost.name
|
||||
|
||||
redirect_filepath = os.path.join(self.options.vhost_root,
|
||||
redirect_filepath = os.path.join(self.option("vhost_root"),
|
||||
redirect_filename)
|
||||
|
||||
# Register the new file that will be created
|
||||
@@ -2434,18 +2416,19 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
"""
|
||||
try:
|
||||
util.run_script(self.options.restart_cmd)
|
||||
util.run_script(self.option("restart_cmd"))
|
||||
except errors.SubprocessError as err:
|
||||
logger.info("Unable to restart apache using %s",
|
||||
self.options.restart_cmd)
|
||||
alt_restart = self.options.restart_cmd_alt
|
||||
self.option("restart_cmd"))
|
||||
alt_restart = self.option("restart_cmd_alt")
|
||||
if alt_restart:
|
||||
logger.debug("Trying alternative restart command: %s",
|
||||
alt_restart)
|
||||
# There is an alternative restart command available
|
||||
# This usually is "restart" verb while original is "graceful"
|
||||
try:
|
||||
util.run_script(self.options.restart_cmd_alt)
|
||||
util.run_script(self.option(
|
||||
"restart_cmd_alt"))
|
||||
return
|
||||
except errors.SubprocessError as secerr:
|
||||
error = str(secerr)
|
||||
@@ -2460,7 +2443,7 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
"""
|
||||
try:
|
||||
util.run_script(self.options.conftest_cmd)
|
||||
util.run_script(self.option("conftest_cmd"))
|
||||
except errors.SubprocessError as err:
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
@@ -2476,11 +2459,11 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
"""
|
||||
try:
|
||||
stdout, _ = util.run_script(self.options.version_cmd)
|
||||
stdout, _ = util.run_script(self.option("version_cmd"))
|
||||
except errors.SubprocessError:
|
||||
raise errors.PluginError(
|
||||
"Unable to run %s -v" %
|
||||
self.options.version_cmd)
|
||||
self.option("version_cmd"))
|
||||
|
||||
regex = re.compile(r"Apache/([0-9\.]*)", re.IGNORECASE)
|
||||
matches = regex.findall(stdout)
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
""" Dual ParserNode implementation """
|
||||
from certbot_apache._internal import apacheparser
|
||||
from certbot_apache._internal import assertions
|
||||
from certbot_apache._internal import augeasparser
|
||||
from certbot_apache._internal import apacheparser
|
||||
|
||||
|
||||
class DualNodeBase:
|
||||
class DualNodeBase(object):
|
||||
""" Dual parser interface for in development testing. This is used as the
|
||||
base class for dual parser interface classes. This class handles runtime
|
||||
attribute value assertions."""
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
"""A class that performs HTTP-01 challenges for Apache"""
|
||||
import errno
|
||||
import logging
|
||||
from typing import List
|
||||
from typing import Set
|
||||
import errno
|
||||
|
||||
from acme.magic_typing import List
|
||||
from acme.magic_typing import Set
|
||||
from certbot import errors
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
@@ -47,7 +47,7 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
super(ApacheHttp01, self).__init__(*args, **kwargs)
|
||||
self.challenge_conf_pre = os.path.join(
|
||||
self.configurator.conf("challenge-location"),
|
||||
"le_http_01_challenge_pre.conf")
|
||||
@@ -57,7 +57,7 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
self.challenge_dir = os.path.join(
|
||||
self.configurator.config.work_dir,
|
||||
"http_challenges")
|
||||
self.moded_vhosts: Set[VirtualHost] = set()
|
||||
self.moded_vhosts = set() # type: Set[VirtualHost]
|
||||
|
||||
def perform(self):
|
||||
"""Perform all HTTP-01 challenges."""
|
||||
@@ -93,7 +93,7 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
self.configurator.enable_mod(mod, temp=True)
|
||||
|
||||
def _mod_config(self):
|
||||
selected_vhosts: List[VirtualHost] = []
|
||||
selected_vhosts = [] # type: List[VirtualHost]
|
||||
http_port = str(self.configurator.config.http01_port)
|
||||
for chall in self.achalls:
|
||||
# Search for matching VirtualHosts
|
||||
@@ -169,7 +169,7 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
|
||||
def _set_up_challenges(self):
|
||||
if not os.path.isdir(self.challenge_dir):
|
||||
old_umask = filesystem.umask(0o022)
|
||||
old_umask = os.umask(0o022)
|
||||
try:
|
||||
filesystem.makedirs(self.challenge_dir, 0o755)
|
||||
except OSError as exception:
|
||||
@@ -177,7 +177,7 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
raise errors.PluginError(
|
||||
"Couldn't create root for http-01 challenge")
|
||||
finally:
|
||||
filesystem.umask(old_umask)
|
||||
os.umask(old_umask)
|
||||
|
||||
responses = []
|
||||
for achall in self.achalls:
|
||||
|
||||
@@ -100,9 +100,12 @@ For this reason the internal representation of data should not ignore the case.
"""

import abc
import six


class ParserNode(object, metaclass=abc.ABCMeta):

@six.add_metaclass(abc.ABCMeta)
class ParserNode(object):
"""
ParserNode is the basic building block of the tree of such nodes,
representing the structure of the configuration. It is largely meant to keep
@@ -201,7 +204,9 @@ class ParserNode(object, metaclass=abc.ABCMeta):
"""


class CommentNode(ParserNode, metaclass=abc.ABCMeta):
# Linter rule exclusion done because of https://github.com/PyCQA/pylint/issues/179
@six.add_metaclass(abc.ABCMeta) # pylint: disable=abstract-method
class CommentNode(ParserNode):
"""
CommentNode class is used for representation of comments within the parsed
configuration structure. Because of the nature of comments, it is not able
@@ -238,13 +243,14 @@ class CommentNode(ParserNode, metaclass=abc.ABCMeta):
created or changed after the last save. Default: False.
:type dirty: bool
"""
super().__init__(ancestor=kwargs['ancestor'],
super(CommentNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {})) # pragma: no cover


class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class DirectiveNode(ParserNode):
"""
DirectiveNode class represents a configuration directive within the configuration.
It can have zero or more parameters attached to it. Because of the nature of
@@ -302,7 +308,7 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
:type enabled: bool

"""
super().__init__(ancestor=kwargs['ancestor'],
super(DirectiveNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {})) # pragma: no cover
@@ -319,7 +325,8 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
"""


class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class BlockNode(DirectiveNode):
"""
BlockNode class represents a block of nested configuration directives, comments
and other blocks as its children. A BlockNode can have zero or more parameters

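These interface hunks swap between the Python 3 `metaclass=` keyword and `six.add_metaclass`, which is how an abstract base class must be declared when the module still has to import under Python 2. Both spellings below produce a class that refuses direct instantiation; `six` is only needed for the second and is assumed to be installed:

```python
import abc

import six  # only required for the Python 2 compatible spelling


class AbstractNode3(object, metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def save(self, msg):
        """Persist changes."""


@six.add_metaclass(abc.ABCMeta)
class AbstractNode2(object):
    @abc.abstractmethod
    def save(self, msg):
        """Persist changes."""


for cls in (AbstractNode3, AbstractNode2):
    try:
        cls()
    except TypeError as err:
        # Both fail the same way: abstract method 'save' is not implemented.
        print(type(err).__name__, "-", err)
```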
@@ -1,7 +1,7 @@
|
||||
"""Module contains classes used by the Apache Configurator."""
|
||||
import re
|
||||
from typing import Set
|
||||
|
||||
from acme.magic_typing import Set
|
||||
from certbot.plugins import common
|
||||
|
||||
|
||||
@@ -20,13 +20,16 @@ class Addr(common.Addr):
|
||||
self.is_wildcard() and other.is_wildcard()))
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __repr__(self):
|
||||
return "certbot_apache._internal.obj.Addr(" + repr(self.tup) + ")"
|
||||
|
||||
def __hash__(self): # pylint: disable=useless-super-delegation
|
||||
# Python 3 requires explicit overridden for __hash__ if __eq__ or
|
||||
# __cmp__ is overridden. See https://bugs.python.org/issue2235
|
||||
return super().__hash__()
|
||||
return super(Addr, self).__hash__()
|
||||
|
||||
def _addr_less_specific(self, addr):
|
||||
"""Returns if addr.get_addr() is more specific than self.get_addr()."""
|
||||
@@ -95,7 +98,7 @@ class Addr(common.Addr):
|
||||
return self.get_addr_obj(port)
|
||||
|
||||
|
||||
class VirtualHost:
|
||||
class VirtualHost(object):
|
||||
"""Represents an Apache Virtualhost.
|
||||
|
||||
:ivar str filep: file path of VH
|
||||
@@ -137,7 +140,7 @@ class VirtualHost:
|
||||
|
||||
def get_names(self):
|
||||
"""Return a set of all names."""
|
||||
all_names: Set[str] = set()
|
||||
all_names = set() # type: Set[str]
|
||||
all_names.update(self.aliases)
|
||||
# Strip out any scheme:// and <port> field from servername
|
||||
if self.name is not None:
|
||||
@@ -188,6 +191,9 @@ class VirtualHost:
|
||||
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.filep, self.path,
|
||||
tuple(self.addrs), tuple(self.get_names()),
|
||||
@@ -245,7 +251,7 @@ class VirtualHost:
|
||||
|
||||
# already_found acts to keep everything very conservative.
|
||||
# Don't allow multiple ip:ports in same set.
|
||||
already_found: Set[str] = set()
|
||||
already_found = set() # type: Set[str]
|
||||
|
||||
for addr in vhost.addrs:
|
||||
for local_addr in self.addrs:
|
||||
|
||||
@@ -3,14 +3,13 @@ import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class ArchConfigurator(configurator.ApacheConfigurator):
|
||||
"""Arch Linux specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf",
|
||||
vhost_files="*.conf",
|
||||
@@ -19,5 +18,11 @@ class ArchConfigurator(configurator.ApacheConfigurator):
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
|
||||
import logging
|
||||
from typing import cast
|
||||
from typing import List
|
||||
|
||||
import zope.interface
|
||||
|
||||
from acme.magic_typing import List
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
@@ -12,7 +11,6 @@ from certbot.errors import MisconfigurationError
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -21,7 +19,7 @@ logger = logging.getLogger(__name__)
|
||||
class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
"""CentOS specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf.d",
|
||||
vhost_files="*.conf",
|
||||
@@ -31,7 +29,13 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf.d",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
def config_test(self):
|
||||
@@ -46,7 +50,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
fedora = os_info[0].lower() == "fedora"
|
||||
|
||||
try:
|
||||
super().config_test()
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
except errors.MisconfigurationError:
|
||||
if fedora:
|
||||
self._try_restart_fedora()
|
||||
@@ -64,22 +68,20 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super().config_test()
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization in order to support
|
||||
alternative restart cmd used in CentOS.
|
||||
"""
|
||||
super()._prepare_options()
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for CentOS.")
|
||||
self.options.restart_cmd_alt[0] = self.options.ctl
|
||||
super(CentOSConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd_alt"][0] = self.option("ctl")
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return CentOSParser(
|
||||
self.options.server_root, self.options.vhost_root,
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _deploy_cert(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
@@ -88,7 +90,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
|
||||
that was created by Certbot
|
||||
"""
|
||||
super()._deploy_cert(*args, **kwargs)
|
||||
super(CentOSConfigurator, self)._deploy_cert(*args, **kwargs)
|
||||
if self.version < (2, 4, 0):
|
||||
self._deploy_loadmodule_ssl_if_needed()
|
||||
|
||||
@@ -100,9 +102,9 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
|
||||
loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
correct_ifmods: List[str] = []
|
||||
loadmod_args: List[str] = []
|
||||
loadmod_paths: List[str] = []
|
||||
correct_ifmods = [] # type: List[str]
|
||||
loadmod_args = [] # type: List[str]
|
||||
loadmod_paths = [] # type: List[str]
|
||||
for m in loadmods:
|
||||
noarg_path = m.rpartition("/")[0]
|
||||
path_args = self.parser.get_all_args(noarg_path)
|
||||
@@ -116,9 +118,8 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
else:
|
||||
loadmod_args = path_args
|
||||
|
||||
centos_parser: CentOSParser = cast(CentOSParser, self.parser)
|
||||
if centos_parser.not_modssl_ifmodule(noarg_path):
|
||||
if centos_parser.loc["default"] in noarg_path:
|
||||
if self.parser.not_modssl_ifmodule(noarg_path): # pylint: disable=no-member
|
||||
if self.parser.loc["default"] in noarg_path:
|
||||
# LoadModule already in the main configuration file
|
||||
if ("ifmodule/" in noarg_path.lower() or
|
||||
"ifmodule[1]" in noarg_path.lower()):
|
||||
@@ -166,12 +167,12 @@ class CentOSParser(parser.ApacheParser):
|
||||
def __init__(self, *args, **kwargs):
|
||||
# CentOS specific configuration file for Apache
|
||||
self.sysconfig_filep = "/etc/sysconfig/httpd"
|
||||
super().__init__(*args, **kwargs)
|
||||
super(CentOSParser, self).__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
# Opportunistic, works if SELinux not enforced
|
||||
super().update_runtime_variables()
|
||||
super(CentOSParser, self).update_runtime_variables()
|
||||
self.parse_sysconfig_var()
|
||||
|
||||
def parse_sysconfig_var(self):
|
||||
|
||||
@@ -3,19 +3,26 @@ import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class DarwinConfigurator(configurator.ApacheConfigurator):
|
||||
"""macOS specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/other",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apachectl",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2/other",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
@@ -10,7 +10,6 @@ from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -19,11 +18,22 @@ logger = logging.getLogger(__name__)
|
||||
class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
"""Debian specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/sites-available",
|
||||
vhost_files="*",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
enmod="a2enmod",
|
||||
dismod="a2dismod",
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=True,
|
||||
handle_sites=True,
|
||||
challenge_location="/etc/apache2",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
def enable_site(self, vhost):
|
||||
@@ -48,7 +58,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
if not os.path.isdir(os.path.dirname(enabled_path)):
|
||||
# For some reason, sites-enabled / sites-available do not exist
|
||||
# Call the parent method
|
||||
return super().enable_site(vhost)
|
||||
return super(DebianConfigurator, self).enable_site(vhost)
|
||||
self.reverter.register_file_creation(False, enabled_path)
|
||||
try:
|
||||
os.symlink(vhost.filep, enabled_path)
|
||||
@@ -122,11 +132,11 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
# Generate reversal command.
|
||||
# Try to be safe here... check that we can probably reverse before
|
||||
# applying enmod command
|
||||
if not util.exe_exists(self.options.dismod):
|
||||
if not util.exe_exists(self.option("dismod")):
|
||||
raise errors.MisconfigurationError(
|
||||
"Unable to find a2dismod, please make sure a2enmod and "
|
||||
"a2dismod are configured correctly for certbot.")
|
||||
|
||||
self.reverter.register_undo_command(
|
||||
temp, [self.options.dismod, "-f", mod_name])
|
||||
util.run_script([self.options.enmod, mod_name])
|
||||
temp, [self.option("dismod"), "-f", mod_name])
|
||||
util.run_script([self.option("enmod"), mod_name])
|
||||
|
||||
@@ -7,14 +7,13 @@ from certbot import util
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
"""Fedora 29+ specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf.d",
|
||||
vhost_files="*.conf",
|
||||
@@ -24,7 +23,13 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf.d",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
def config_test(self):
|
||||
@@ -35,14 +40,14 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
during the first (re)start of httpd.
|
||||
"""
|
||||
try:
|
||||
super().config_test()
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
except errors.MisconfigurationError:
|
||||
self._try_restart_fedora()
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return FedoraParser(
|
||||
self.options.server_root, self.options.vhost_root,
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _try_restart_fedora(self):
|
||||
@@ -55,7 +60,7 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super().config_test()
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
@@ -63,12 +68,10 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
instead of httpd and so take advantages of this new bash script in newer versions
|
||||
of Fedora to restart httpd.
|
||||
"""
|
||||
super()._prepare_options()
|
||||
self.options.restart_cmd[0] = 'apachectl'
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for Fedora.")
|
||||
self.options.restart_cmd_alt[0] = 'apachectl'
|
||||
self.options.conftest_cmd[0] = 'apachectl'
|
||||
super(FedoraConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd"][0] = 'apachectl'
|
||||
self.options["restart_cmd_alt"][0] = 'apachectl'
|
||||
self.options["conftest_cmd"][0] = 'apachectl'
|
||||
|
||||
|
||||
class FedoraParser(parser.ApacheParser):
|
||||
@@ -76,12 +79,12 @@ class FedoraParser(parser.ApacheParser):
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Fedora 29+ specific configuration file for Apache
|
||||
self.sysconfig_filep = "/etc/sysconfig/httpd"
|
||||
super().__init__(*args, **kwargs)
|
||||
super(FedoraParser, self).__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
# Opportunistic, works if SELinux not enforced
|
||||
super().update_runtime_variables()
|
||||
super(FedoraParser, self).update_runtime_variables()
|
||||
self._parse_sysconfig_var()
|
||||
|
||||
def _parse_sysconfig_var(self):
|
||||
|
||||
@@ -5,19 +5,29 @@ from certbot import interfaces
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import parser
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class GentooConfigurator(configurator.ApacheConfigurator):
|
||||
"""Gentoo specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/vhosts.d",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
restart_cmd_alt=['apache2ctl', 'restart'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2/vhosts.d",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
def _prepare_options(self):
|
||||
@@ -25,15 +35,13 @@ class GentooConfigurator(configurator.ApacheConfigurator):
|
||||
Override the options dictionary initialization in order to support
|
||||
alternative restart cmd used in Gentoo.
|
||||
"""
|
||||
super()._prepare_options()
|
||||
if not self.options.restart_cmd_alt: # pragma: no cover
|
||||
raise ValueError("OS option restart_cmd_alt must be set for Gentoo.")
|
||||
self.options.restart_cmd_alt[0] = self.options.ctl
|
||||
super(GentooConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd_alt"][0] = self.option("ctl")
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return GentooParser(
|
||||
self.options.server_root, self.options.vhost_root,
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
|
||||
@@ -42,7 +50,7 @@ class GentooParser(parser.ApacheParser):
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Gentoo specific configuration file for Apache2
|
||||
self.apacheconfig_filep = "/etc/conf.d/apache2"
|
||||
super().__init__(*args, **kwargs)
|
||||
super(GentooParser, self).__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
@@ -58,7 +66,7 @@ class GentooParser(parser.ApacheParser):
|
||||
|
||||
def update_modules(self):
|
||||
"""Get loaded modules from httpd process, and add them to DOM"""
|
||||
mod_cmd = [self.configurator.options.ctl, "modules"]
|
||||
mod_cmd = [self.configurator.option("ctl"), "modules"]
|
||||
matches = apache_util.parse_from_subprocess(mod_cmd, r"(.*)_module")
|
||||
for mod in matches:
|
||||
self.add_mod(mod.strip())
|
||||
|
||||
@@ -3,21 +3,26 @@ import zope.interface
|
||||
|
||||
from certbot import interfaces
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal.configurator import OsOptions
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class OpenSUSEConfigurator(configurator.ApacheConfigurator):
|
||||
"""OpenSUSE specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = OsOptions(
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
vhost_root="/etc/apache2/vhosts.d",
|
||||
vhost_files="*.conf",
|
||||
ctl="apachectl",
|
||||
version_cmd=['apachectl', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
logs_root="/var/log/apache2",
|
||||
ctl="apache2ctl",
|
||||
version_cmd=['apache2ctl', '-v'],
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
enmod="a2enmod",
|
||||
dismod="a2dismod",
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2/vhosts.d",
|
||||
bin=None,
|
||||
)
|
||||
|
||||
@@ -3,24 +3,21 @@ import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import re
|
||||
from typing import Dict
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
import sys
|
||||
|
||||
import six
|
||||
|
||||
from acme.magic_typing import Dict
|
||||
from acme.magic_typing import List
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
from certbot_apache._internal import apache_util
|
||||
from certbot_apache._internal import constants
|
||||
|
||||
try:
|
||||
from augeas import Augeas
|
||||
except ImportError: # pragma: no cover
|
||||
Augeas = None # type: ignore
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApacheParser:
|
||||
class ApacheParser(object):
|
||||
"""Class handles the fine details of parsing the Apache Configuration.
|
||||
|
||||
.. todo:: Make parsing general... remove sites-available etc...
|
||||
@@ -45,7 +42,8 @@ class ApacheParser:
|
||||
self.configurator = configurator
|
||||
|
||||
# Initialize augeas
|
||||
self.aug = init_augeas()
|
||||
self.aug = None
|
||||
self.init_augeas()
|
||||
|
||||
if not self.check_aug_version():
|
||||
raise errors.NotSupportedError(
|
||||
@@ -53,9 +51,9 @@ class ApacheParser:
|
||||
"version 1.2.0 or higher, please make sure you have you have "
|
||||
"those installed.")
|
||||
|
||||
self.modules: Dict[str, Optional[str]] = {}
|
||||
self.parser_paths: Dict[str, List[str]] = {}
|
||||
self.variables: Dict[str, str] = {}
|
||||
self.modules = {} # type: Dict[str, str]
|
||||
self.parser_paths = {} # type: Dict[str, List[str]]
|
||||
self.variables = {} # type: Dict[str, str]
|
||||
|
||||
# Find configuration root and make sure augeas can parse it.
|
||||
self.root = os.path.abspath(root)
|
||||
@@ -81,13 +79,30 @@ class ApacheParser:
|
||||
# Must also attempt to parse additional virtual host root
|
||||
if vhostroot:
|
||||
self.parse_file(os.path.abspath(vhostroot) + "/" +
|
||||
self.configurator.options.vhost_files)
|
||||
self.configurator.option("vhost_files"))
|
||||
|
||||
# check to see if there were unparsed define statements
|
||||
if version < (2, 4):
|
||||
if self.find_dir("Define", exclude=False):
|
||||
raise errors.PluginError("Error parsing runtime variables")
|
||||
|
||||
def init_augeas(self):
|
||||
""" Initialize the actual Augeas instance """
|
||||
|
||||
try:
|
||||
import augeas
|
||||
except ImportError: # pragma: no cover
|
||||
raise errors.NoInstallationError("Problem in Augeas installation")
|
||||
|
||||
self.aug = augeas.Augeas(
|
||||
# specify a directory to load our preferred lens from
|
||||
loadpath=constants.AUGEAS_LENS_DIR,
|
||||
# Do not save backup (we do it ourselves), do not load
|
||||
# anything by default
|
||||
flags=(augeas.Augeas.NONE |
|
||||
augeas.Augeas.NO_MODL_AUTOLOAD |
|
||||
augeas.Augeas.ENABLE_SPAN))
|
||||
|
||||
def check_parsing_errors(self, lens):
|
||||
"""Verify Augeas can parse all of the lens files.
|
||||
|
||||
@@ -251,7 +266,7 @@ class ApacheParser:
the iteration issue. Else... parse and enable mods at same time.

"""
mods: Dict[str, str] = {}
mods = {} # type: Dict[str, str]
matches = self.find_dir("LoadModule")
iterator = iter(matches)
# Make sure prev_size != cur_size for do: while: iteration
@@ -260,7 +275,7 @@ class ApacheParser:
while len(mods) != prev_size:
prev_size = len(mods)

for match_name, match_filename in zip(
for match_name, match_filename in six.moves.zip(
iterator, iterator):
mod_name = self.get_arg(match_name)
mod_filename = self.get_arg(match_filename)
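The LoadModule hunk pairs consecutive Augeas matches by passing the same iterator to `zip` twice (plain `zip` on one side, `six.moves.zip` on the other): each `LoadModule` directive yields a module-name match followed by a filename match. A standalone illustration of that pairing trick:

```python
matches = [
    "ssl_module", "modules/mod_ssl.so",
    "rewrite_module", "modules/mod_rewrite.so",
]

iterator = iter(matches)
# zip(iterator, iterator) consumes two items per loop turn, so consecutive
# elements come out as (name, filename) pairs.
for mod_name, mod_filename in zip(iterator, iterator):
    print(mod_name, "->", mod_filename)
```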
@@ -282,7 +297,7 @@ class ApacheParser:
|
||||
def update_defines(self):
|
||||
"""Updates the dictionary of known variables in the configuration"""
|
||||
|
||||
self.variables = apache_util.parse_defines(self.configurator.options.ctl)
|
||||
self.variables = apache_util.parse_defines(self.configurator.option("ctl"))
|
||||
|
||||
def update_includes(self):
|
||||
"""Get includes from httpd process, and add them to DOM if needed"""
|
||||
@@ -292,7 +307,7 @@ class ApacheParser:
|
||||
# configuration files
|
||||
_ = self.find_dir("Include")
|
||||
|
||||
matches = apache_util.parse_includes(self.configurator.options.ctl)
|
||||
matches = apache_util.parse_includes(self.configurator.option("ctl"))
|
||||
if matches:
|
||||
for i in matches:
|
||||
if not self.parsed_in_current(i):
|
||||
@@ -301,7 +316,7 @@ class ApacheParser:
|
||||
def update_modules(self):
|
||||
"""Get loaded modules from httpd process, and add them to DOM"""
|
||||
|
||||
matches = apache_util.parse_modules(self.configurator.options.ctl)
|
||||
matches = apache_util.parse_modules(self.configurator.option("ctl"))
|
||||
for mod in matches:
|
||||
self.add_mod(mod.strip())
|
||||
|
||||
@@ -538,7 +553,7 @@ class ApacheParser:
|
||||
else:
|
||||
arg_suffix = "/*[self::arg=~regexp('%s')]" % case_i(arg)
|
||||
|
||||
ordered_matches: List[str] = []
|
||||
ordered_matches = [] # type: List[str]
|
||||
|
||||
# TODO: Wildcards should be included in alphabetical order
|
||||
# https://httpd.apache.org/docs/2.4/mod/core.html#include
|
||||
@@ -716,6 +731,7 @@ class ApacheParser:
privileged users.

https://apr.apache.org/docs/apr/2.0/apr__fnmatch_8h_source.html
http://apache2.sourcearchive.com/documentation/2.2.16-6/apr__fnmatch_8h_source.html

:param str clean_fn_match: Apache style filename match, like globs

@@ -723,6 +739,9 @@ class ApacheParser:
:rtype: str

"""
if sys.version_info < (3, 6):
# This strips off final /Z(?ms)
return fnmatch.translate(clean_fn_match)[:-7] # pragma: no cover
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover

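The slicing in the hunk above exists because `fnmatch.translate` changed its output format in Python 3.6: it now wraps the pattern in `(?s:...)\Z` instead of appending `\Z(?ms)`. A quick check of what the slice removes on a modern interpreter (the exact string may differ between Python versions, which is precisely why the version guard is there):

```python
import fnmatch

pattern = fnmatch.translate("*.load")
print(pattern)        # e.g. (?s:.*\.load)\Z on Python 3.6+
print(pattern[4:-3])  # strips the (?s: prefix and )\Z suffix -> .*\.load
```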
@@ -780,7 +799,7 @@ class ApacheParser:
|
||||
def _parsed_by_parser_paths(self, filep, paths):
|
||||
"""Helper function that searches through provided paths and returns
|
||||
True if file path is found in the set"""
|
||||
for directory in paths:
|
||||
for directory in paths.keys():
|
||||
for filename in paths[directory]:
|
||||
if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
|
||||
return True
|
||||
@@ -937,19 +956,3 @@ def get_aug_path(file_path):

"""
return "/files%s" % file_path


def init_augeas() -> Augeas:
""" Initialize the actual Augeas instance """

if not Augeas: # pragma: no cover
raise errors.NoInstallationError("Problem in Augeas installation")

return Augeas(
# specify a directory to load our preferred lens from
loadpath=constants.AUGEAS_LENS_DIR,
# Do not save backup (we do it ourselves), do not load
# anything by default
flags=(Augeas.NONE |
Augeas.NO_MODL_AUTOLOAD |
Augeas.ENABLE_SPAN))

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.6.0
certbot[dev]==1.1.0
@@ -1,32 +1,61 @@
from distutils.version import StrictVersion
import sys

from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand

version = '1.15.0.dev0'
version = '1.6.0.dev0'

# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.6.0',
'certbot>=1.1.0',
'python-augeas',
'setuptools>=39.0.1',
'setuptools',
'zope.component',
'zope.interface',
]

setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
'of setuptools. Version 36.2+ of setuptools is required.')
elif sys.version_info < (3,3):
install_requires.append('mock')

dev_extras = [
'apacheconfig>=0.3.2',
]

class PyTest(TestCommand):
user_options = []

def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''

def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)


setup(
name='certbot-apache',
version=version,
description="Apache plugin for Certbot",
url='https://github.com/letsencrypt/letsencrypt',
author="Certbot Project",
author_email='certbot-dev@eff.org',
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=3.6',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
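Note: the setup.py hunk above gates the mock dependency on a PEP 508 environment marker when setuptools is new enough to understand one. As a rough illustration of how such a marker string evaluates (this sketch uses the third-party packaging library, which is an assumption of the example and not part of this setup.py):

# Illustration only: evaluating the marker string appended above.
# Requires the 'packaging' library (an assumption of this sketch).
from packaging.markers import Marker

marker = Marker('python_version < "3.3"')
# False on any supported Python 3 interpreter, so pip skips installing mock.
print(marker.evaluate())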
@@ -34,11 +63,13 @@ setup(
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
@@ -58,4 +89,7 @@ setup(
'apache = certbot_apache._internal.entrypoint:ENTRYPOINT',
],
},
test_suite='certbot_apache',
tests_require=["pytest"],
cmdclass={"test": PyTest},
)

@@ -52,7 +52,7 @@ function Cleanup() {
# if our environment asks us to enable modules, do our best!
if [ "$1" = --debian-modules ] ; then
sudo apt-get install -y apache2
sudo apt-get install -y libapache2-mod-wsgi-py3
sudo apt-get install -y libapache2-mod-wsgi
sudo apt-get install -y libapache2-mod-macro

for mod in ssl rewrite macro wsgi deflate userdir version mime setenvif ; do

@@ -1,6 +1,4 @@
"""Tests for AugeasParserNode classes"""
from typing import List

try:
import mock
except ImportError: # pragma: no cover
@@ -29,7 +27,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
"""Test AugeasParserNode using available test configurations"""

def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(AugeasParserNodeTest, self).setUp()

with mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.get_parsernode_root") as mock_parsernode:
mock_parsernode.side_effect = _get_augeasnode_mock(
@@ -109,7 +107,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-

def test_set_parameters(self):
servernames = self.config.parser_root.find_directives("servername")
names: List[str] = []
names = [] # type: List[str]
for servername in servernames:
names += servername.parameters
self.assertFalse("going_to_set_this" in names)

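Note: most of the test hunks in this compare swap between the Python 3-only zero-argument super() with variable annotations and the Python 2-compatible explicit super(Class, self) with type comments. A standalone contrast of the two idioms (the class names are placeholders, not the real test classes):

from typing import List


class Base:
    def setUp(self):
        pass


class Py3OnlyTest(Base):
    def setUp(self):
        super().setUp()                     # zero-argument form, Python 3 only
        self.names: List[str] = []          # variable annotation


class Py2CompatTest(Base):
    def setUp(self):
        super(Py2CompatTest, self).setUp()  # explicit form, works on 2 and 3
        self.names = []  # type: List[str]  # comment-style annotation


Py3OnlyTest().setUp()
Py2CompatTest().setUp()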
@@ -7,6 +7,7 @@ try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
import six # pylint: disable=unused-import # six is used in mock.patch()
|
||||
|
||||
from certbot import errors
|
||||
from certbot_apache._internal import constants
|
||||
@@ -18,7 +19,7 @@ class AutoHSTSTest(util.ApacheTest):
|
||||
# pylint: disable=protected-access
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp()
|
||||
super(AutoHSTSTest, self).setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
|
||||
@@ -36,9 +36,9 @@ class CentOS6Tests(util.ApacheTest):
|
||||
test_dir = "centos6_apache/apache"
|
||||
config_root = "centos6_apache/apache/httpd"
|
||||
vhost_root = "centos6_apache/apache/httpd/conf.d"
|
||||
super().setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
super(CentOS6Tests, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
|
||||
@@ -41,9 +41,9 @@ class FedoraRestartTest(util.ApacheTest):
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super().setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="fedora_old")
|
||||
@@ -96,9 +96,9 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super().setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
super(MultipleVhostsTestCentOS, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
@@ -140,7 +140,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
self.assertEqual(mock_get.call_count, 3)
|
||||
self.assertEqual(len(self.config.parser.modules), 4)
|
||||
self.assertEqual(len(self.config.parser.variables), 2)
|
||||
self.assertTrue("TEST2" in self.config.parser.variables)
|
||||
self.assertTrue("TEST2" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mod_another.c" in self.config.parser.modules)
|
||||
|
||||
def test_get_virtual_hosts(self):
|
||||
@@ -172,11 +172,11 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
mock_osi.return_value = ("centos", "7")
|
||||
self.config.parser.update_runtime_variables()
|
||||
|
||||
self.assertTrue("mock_define" in self.config.parser.variables)
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables)
|
||||
self.assertTrue("mock_value" in self.config.parser.variables)
|
||||
self.assertTrue("mock_define" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_value" in self.config.parser.variables.keys())
|
||||
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
|
||||
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
|
||||
|
||||
@mock.patch("certbot_apache._internal.configurator.util.run_script")
|
||||
|
||||
@@ -11,7 +11,7 @@ class ComplexParserTest(util.ParserTest):
|
||||
"""Apache Parser Test."""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp(
|
||||
super(ComplexParserTest, self).setUp(
|
||||
"complex_parsing", "complex_parsing")
|
||||
|
||||
self.setup_variables()
|
||||
|
||||
@@ -16,7 +16,7 @@ class ConfiguratorReverterTest(util.ApacheTest):
|
||||
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp()
|
||||
super(ConfiguratorReverterTest, self).setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
|
||||
@@ -10,6 +10,7 @@ try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
import six # pylint: disable=unused-import # six is used in mock.patch()
|
||||
|
||||
from acme import challenges
|
||||
from certbot import achallenges
|
||||
@@ -30,7 +31,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"""Test two standard well-configured HTTP vhosts."""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp()
|
||||
super(MultipleVhostsTest, self).setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
@@ -103,9 +104,9 @@ class MultipleVhostsTest(util.ApacheTest):
"handle_modules", "handle_sites", "ctl"]
exp = {}

for k in ApacheConfigurator.OS_DEFAULTS.__dict__.keys():
for k in ApacheConfigurator.OS_DEFAULTS:
if k in parserargs:
exp[k.replace("_", "-")] = getattr(ApacheConfigurator.OS_DEFAULTS, k)
exp[k.replace("_", "-")] = ApacheConfigurator.OS_DEFAULTS[k]
# Special cases
exp["vhost-root"] = None

@@ -128,13 +129,14 @@ class MultipleVhostsTest(util.ApacheTest):
def test_all_configurators_defaults_defined(self):
from certbot_apache._internal.entrypoint import OVERRIDE_CLASSES
from certbot_apache._internal.configurator import ApacheConfigurator
parameters = set(ApacheConfigurator.OS_DEFAULTS.__dict__.keys())
parameters = set(ApacheConfigurator.OS_DEFAULTS.keys())
for cls in OVERRIDE_CLASSES.values():
self.assertTrue(parameters.issubset(set(cls.OS_DEFAULTS.__dict__.keys())))
self.assertTrue(parameters.issubset(set(cls.OS_DEFAULTS.keys())))

def test_constant(self):
self.assertTrue("debian_apache_2_4/multiple_vhosts/apache" in
self.config.options.server_root)
self.config.option("server_root"))
self.assertEqual(self.config.option("nonexistent"), None)

@certbot_util.patch_get_utility()
def test_get_all_names(self, mock_getutility):
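Note: these configurator test hunks track OS_DEFAULTS changing between a plain dict (indexed and iterated with .keys()) and an object whose fields are read through getattr()/__dict__. A hedged stand-in showing the two access styles side by side (the class and values below are illustrative, not Certbot's real defaults):

from dataclasses import dataclass


@dataclass
class FakeOsDefaults:
    # Illustrative fields only; not the real Certbot OS_DEFAULTS contents.
    server_root: str = "/etc/apache2"
    ctl: str = "apache2ctl"


DICT_DEFAULTS = {"server_root": "/etc/apache2", "ctl": "apache2ctl"}
OBJ_DEFAULTS = FakeOsDefaults()

# dict style, as on one side of the hunks above
for key in DICT_DEFAULTS.keys():
    print(key, DICT_DEFAULTS[key])

# attribute style, as on the other side
for key in OBJ_DEFAULTS.__dict__:
    print(key, getattr(OBJ_DEFAULTS, key))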
@@ -724,7 +726,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# This calls open
|
||||
self.config.reverter.register_file_creation = mock.Mock()
|
||||
mock_open.side_effect = IOError
|
||||
with mock.patch("builtins.open", mock_open):
|
||||
with mock.patch("six.moves.builtins.open", mock_open):
|
||||
self.assertRaises(
|
||||
errors.PluginError,
|
||||
self.config.make_vhost_ssl, self.vh_truth[0])
|
||||
@@ -1335,6 +1337,13 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.enable_mod,
|
||||
"whatever")
|
||||
|
||||
def test_wildcard_domain(self):
|
||||
# pylint: disable=protected-access
|
||||
cases = {u"*.example.org": True, b"*.x.example.org": True,
|
||||
u"a.example.org": False, b"a.x.example.org": False}
|
||||
for key in cases:
|
||||
self.assertEqual(self.config._wildcard_domain(key), cases[key])
|
||||
|
||||
def test_choose_vhosts_wildcard(self):
|
||||
# pylint: disable=protected-access
|
||||
mock_path = "certbot_apache._internal.display_ops.select_vhost_multiple"
|
||||
@@ -1348,10 +1357,10 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# And the actual returned values
|
||||
self.assertEqual(len(vhs), 1)
|
||||
self.assertEqual(vhs[0].name, "certbot.demo")
|
||||
self.assertTrue(vhs[0].name == "certbot.demo")
|
||||
self.assertTrue(vhs[0].ssl)
|
||||
|
||||
self.assertNotEqual(vhs[0], self.vh_truth[3])
|
||||
self.assertFalse(vhs[0] == self.vh_truth[3])
|
||||
|
||||
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.make_vhost_ssl")
|
||||
def test_choose_vhosts_wildcard_no_ssl(self, mock_makessl):
|
||||
@@ -1462,10 +1471,10 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.parser.aug.match = mock_match
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 2)
|
||||
self.assertEqual(vhs[0], self.vh_truth[1])
|
||||
self.assertTrue(vhs[0] == self.vh_truth[1])
|
||||
# mock_vhost should have replaced the vh_truth[0], because its filepath
|
||||
# isn't a symlink
|
||||
self.assertEqual(vhs[1], mock_vhost)
|
||||
self.assertTrue(vhs[1] == mock_vhost)
|
||||
|
||||
|
||||
class AugeasVhostsTest(util.ApacheTest):
|
||||
@@ -1476,9 +1485,9 @@ class AugeasVhostsTest(util.ApacheTest):
|
||||
td = "debian_apache_2_4/augeas_vhosts"
|
||||
cr = "debian_apache_2_4/augeas_vhosts/apache2"
|
||||
vr = "debian_apache_2_4/augeas_vhosts/apache2/sites-available"
|
||||
super().setUp(test_dir=td,
|
||||
config_root=cr,
|
||||
vhost_root=vr)
|
||||
super(AugeasVhostsTest, self).setUp(test_dir=td,
|
||||
config_root=cr,
|
||||
vhost_root=vr)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir,
|
||||
@@ -1555,9 +1564,9 @@ class MultiVhostsTest(util.ApacheTest):
|
||||
td = "debian_apache_2_4/multi_vhosts"
|
||||
cr = "debian_apache_2_4/multi_vhosts/apache2"
|
||||
vr = "debian_apache_2_4/multi_vhosts/apache2/sites-available"
|
||||
super().setUp(test_dir=td,
|
||||
config_root=cr,
|
||||
vhost_root=vr)
|
||||
super(MultiVhostsTest, self).setUp(test_dir=td,
|
||||
config_root=cr,
|
||||
vhost_root=vr)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path,
|
||||
@@ -1660,7 +1669,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
|
||||
"""Test that the options-ssl-nginx.conf file is installed and updated properly."""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp()
|
||||
super(InstallSslOptionsConfTest, self).setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
@@ -1773,7 +1782,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
|
||||
# ssl_module statically linked
|
||||
self.config._openssl_version = None
|
||||
self.config.parser.modules['ssl_module'] = None
|
||||
self.config.options.bin = '/fake/path/to/httpd'
|
||||
self.config.options['bin'] = '/fake/path/to/httpd'
|
||||
with mock.patch("certbot_apache._internal.configurator."
|
||||
"ApacheConfigurator._open_module_file") as mock_omf:
|
||||
mock_omf.return_value = some_string_contents
|
||||
@@ -1809,7 +1818,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
|
||||
|
||||
# When ssl_module is statically linked but --apache-bin not provided
|
||||
self.config._openssl_version = None
|
||||
self.config.options.bin = None
|
||||
self.config.options['bin'] = None
|
||||
self.config.parser.modules['ssl_module'] = None
|
||||
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
|
||||
self.assertEqual(self.config.openssl_version(), None)
|
||||
@@ -1832,7 +1841,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
|
||||
|
||||
def test_open_module_file(self):
|
||||
mock_open = mock.mock_open(read_data="testing 12 3")
|
||||
with mock.patch("builtins.open", mock_open):
|
||||
with mock.patch("six.moves.builtins.open", mock_open):
|
||||
self.assertEqual(self.config._open_module_file("/nonsense/"), "testing 12 3")
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -20,7 +20,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
_multiprocess_can_split_ = True
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp()
|
||||
super(MultipleVhostsTestDebian, self).setUp()
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="debian")
|
||||
|
||||
@@ -412,9 +412,9 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
|
||||
ancestor=self.block,
|
||||
filepath="/path/to/whatever",
|
||||
metadata=self.metadata)
|
||||
self.assertNotEqual(self.block, ne_block)
|
||||
self.assertNotEqual(self.directive, ne_directive)
|
||||
self.assertNotEqual(self.comment, ne_comment)
|
||||
self.assertFalse(self.block == ne_block)
|
||||
self.assertFalse(self.directive == ne_directive)
|
||||
self.assertFalse(self.comment == ne_comment)
|
||||
|
||||
def test_parsed_paths(self):
|
||||
mock_p = mock.MagicMock(return_value=['/path/file.conf',
|
||||
|
||||
@@ -46,9 +46,9 @@ class FedoraRestartTest(util.ApacheTest):
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super().setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="fedora")
|
||||
@@ -90,9 +90,9 @@ class MultipleVhostsTestFedora(util.ApacheTest):
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super().setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
super(MultipleVhostsTestFedora, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
@@ -134,7 +134,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
|
||||
self.assertEqual(mock_get.call_count, 3)
|
||||
self.assertEqual(len(self.config.parser.modules), 4)
|
||||
self.assertEqual(len(self.config.parser.variables), 2)
|
||||
self.assertTrue("TEST2" in self.config.parser.variables)
|
||||
self.assertTrue("TEST2" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mod_another.c" in self.config.parser.modules)
|
||||
|
||||
@mock.patch("certbot_apache._internal.configurator.util.run_script")
|
||||
@@ -172,11 +172,11 @@ class MultipleVhostsTestFedora(util.ApacheTest):
|
||||
mock_osi.return_value = ("fedora", "29")
|
||||
self.config.parser.update_runtime_variables()
|
||||
|
||||
self.assertTrue("mock_define" in self.config.parser.variables)
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables)
|
||||
self.assertTrue("mock_value" in self.config.parser.variables)
|
||||
self.assertTrue("mock_define" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_value" in self.config.parser.variables.keys())
|
||||
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
|
||||
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
|
||||
|
||||
@mock.patch("certbot_apache._internal.configurator.util.run_script")
|
||||
|
||||
@@ -50,9 +50,9 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
|
||||
test_dir = "gentoo_apache/apache"
|
||||
config_root = "gentoo_apache/apache/apache2"
|
||||
vhost_root = "gentoo_apache/apache/apache2/vhosts.d"
|
||||
super().setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
super(MultipleVhostsTestGentoo, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
# pylint: disable=line-too-long
|
||||
with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_runtime_variables"):
|
||||
@@ -91,7 +91,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
|
||||
with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_modules"):
|
||||
self.config.parser.update_runtime_variables()
|
||||
for define in defines:
|
||||
self.assertTrue(define in self.config.parser.variables)
|
||||
self.assertTrue(define in self.config.parser.variables.keys())
|
||||
|
||||
@mock.patch("certbot_apache._internal.apache_util.parse_from_subprocess")
|
||||
def test_no_binary_configdump(self, mock_subprocess):
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test for certbot_apache._internal.http_01."""
|
||||
import unittest
|
||||
import errno
|
||||
from typing import List
|
||||
|
||||
try:
|
||||
import mock
|
||||
@@ -24,10 +23,10 @@ class ApacheHttp01Test(util.ApacheTest):
|
||||
"""Test for certbot_apache._internal.http_01.ApacheHttp01."""
|
||||
|
||||
def setUp(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
super().setUp(*args, **kwargs)
|
||||
super(ApacheHttp01Test, self).setUp(*args, **kwargs)
|
||||
|
||||
self.account_key = self.rsa512jwk
|
||||
self.achalls: List[achallenges.KeyAuthorizationAnnotatedChallenge] = []
|
||||
self.achalls = [] # type: List[achallenges.KeyAuthorizationAnnotatedChallenge]
|
||||
vh_truth = util.get_vh_truth(
|
||||
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
|
||||
# Takes the vhosts for encryption-example.demo, certbot.demo
|
||||
|
||||
@@ -27,14 +27,14 @@ class VirtualHostTest(unittest.TestCase):
|
||||
"certbot_apache._internal.obj.Addr(('127.0.0.1', '443'))")
|
||||
|
||||
def test_eq(self):
|
||||
self.assertEqual(self.vhost1b, self.vhost1)
|
||||
self.assertNotEqual(self.vhost1, self.vhost2)
|
||||
self.assertTrue(self.vhost1b == self.vhost1)
|
||||
self.assertFalse(self.vhost1 == self.vhost2)
|
||||
self.assertEqual(str(self.vhost1b), str(self.vhost1))
|
||||
self.assertNotEqual(self.vhost1b, 1234)
|
||||
self.assertFalse(self.vhost1b == 1234)
|
||||
|
||||
def test_ne(self):
|
||||
self.assertNotEqual(self.vhost1, self.vhost2)
|
||||
self.assertEqual(self.vhost1, self.vhost1b)
|
||||
self.assertTrue(self.vhost1 != self.vhost2)
|
||||
self.assertFalse(self.vhost1 != self.vhost1b)
|
||||
|
||||
def test_conflicts(self):
|
||||
from certbot_apache._internal.obj import Addr
|
||||
@@ -128,13 +128,13 @@ class AddrTest(unittest.TestCase):
|
||||
self.assertTrue(self.addr1.conflicts(self.addr2))
|
||||
|
||||
def test_equal(self):
|
||||
self.assertEqual(self.addr1, self.addr2)
|
||||
self.assertNotEqual(self.addr, self.addr1)
|
||||
self.assertNotEqual(self.addr, 123)
|
||||
self.assertTrue(self.addr1 == self.addr2)
|
||||
self.assertFalse(self.addr == self.addr1)
|
||||
self.assertFalse(self.addr == 123)
|
||||
|
||||
def test_not_equal(self):
|
||||
self.assertEqual(self.addr1, self.addr2)
|
||||
self.assertNotEqual(self.addr, self.addr1)
|
||||
self.assertFalse(self.addr1 != self.addr2)
|
||||
self.assertTrue(self.addr != self.addr1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -16,7 +16,7 @@ class BasicParserTest(util.ParserTest):
|
||||
"""Apache Parser Test."""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp()
|
||||
super(BasicParserTest, self).setUp()
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.temp_dir)
|
||||
@@ -305,9 +305,11 @@ class BasicParserTest(util.ParserTest):
|
||||
self.assertRaises(
|
||||
errors.PluginError, self.parser.update_runtime_variables)
|
||||
|
||||
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.option")
|
||||
@mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
|
||||
def test_update_runtime_vars_bad_ctl(self, mock_popen):
|
||||
def test_update_runtime_vars_bad_ctl(self, mock_popen, mock_opt):
|
||||
mock_popen.side_effect = OSError
|
||||
mock_opt.return_value = "nonexistent"
|
||||
self.assertRaises(
|
||||
errors.MisconfigurationError,
|
||||
self.parser.update_runtime_variables)
|
||||
@@ -330,14 +332,14 @@ class BasicParserTest(util.ParserTest):
|
||||
|
||||
class ParserInitTest(util.ApacheTest):
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp()
|
||||
super(ParserInitTest, self).setUp()
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.temp_dir)
|
||||
shutil.rmtree(self.config_dir)
|
||||
shutil.rmtree(self.work_dir)
|
||||
|
||||
@mock.patch("certbot_apache._internal.parser.init_augeas")
|
||||
@mock.patch("certbot_apache._internal.parser.ApacheParser.init_augeas")
|
||||
def test_prepare_no_augeas(self, mock_init_augeas):
|
||||
from certbot_apache._internal.parser import ApacheParser
|
||||
mock_init_augeas.side_effect = errors.NoInstallationError
|
||||
|
||||
@@ -20,7 +20,7 @@ class ConfiguratorParserNodeTest(util.ApacheTest): # pylint: disable=too-many-p
|
||||
"""Test AugeasParserNode using available test configurations"""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super().setUp()
|
||||
super(ConfiguratorParserNodeTest, self).setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir,
|
||||
|
||||
@@ -18,7 +18,7 @@ class DummyParserNode(interfaces.ParserNode):
|
||||
self.dirty = dirty
|
||||
self.filepath = filepath
|
||||
self.metadata = metadata
|
||||
super().__init__(**kwargs)
|
||||
super(DummyParserNode, self).__init__(**kwargs)
|
||||
|
||||
def save(self, msg): # pragma: no cover
|
||||
"""Save"""
|
||||
@@ -38,7 +38,7 @@ class DummyCommentNode(DummyParserNode):
|
||||
"""
|
||||
comment, kwargs = util.commentnode_kwargs(kwargs)
|
||||
self.comment = comment
|
||||
super().__init__(**kwargs)
|
||||
super(DummyCommentNode, self).__init__(**kwargs)
|
||||
|
||||
|
||||
class DummyDirectiveNode(DummyParserNode):
|
||||
@@ -54,7 +54,7 @@ class DummyDirectiveNode(DummyParserNode):
|
||||
self.parameters = parameters
|
||||
self.enabled = enabled
|
||||
|
||||
super().__init__(**kwargs)
|
||||
super(DummyDirectiveNode, self).__init__(**kwargs)
|
||||
|
||||
def set_parameters(self, parameters): # pragma: no cover
|
||||
"""Set parameters"""
|
||||
|
||||
@@ -26,6 +26,8 @@ class ApacheTest(unittest.TestCase):
|
||||
config_root="debian_apache_2_4/multiple_vhosts/apache2",
|
||||
vhost_root="debian_apache_2_4/multiple_vhosts/apache2/sites-available"):
|
||||
# pylint: disable=arguments-differ
|
||||
super(ApacheTest, self).setUp()
|
||||
|
||||
self.temp_dir, self.config_dir, self.work_dir = common.dir_setup(
|
||||
test_dir=test_dir,
|
||||
pkg=__name__)
|
||||
@@ -67,7 +69,7 @@ class ParserTest(ApacheTest):
|
||||
def setUp(self, test_dir="debian_apache_2_4/multiple_vhosts",
|
||||
config_root="debian_apache_2_4/multiple_vhosts/apache2",
|
||||
vhost_root="debian_apache_2_4/multiple_vhosts/apache2/sites-available"):
|
||||
super().setUp(test_dir, config_root, vhost_root)
|
||||
super(ParserTest, self).setUp(test_dir, config_root, vhost_root)
|
||||
|
||||
zope.component.provideUtility(display_util.FileDisplay(sys.stdout,
|
||||
False))
|
||||
@@ -123,11 +125,11 @@ def get_apache_configurator(
|
||||
version=version, use_parsernode=use_parsernode,
|
||||
openssl_version=openssl_version)
|
||||
if not conf_vhost_path:
|
||||
config_class.OS_DEFAULTS.vhost_root = vhost_path
|
||||
config_class.OS_DEFAULTS["vhost_root"] = vhost_path
|
||||
else:
|
||||
# Custom virtualhost path was requested
|
||||
config.config.apache_vhost_root = conf_vhost_path
|
||||
config.config.apache_ctl = config_class.OS_DEFAULTS.ctl
|
||||
config.config.apache_ctl = config_class.OS_DEFAULTS["ctl"]
|
||||
config.prepare()
|
||||
return config
|
||||
|
||||
|
||||
513
certbot-auto
@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
|
||||
fi
|
||||
VENV_BIN="$VENV_PATH/bin"
|
||||
BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
|
||||
LE_AUTO_VERSION="1.14.0"
|
||||
LE_AUTO_VERSION="1.5.0"
|
||||
BASENAME=$(basename $0)
|
||||
USAGE="Usage: $BASENAME [OPTIONS]
|
||||
A self-updating wrapper script for the Certbot ACME client. When run, updates
|
||||
@@ -258,7 +258,7 @@ DeprecationBootstrap() {
|
||||
|
||||
MIN_PYTHON_2_VERSION="2.7"
|
||||
MIN_PYVER2=$(echo "$MIN_PYTHON_2_VERSION" | sed 's/\.//')
|
||||
MIN_PYTHON_3_VERSION="3.6"
|
||||
MIN_PYTHON_3_VERSION="3.5"
|
||||
MIN_PYVER3=$(echo "$MIN_PYTHON_3_VERSION" | sed 's/\.//')
|
||||
# Sets LE_PYTHON to Python version string and PYVER to the first two
|
||||
# digits of the python version.
|
||||
@@ -799,15 +799,18 @@ BootstrapMageiaCommon() {
|
||||
# that function. If Bootstrap is set to a function that doesn't install any
|
||||
# packages BOOTSTRAP_VERSION is not set.
|
||||
if [ -f /etc/debian_version ]; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
Bootstrap() {
|
||||
BootstrapMessage "Debian-based OSes"
|
||||
BootstrapDebCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapDebCommon $BOOTSTRAP_DEB_COMMON_VERSION"
|
||||
elif [ -f /etc/mageia-release ]; then
|
||||
# Mageia has both /etc/mageia-release and /etc/redhat-release
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Mageia" BootstrapMageiaCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapMageiaCommon $BOOTSTRAP_MAGEIA_COMMON_VERSION"
|
||||
elif [ -f /etc/redhat-release ]; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
# Run DeterminePythonVersion to decide on the basis of available Python versions
|
||||
# whether to use 2.x or 3.x on RedHat-like systems.
|
||||
# Then, revert LE_PYTHON to its previous state.
|
||||
@@ -840,7 +843,12 @@ elif [ -f /etc/redhat-release ]; then
|
||||
INTERACTIVE_BOOTSTRAP=1
|
||||
fi
|
||||
|
||||
Bootstrap() {
|
||||
BootstrapMessage "Legacy RedHat-based OSes that will use Python3"
|
||||
BootstrapRpmPython3Legacy
|
||||
}
|
||||
USE_PYTHON_3=1
|
||||
BOOTSTRAP_VERSION="BootstrapRpmPython3Legacy $BOOTSTRAP_RPM_PYTHON3_LEGACY_VERSION"
|
||||
|
||||
# Try now to enable SCL rh-python36 for systems already bootstrapped
|
||||
# NB: EnablePython36SCL has been defined along with BootstrapRpmPython3Legacy in certbot-auto
|
||||
@@ -859,38 +867,73 @@ elif [ -f /etc/redhat-release ]; then
|
||||
fi
|
||||
|
||||
if [ "$RPM_USE_PYTHON_3" = 1 ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes that will use Python3"
|
||||
BootstrapRpmPython3
|
||||
}
|
||||
USE_PYTHON_3=1
|
||||
BOOTSTRAP_VERSION="BootstrapRpmPython3 $BOOTSTRAP_RPM_PYTHON3_VERSION"
|
||||
else
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes"
|
||||
BootstrapRpmCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
fi
|
||||
fi
|
||||
|
||||
LE_PYTHON="$prev_le_python"
|
||||
elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
Bootstrap() {
|
||||
BootstrapMessage "openSUSE-based OSes"
|
||||
BootstrapSuseCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapSuseCommon $BOOTSTRAP_SUSE_COMMON_VERSION"
|
||||
elif [ -f /etc/arch-release ]; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
Bootstrap() {
|
||||
if [ "$DEBUG" = 1 ]; then
|
||||
BootstrapMessage "Archlinux"
|
||||
BootstrapArchCommon
|
||||
else
|
||||
error "Please use pacman to install letsencrypt packages:"
|
||||
error "# pacman -S certbot certbot-apache"
|
||||
error
|
||||
error "If you would like to use the virtualenv way, please run the script again with the"
|
||||
error "--debug flag."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
|
||||
elif [ -f /etc/manjaro-release ]; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Manjaro Linux" BootstrapArchCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
|
||||
elif [ -f /etc/gentoo-release ]; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
elif uname | grep -iq FreeBSD ; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
elif uname | grep -iq Darwin ; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
elif [ -f /etc/issue ] && grep -iq "Amazon Linux" /etc/issue ; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Amazon Linux" BootstrapRpmCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
elif [ -f /etc/product ] && grep -q "Joyent Instance" /etc/product ; then
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Joyent SmartOS Zone" BootstrapSmartOS
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapSmartOS $BOOTSTRAP_SMARTOS_VERSION"
|
||||
else
|
||||
DEPRECATED_OS=1
|
||||
NO_SELF_UPGRADE=1
|
||||
Bootstrap() {
|
||||
error "Sorry, I don't know how to bootstrap Certbot on your operating system!"
|
||||
error
|
||||
error "You will need to install OS dependencies, configure virtualenv, and run pip install manually."
|
||||
error "Please see https://letsencrypt.readthedocs.org/en/latest/contributing.html#prerequisites"
|
||||
error "for more info."
|
||||
exit 1
|
||||
}
|
||||
fi
|
||||
|
||||
# We handle this case after determining the normal bootstrap version to allow
|
||||
@@ -1119,9 +1162,7 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
fi
|
||||
|
||||
if [ -f "$VENV_BIN/letsencrypt" -a "$INSTALL_ONLY" != 1 ]; then
|
||||
error "certbot-auto and its Certbot installation will no longer receive updates."
|
||||
error "You will not receive any bug fixes including those fixing server compatibility"
|
||||
error "or security problems."
|
||||
error "Certbot will no longer receive updates."
|
||||
error "Please visit https://certbot.eff.org/ to check for other alternatives."
|
||||
"$VENV_BIN/letsencrypt" "$@"
|
||||
exit 0
|
||||
@@ -1224,40 +1265,45 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
# pip install hashin
|
||||
# hashin -r dependency-requirements.txt cryptography==1.5.2
|
||||
# ```
|
||||
ConfigArgParse==1.2.3 \
|
||||
--hash=sha256:edd17be986d5c1ba2e307150b8e5f5107aba125f3574dddd02c85d5cdcfd37dc
|
||||
certifi==2020.4.5.1 \
|
||||
--hash=sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304 \
|
||||
--hash=sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519
|
||||
cffi==1.14.0 \
|
||||
--hash=sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff \
|
||||
--hash=sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b \
|
||||
--hash=sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac \
|
||||
--hash=sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0 \
|
||||
--hash=sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384 \
|
||||
--hash=sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26 \
|
||||
--hash=sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6 \
|
||||
--hash=sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b \
|
||||
--hash=sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e \
|
||||
--hash=sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd \
|
||||
--hash=sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2 \
|
||||
--hash=sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66 \
|
||||
--hash=sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc \
|
||||
--hash=sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8 \
|
||||
--hash=sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55 \
|
||||
--hash=sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4 \
|
||||
--hash=sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5 \
|
||||
--hash=sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d \
|
||||
--hash=sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78 \
|
||||
--hash=sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa \
|
||||
--hash=sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793 \
|
||||
--hash=sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f \
|
||||
--hash=sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a \
|
||||
--hash=sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f \
|
||||
--hash=sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30 \
|
||||
--hash=sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f \
|
||||
--hash=sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3 \
|
||||
--hash=sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c
|
||||
ConfigArgParse==1.0 \
|
||||
--hash=sha256:bf378245bc9cdc403a527e5b7406b991680c2a530e7e81af747880b54eb57133
|
||||
certifi==2019.11.28 \
|
||||
--hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \
|
||||
--hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f
|
||||
cffi==1.13.2 \
|
||||
--hash=sha256:0b49274afc941c626b605fb59b59c3485c17dc776dc3cc7cc14aca74cc19cc42 \
|
||||
--hash=sha256:0e3ea92942cb1168e38c05c1d56b0527ce31f1a370f6117f1d490b8dcd6b3a04 \
|
||||
--hash=sha256:135f69aecbf4517d5b3d6429207b2dff49c876be724ac0c8bf8e1ea99df3d7e5 \
|
||||
--hash=sha256:19db0cdd6e516f13329cba4903368bff9bb5a9331d3410b1b448daaadc495e54 \
|
||||
--hash=sha256:2781e9ad0e9d47173c0093321bb5435a9dfae0ed6a762aabafa13108f5f7b2ba \
|
||||
--hash=sha256:291f7c42e21d72144bb1c1b2e825ec60f46d0a7468f5346841860454c7aa8f57 \
|
||||
--hash=sha256:2c5e309ec482556397cb21ede0350c5e82f0eb2621de04b2633588d118da4396 \
|
||||
--hash=sha256:2e9c80a8c3344a92cb04661115898a9129c074f7ab82011ef4b612f645939f12 \
|
||||
--hash=sha256:32a262e2b90ffcfdd97c7a5e24a6012a43c61f1f5a57789ad80af1d26c6acd97 \
|
||||
--hash=sha256:3c9fff570f13480b201e9ab69453108f6d98244a7f495e91b6c654a47486ba43 \
|
||||
--hash=sha256:415bdc7ca8c1c634a6d7163d43fb0ea885a07e9618a64bda407e04b04333b7db \
|
||||
--hash=sha256:42194f54c11abc8583417a7cf4eaff544ce0de8187abaf5d29029c91b1725ad3 \
|
||||
--hash=sha256:4424e42199e86b21fc4db83bd76909a6fc2a2aefb352cb5414833c030f6ed71b \
|
||||
--hash=sha256:4a43c91840bda5f55249413037b7a9b79c90b1184ed504883b72c4df70778579 \
|
||||
--hash=sha256:599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346 \
|
||||
--hash=sha256:5c4fae4e9cdd18c82ba3a134be256e98dc0596af1e7285a3d2602c97dcfa5159 \
|
||||
--hash=sha256:5ecfa867dea6fabe2a58f03ac9186ea64da1386af2159196da51c4904e11d652 \
|
||||
--hash=sha256:62f2578358d3a92e4ab2d830cd1c2049c9c0d0e6d3c58322993cc341bdeac22e \
|
||||
--hash=sha256:6471a82d5abea994e38d2c2abc77164b4f7fbaaf80261cb98394d5793f11b12a \
|
||||
--hash=sha256:6d4f18483d040e18546108eb13b1dfa1000a089bcf8529e30346116ea6240506 \
|
||||
--hash=sha256:71a608532ab3bd26223c8d841dde43f3516aa5d2bf37b50ac410bb5e99053e8f \
|
||||
--hash=sha256:74a1d8c85fb6ff0b30fbfa8ad0ac23cd601a138f7509dc617ebc65ef305bb98d \
|
||||
--hash=sha256:7b93a885bb13073afb0aa73ad82059a4c41f4b7d8eb8368980448b52d4c7dc2c \
|
||||
--hash=sha256:7d4751da932caaec419d514eaa4215eaf14b612cff66398dd51129ac22680b20 \
|
||||
--hash=sha256:7f627141a26b551bdebbc4855c1157feeef18241b4b8366ed22a5c7d672ef858 \
|
||||
--hash=sha256:8169cf44dd8f9071b2b9248c35fc35e8677451c52f795daa2bb4643f32a540bc \
|
||||
--hash=sha256:aa00d66c0fab27373ae44ae26a66a9e43ff2a678bf63a9c7c1a9a4d61172827a \
|
||||
--hash=sha256:ccb032fda0873254380aa2bfad2582aedc2959186cce61e3a17abc1a55ff89c3 \
|
||||
--hash=sha256:d754f39e0d1603b5b24a7f8484b22d2904fa551fe865fd0d4c3332f078d20d4e \
|
||||
--hash=sha256:d75c461e20e29afc0aee7172a0950157c704ff0dd51613506bd7d82b718e7410 \
|
||||
--hash=sha256:dcd65317dd15bc0451f3e01c80da2216a31916bdcffd6221ca1202d96584aa25 \
|
||||
--hash=sha256:e570d3ab32e2c2861c4ebe6ffcad6a8abf9347432a37608fe1fbd157b3f0036b \
|
||||
--hash=sha256:fd43a88e045cf992ed09fa724b5315b790525f2676883a6ea64e3263bae6549d
|
||||
chardet==3.0.4 \
|
||||
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
|
||||
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
|
||||
@@ -1285,66 +1331,67 @@ cryptography==2.8 \
|
||||
--hash=sha256:df6b4dca2e11865e6cfbfb708e800efb18370f5a46fd601d3755bc7f85b3a8a2 \
|
||||
--hash=sha256:ecadccc7ba52193963c0475ac9f6fa28ac01e01349a2ca48509667ef41ffd2cf \
|
||||
--hash=sha256:fb81c17e0ebe3358486cd8cc3ad78adbae58af12fc2bf2bc0bb84e8090fa5ce8
|
||||
distro==1.5.0 \
|
||||
--hash=sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92 \
|
||||
--hash=sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799
|
||||
enum34==1.1.10; python_version < '3.4' \
|
||||
--hash=sha256:a98a201d6de3f2ab3db284e70a33b0f896fbf35f8086594e8c9e74b909058d53 \
|
||||
--hash=sha256:c3858660960c984d6ab0ebad691265180da2b43f07e061c0f8dca9ef3cffd328 \
|
||||
--hash=sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248
|
||||
distro==1.4.0 \
|
||||
--hash=sha256:362dde65d846d23baee4b5c058c8586f219b5a54be1cf5fc6ff55c4578392f57 \
|
||||
--hash=sha256:eedf82a470ebe7d010f1872c17237c79ab04097948800029994fa458e52fb4b4
|
||||
# Package enum34 needs to be explicitly limited to Python2.x, in order to avoid
|
||||
# certbot-auto failures on Python 3.6+ which enum34 doesn't support. See #5456.
|
||||
enum34==1.1.6 ; python_version < '3.4' \
|
||||
--hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
|
||||
--hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
|
||||
--hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
|
||||
--hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1
|
||||
funcsigs==1.0.2 \
|
||||
--hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
|
||||
--hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50
|
||||
idna==2.9 \
|
||||
--hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \
|
||||
--hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa
|
||||
idna==2.8 \
|
||||
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
|
||||
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
|
||||
ipaddress==1.0.23 \
|
||||
--hash=sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc \
|
||||
--hash=sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2
|
||||
josepy==1.3.0 \
|
||||
--hash=sha256:c341ffa403399b18e9eae9012f804843045764d1390f9cb4648980a7569b1619 \
|
||||
--hash=sha256:e54882c64be12a2a76533f73d33cba9e331950fda9e2731e843490b774e7a01c
|
||||
josepy==1.2.0 \
|
||||
--hash=sha256:8ea15573203f28653c00f4ac0142520777b1c59d9eddd8da3f256c6ba3cac916 \
|
||||
--hash=sha256:9cec9a839fe9520f0420e4f38e7219525daccce4813296627436fe444cd002d3
|
||||
mock==1.3.0 \
|
||||
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6 \
|
||||
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb
|
||||
parsedatetime==2.5 \
|
||||
--hash=sha256:3b835fc54e472c17ef447be37458b400e3fefdf14bb1ffdedb5d2c853acf4ba1 \
|
||||
--hash=sha256:d2e9ddb1e463de871d32088a3f3cea3dc8282b1b2800e081bd0ef86900451667
|
||||
pbr==5.4.5 \
|
||||
--hash=sha256:07f558fece33b05caf857474a366dfcc00562bca13dd8b47b2b3e22d9f9bf55c \
|
||||
--hash=sha256:579170e23f8e0c2f24b0de612f71f648eccb79fb1322c814ae6b3c07b5ba23e8
|
||||
pbr==5.4.4 \
|
||||
--hash=sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b \
|
||||
--hash=sha256:61aa52a0f18b71c5cc58232d2cf8f8d09cd67fcad60b742a60124cb8d6951488
|
||||
pyOpenSSL==19.1.0 \
|
||||
--hash=sha256:621880965a720b8ece2f1b2f54ea2071966ab00e2970ad2ce11d596102063504 \
|
||||
--hash=sha256:9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507
|
||||
pyRFC3339==1.1 \
|
||||
--hash=sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4 \
|
||||
--hash=sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a
|
||||
pycparser==2.20 \
|
||||
--hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \
|
||||
--hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705
|
||||
pyparsing==2.4.7 \
|
||||
--hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \
|
||||
--hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b
|
||||
pycparser==2.19 \
|
||||
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3
|
||||
pyparsing==2.4.6 \
|
||||
--hash=sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f \
|
||||
--hash=sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec
|
||||
python-augeas==0.5.0 \
|
||||
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2
|
||||
pytz==2020.1 \
|
||||
--hash=sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed \
|
||||
--hash=sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048
|
||||
requests==2.23.0 \
|
||||
--hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \
|
||||
--hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6
|
||||
pytz==2019.3 \
|
||||
--hash=sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d \
|
||||
--hash=sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be
|
||||
requests==2.22.0 \
|
||||
--hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \
|
||||
--hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31
|
||||
requests-toolbelt==0.9.1 \
|
||||
--hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
|
||||
--hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0
|
||||
six==1.15.0 \
|
||||
--hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \
|
||||
--hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced
|
||||
urllib3==1.25.9 \
|
||||
--hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \
|
||||
--hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115
|
||||
zope.component==4.6.1 \
|
||||
--hash=sha256:bfbe55d4a93e70a78b10edc3aad4de31bb8860919b7cbd8d66f717f7d7b279ac \
|
||||
--hash=sha256:d9c7c27673d787faff8a83797ce34d6ebcae26a370e25bddb465ac2182766aca
|
||||
six==1.14.0 \
|
||||
--hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \
|
||||
--hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c
|
||||
urllib3==1.25.8 \
|
||||
--hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \
|
||||
--hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc
|
||||
zope.component==4.6 \
|
||||
--hash=sha256:ec2afc5bbe611dcace98bb39822c122d44743d635dafc7315b9aef25097db9e6
|
||||
zope.deferredimport==4.3.1 \
|
||||
--hash=sha256:57b2345e7b5eef47efcd4f634ff16c93e4265de3dcf325afc7315ade48d909e1 \
|
||||
--hash=sha256:9a0c211df44aa95f1c4e6d2626f90b400f56989180d3ef96032d708da3d23e0a
|
||||
@@ -1354,129 +1401,126 @@ zope.deprecation==4.4.0 \
|
||||
zope.event==4.4 \
|
||||
--hash=sha256:69c27debad9bdacd9ce9b735dad382142281ac770c4a432b533d6d65c4614bcf \
|
||||
--hash=sha256:d8e97d165fd5a0997b45f5303ae11ea3338becfe68c401dd88ffd2113fe5cae7
|
||||
zope.hookable==5.0.1 \
|
||||
--hash=sha256:0194b9b9e7f614abba60c90b231908861036578297515d3d6508eb10190f266d \
|
||||
--hash=sha256:0c2977473918bdefc6fa8dfb311f154e7f13c6133957fe649704deca79b92093 \
|
||||
--hash=sha256:17b8bdb3b77e03a152ca0d5ca185a7ae0156f5e5a2dbddf538676633a1f7380f \
|
||||
--hash=sha256:29d07681a78042cdd15b268ae9decffed9ace68a53eebeb61d65ae931d158841 \
|
||||
--hash=sha256:36fb1b35d1150267cb0543a1ddd950c0bc2c75ed0e6e92e3aaa6ac2e29416cb7 \
|
||||
--hash=sha256:3aed60c2bb5e812bbf9295c70f25b17ac37c233f30447a96c67913ba5073642f \
|
||||
--hash=sha256:3cac1565cc768911e72ca9ec4ddf5c5109e1fef0104f19f06649cf1874943b60 \
|
||||
--hash=sha256:3d4bc0cc4a37c3cd3081063142eeb2125511db3c13f6dc932d899c512690378e \
|
||||
--hash=sha256:3f73096f27b8c28be53ffb6604f7b570fbbb82f273c6febe5f58119009b59898 \
|
||||
--hash=sha256:522d1153d93f2d48aa0bd9fb778d8d4500be2e4dcf86c3150768f0e3adbbc4ef \
|
||||
--hash=sha256:523d2928fb7377bbdbc9af9c0b14ad73e6eaf226349f105733bdae27efd15b5a \
|
||||
--hash=sha256:5848309d4fc5c02150a45e8f8d2227e5bfda386a508bbd3160fed7c633c5a2fa \
|
||||
--hash=sha256:6781f86e6d54a110980a76e761eb54590630fd2af2a17d7edf02a079d2646c1d \
|
||||
--hash=sha256:6fd27921ebf3aaa945fa25d790f1f2046204f24dba4946f82f5f0a442577c3e9 \
|
||||
--hash=sha256:70d581862863f6bf9e175e85c9d70c2d7155f53fb04dcdb2f73cf288ca559a53 \
|
||||
--hash=sha256:81867c23b0dc66c8366f351d00923f2bc5902820a24c2534dfd7bf01a5879963 \
|
||||
--hash=sha256:81db29edadcbb740cd2716c95a297893a546ed89db1bfe9110168732d7f0afdd \
|
||||
--hash=sha256:86bd12624068cea60860a0759af5e2c3adc89c12aef6f71cf12f577e28deefe3 \
|
||||
--hash=sha256:9c184d8f9f7a76e1ced99855ccf390ffdd0ec3765e5cbf7b9cada600accc0a1e \
|
||||
--hash=sha256:acc789e8c29c13555e43fe4bf9fcd15a65512c9645e97bbaa5602e3201252b02 \
|
||||
--hash=sha256:afaa740206b7660d4cc3b8f120426c85761f51379af7a5b05451f624ad12b0af \
|
||||
--hash=sha256:b5f5fa323f878bb16eae68ea1ba7f6c0419d4695d0248bed4b18f51d7ce5ab85 \
|
||||
--hash=sha256:bd89e0e2c67bf4ac3aca2a19702b1a37269fb1923827f68324ac2e7afd6e3406 \
|
||||
--hash=sha256:c212de743283ec0735db24ec6ad913758df3af1b7217550ff270038062afd6ae \
|
||||
--hash=sha256:ca553f524293a0bdea05e7f44c3e685e4b7b022cb37d87bc4a3efa0f86587a8d \
|
||||
--hash=sha256:cab67065a3db92f636128d3157cc5424a145f82d96fb47159c539132833a6d36 \
|
||||
--hash=sha256:d3b3b3eedfdbf6b02898216e85aa6baf50207f4378a2a6803d6d47650cd37031 \
|
||||
--hash=sha256:d9f4a5a72f40256b686d31c5c0b1fde503172307beb12c1568296e76118e402c \
|
||||
--hash=sha256:df5067d87aaa111ed5d050e1ee853ba284969497f91806efd42425f5348f1c06 \
|
||||
--hash=sha256:e2587644812c6138f05b8a41594a8337c6790e3baf9a01915e52438c13fc6bef \
|
||||
--hash=sha256:e27fd877662db94f897f3fd532ef211ca4901eb1a70ba456f15c0866a985464a \
|
||||
--hash=sha256:e427ebbdd223c72e06ba94c004bb04e996c84dec8a0fa84e837556ae145c439e \
|
||||
--hash=sha256:e583ad4309c203ef75a09d43434cf9c2b4fa247997ecb0dcad769982c39411c7 \
|
||||
--hash=sha256:e760b2bc8ece9200804f0c2b64d10147ecaf18455a2a90827fbec4c9d84f3ad5 \
|
||||
--hash=sha256:ea9a9cc8bcc70e18023f30fa2f53d11ae069572a162791224e60cd65df55fb69 \
|
||||
--hash=sha256:ecb3f17dce4803c1099bd21742cd126b59817a4e76a6544d31d2cca6e30dbffd \
|
||||
--hash=sha256:ed794e3b3de42486d30444fb60b5561e724ee8a2d1b17b0c2e0f81e3ddaf7a87 \
|
||||
--hash=sha256:ee885d347279e38226d0a437b6a932f207f691c502ee565aba27a7022f1285df \
|
||||
--hash=sha256:fd5e7bc5f24f7e3d490698f7b854659a9851da2187414617cd5ed360af7efd63 \
|
||||
--hash=sha256:fe45f6870f7588ac7b2763ff1ce98cce59369717afe70cc353ec5218bc854bcc
|
||||
zope.interface==5.1.0 \
|
||||
--hash=sha256:0103cba5ed09f27d2e3de7e48bb320338592e2fabc5ce1432cf33808eb2dfd8b \
|
||||
--hash=sha256:14415d6979356629f1c386c8c4249b4d0082f2ea7f75871ebad2e29584bd16c5 \
|
||||
--hash=sha256:1ae4693ccee94c6e0c88a4568fb3b34af8871c60f5ba30cf9f94977ed0e53ddd \
|
||||
--hash=sha256:1b87ed2dc05cb835138f6a6e3595593fea3564d712cb2eb2de963a41fd35758c \
|
||||
--hash=sha256:269b27f60bcf45438e8683269f8ecd1235fa13e5411de93dae3b9ee4fe7f7bc7 \
|
||||
--hash=sha256:27d287e61639d692563d9dab76bafe071fbeb26818dd6a32a0022f3f7ca884b5 \
|
||||
--hash=sha256:39106649c3082972106f930766ae23d1464a73b7d30b3698c986f74bf1256a34 \
|
||||
    --hash=sha256:40e4c42bd27ed3c11b2c983fecfb03356fae1209de10686d03c02c8696a1d90e \
    --hash=sha256:461d4339b3b8f3335d7e2c90ce335eb275488c587b61aca4b305196dde2ff086 \
    --hash=sha256:4f98f70328bc788c86a6a1a8a14b0ea979f81ae6015dd6c72978f1feff70ecda \
    --hash=sha256:558a20a0845d1a5dc6ff87cd0f63d7dac982d7c3be05d2ffb6322a87c17fa286 \
    --hash=sha256:562dccd37acec149458c1791da459f130c6cf8902c94c93b8d47c6337b9fb826 \
    --hash=sha256:5e86c66a6dea8ab6152e83b0facc856dc4d435fe0f872f01d66ce0a2131b7f1d \
    --hash=sha256:60a207efcd8c11d6bbeb7862e33418fba4e4ad79846d88d160d7231fcb42a5ee \
    --hash=sha256:645a7092b77fdbc3f68d3cc98f9d3e71510e419f54019d6e282328c0dd140dcd \
    --hash=sha256:6874367586c020705a44eecdad5d6b587c64b892e34305bb6ed87c9bbe22a5e9 \
    --hash=sha256:74bf0a4f9091131de09286f9a605db449840e313753949fe07c8d0fe7659ad1e \
    --hash=sha256:7b726194f938791a6691c7592c8b9e805fc6d1b9632a833b9c0640828cd49cbc \
    --hash=sha256:8149ded7f90154fdc1a40e0c8975df58041a6f693b8f7edcd9348484e9dc17fe \
    --hash=sha256:8cccf7057c7d19064a9e27660f5aec4e5c4001ffcf653a47531bde19b5aa2a8a \
    --hash=sha256:911714b08b63d155f9c948da2b5534b223a1a4fc50bb67139ab68b277c938578 \
    --hash=sha256:a5f8f85986197d1dd6444763c4a15c991bfed86d835a1f6f7d476f7198d5f56a \
    --hash=sha256:a744132d0abaa854d1aad50ba9bc64e79c6f835b3e92521db4235a1991176813 \
    --hash=sha256:af2c14efc0bb0e91af63d00080ccc067866fb8cbbaca2b0438ab4105f5e0f08d \
    --hash=sha256:b054eb0a8aa712c8e9030065a59b5e6a5cf0746ecdb5f087cca5ec7685690c19 \
    --hash=sha256:b0becb75418f8a130e9d465e718316cd17c7a8acce6fe8fe07adc72762bee425 \
    --hash=sha256:b1d2ed1cbda2ae107283befd9284e650d840f8f7568cb9060b5466d25dc48975 \
    --hash=sha256:ba4261c8ad00b49d48bbb3b5af388bb7576edfc0ca50a49c11dcb77caa1d897e \
    --hash=sha256:d1fe9d7d09bb07228650903d6a9dc48ea649e3b8c69b1d263419cc722b3938e8 \
    --hash=sha256:d7804f6a71fc2dda888ef2de266727ec2f3915373d5a785ed4ddc603bbc91e08 \
    --hash=sha256:da2844fba024dd58eaa712561da47dcd1e7ad544a257482392472eae1c86d5e5 \
    --hash=sha256:dcefc97d1daf8d55199420e9162ab584ed0893a109f45e438b9794ced44c9fd0 \
    --hash=sha256:dd98c436a1fc56f48c70882cc243df89ad036210d871c7427dc164b31500dc11 \
    --hash=sha256:e74671e43ed4569fbd7989e5eecc7d06dc134b571872ab1d5a88f4a123814e9f \
    --hash=sha256:eb9b92f456ff3ec746cd4935b73c1117538d6124b8617bc0fe6fda0b3816e345 \
    --hash=sha256:ebb4e637a1fb861c34e48a00d03cffa9234f42bef923aec44e5625ffb9a8e8f9 \
    --hash=sha256:ef739fe89e7f43fb6494a43b1878a36273e5924869ba1d866f752c5812ae8d58 \
    --hash=sha256:f40db0e02a8157d2b90857c24d89b6310f9b6c3642369852cdc3b5ac49b92afc \
    --hash=sha256:f68bf937f113b88c866d090fea0bc52a098695173fc613b055a17ff0cf9683b6 \
    --hash=sha256:fb55c182a3f7b84c1a2d6de5fa7b1a05d4660d866b91dbf8d74549c57a1499e8
zope.proxy==4.3.5 \
    --hash=sha256:00573dfa755d0703ab84bb23cb6ecf97bb683c34b340d4df76651f97b0bab068 \
    --hash=sha256:092049280f2848d2ba1b57b71fe04881762a220a97b65288bcb0968bb199ec30 \
    --hash=sha256:0cbd27b4d3718b5ec74fc65ffa53c78d34c65c6fd9411b8352d2a4f855220cf1 \
    --hash=sha256:17fc7e16d0c81f833a138818a30f366696653d521febc8e892858041c4d88785 \
    --hash=sha256:19577dfeb70e8a67249ba92c8ad20589a1a2d86a8d693647fa8385408a4c17b0 \
    --hash=sha256:207aa914576b1181597a1516e1b90599dc690c095343ae281b0772e44945e6a4 \
    --hash=sha256:219a7db5ed53e523eb4a4769f13105118b6d5b04ed169a283c9775af221e231f \
    --hash=sha256:2b50ea79849e46b5f4f2b0247a3687505d32d161eeb16a75f6f7e6cd81936e43 \
    --hash=sha256:5903d38362b6c716e66bbe470f190579c530a5baf03dbc8500e5c2357aa569a5 \
    --hash=sha256:5c24903675e271bd688c6e9e7df5775ac6b168feb87dbe0e4bcc90805f21b28f \
    --hash=sha256:5ef6bc5ed98139e084f4e91100f2b098a0cd3493d4e76f9d6b3f7b95d7ad0f06 \
    --hash=sha256:61b55ae3c23a126a788b33ffb18f37d6668e79a05e756588d9e4d4be7246ab1c \
    --hash=sha256:63ddb992931a5e616c87d3d89f5a58db086e617548005c7f9059fac68c03a5cc \
    --hash=sha256:6943da9c09870490dcfd50c4909c0cc19f434fa6948f61282dc9cb07bcf08160 \
    --hash=sha256:6ad40f85c1207803d581d5d75e9ea25327cd524925699a83dfc03bf8e4ba72b7 \
    --hash=sha256:6b44433a79bdd7af0e3337bd7bbcf53dd1f9b0fa66bf21bcb756060ce32a96c1 \
    --hash=sha256:6bbaa245015d933a4172395baad7874373f162955d73612f0b66b6c2c33b6366 \
    --hash=sha256:7007227f4ea85b40a2f5e5a244479f6a6dfcf906db9b55e812a814a8f0e2c28d \
    --hash=sha256:74884a0aec1f1609190ec8b34b5d58fb3b5353cf22b96161e13e0e835f13518f \
    --hash=sha256:7d25fe5571ddb16369054f54cdd883f23de9941476d97f2b92eb6d7d83afe22d \
    --hash=sha256:7e162bdc5e3baad26b2262240be7d2bab36991d85a6a556e48b9dfb402370261 \
    --hash=sha256:814d62678dc3a30f4aa081982d830b7c342cf230ffc9d030b020cb154eeebf9e \
    --hash=sha256:8878a34c5313ee52e20aa50b03138af8d472bae465710fb954d133a9bfd3c38d \
    --hash=sha256:a66a0d94e5b081d5d695e66d6667e91e74d79e273eee95c1747717ba9cb70792 \
    --hash=sha256:a69f5cbf4addcfdf03dda564a671040127a6b7c34cf9fe4973582e68441b63fa \
    --hash=sha256:b00f9f0c334d07709d3f73a7cb8ae63c6ca1a90c790a63b5e7effa666ef96021 \
    --hash=sha256:b6ed71e4a7b4690447b626f499d978aa13197a0e592950e5d7020308f6054698 \
    --hash=sha256:bdf5041e5851526e885af579d2f455348dba68d74f14a32781933569a327fddf \
    --hash=sha256:be034360dd34e62608419f86e799c97d389c10a0e677a25f236a971b2f40dac9 \
    --hash=sha256:cc8f590a5eed30b314ae6b0232d925519ade433f663de79cc3783e4b10d662ba \
    --hash=sha256:cd7a318a15fe6cc4584bf3c4426f092ed08c0fd012cf2a9173114234fe193e11 \
    --hash=sha256:cf19b5f63a59c20306e034e691402b02055c8f4e38bf6792c23cad489162a642 \
    --hash=sha256:cfc781ce442ec407c841e9aa51d0e1024f72b6ec34caa8fdb6ef9576d549acf2 \
    --hash=sha256:dea9f6f8633571e18bc20cad83603072e697103a567f4b0738d52dd0211b4527 \
    --hash=sha256:e4a86a1d5eb2cce83c5972b3930c7c1eac81ab3508464345e2b8e54f119d5505 \
    --hash=sha256:e7106374d4a74ed9ff00c46cc00f0a9f06a0775f8868e423f85d4464d2333679 \
    --hash=sha256:e98a8a585b5668aa9e34d10f7785abf9545fe72663b4bfc16c99a115185ae6a5 \
    --hash=sha256:f64840e68483316eb58d82c376ad3585ca995e69e33b230436de0cdddf7363f9 \
    --hash=sha256:f8f4b0a9e6683e43889852130595c8854d8ae237f2324a053cdd884de936aa9b \
    --hash=sha256:fc45a53219ed30a7f670a6d8c98527af0020e6fd4ee4c0a8fb59f147f06d816c
zope.hookable==5.0.0 \
    --hash=sha256:0992a0dd692003c09fb958e1480cebd1a28f2ef32faa4857d864f3ca8e9d6952 \
    --hash=sha256:0f325838dbac827a1e2ed5d482c1f2656b6844dc96aa098f7727e76395fcd694 \
    --hash=sha256:22a317ba00f61bac99eac1a5e330be7cb8c316275a21269ec58aa396b602af0c \
    --hash=sha256:25531cb5e7b35e8a6d1d6eddef624b9a22ce5dcf8f4448ef0f165acfa8c3fc21 \
    --hash=sha256:30890892652766fc80d11f078aca9a5b8150bef6b88aba23799581a53515c404 \
    --hash=sha256:342d682d93937e5b8c232baffb32a87d5eee605d44f74566657c64a239b7f342 \
    --hash=sha256:46b2fddf1f5aeb526e02b91f7e62afbb9fff4ffd7aafc97cdb00a0d717641567 \
    --hash=sha256:523318ff96df9b8d378d997c00c5d4cbfbff68dc48ff5ee5addabdb697d27528 \
    --hash=sha256:53aa02eb8921d4e667c69d76adeed8fe426e43870c101cb08dcd2f3468aff742 \
    --hash=sha256:62e79e8fdde087cb20822d7874758f5acbedbffaf3c0fbe06309eb8a41ee4e06 \
    --hash=sha256:74bf2f757f7385b56dc3548adae508d8b3ef952d600b4b12b88f7d1706b05dcc \
    --hash=sha256:751ee9d89eb96e00c1d7048da9725ce392a708ed43406416dc5ed61e4d199764 \
    --hash=sha256:7b83bc341e682771fe810b360cd5d9c886a948976aea4b979ff214e10b8b523b \
    --hash=sha256:81eeeb27dbb0ddaed8070daee529f0d1bfe4f74c7351cce2aaca3ea287c4cc32 \
    --hash=sha256:856509191e16930335af4d773c0fc31a17bae8991eb6f167a09d5eddf25b56cc \
    --hash=sha256:8853e81fd07b18fa9193b19e070dc0557848d9945b1d2dac3b7782543458c87d \
    --hash=sha256:94506a732da2832029aecdfe6ea07eb1b70ee06d802fff34e1b3618fe7cdf026 \
    --hash=sha256:95ad874a8cc94e786969215d660143817f745225579bfe318c4676e218d3147c \
    --hash=sha256:9758ec9174966ffe5c499b6c3d149f80aa0a9238020006a2b87c6af5963fcf48 \
    --hash=sha256:a169823e331da939aa7178fc152e65699aeb78957e46c6f80ccb50ee4c3616c2 \
    --hash=sha256:a67878a798f6ca292729a28c2226592b3d000dc6ee7825d31887b553686c7ac7 \
    --hash=sha256:a9a6d9eb2319a09905670810e2de971d6c49013843700b4975e2fc0afe96c8db \
    --hash=sha256:b3e118b58a3d2301960e6f5f25736d92f6b9f861728d3b8c26d69f54d8a157d2 \
    --hash=sha256:ca6705c2a1fb5059a4efbe9f5426be4cdf71b3c9564816916fc7aa7902f19ede \
    --hash=sha256:cf711527c9d4ae72085f137caffb4be74fc007ffb17cd103628c7d5ba17e205f \
    --hash=sha256:d087602a6845ebe9d5a1c5a949fedde2c45f372d77fbce4f7fe44b68b28a1d03 \
    --hash=sha256:d1080e1074ddf75ad6662a9b34626650759c19a9093e1a32a503d37e48da135b \
    --hash=sha256:db9c60368aff2b7e6c47115f3ad9bd6e96aa298b12ed5f8cb13f5673b30be565 \
    --hash=sha256:dbeb127a04473f5a989169eb400b67beb921c749599b77650941c21fe39cb8d9 \
    --hash=sha256:dca336ca3682d869d291d7cd18284f6ff6876e4244eb1821430323056b000e2c \
    --hash=sha256:dd69a9be95346d10c853b6233fcafe3c0315b89424b378f2ad45170d8e161568 \
    --hash=sha256:dd79f8fae5894f1ee0a0042214685f2d039341250c994b825c10a4cd075d80f6 \
    --hash=sha256:e647d850aa1286d98910133cee12bd87c354f7b7bb3f3cd816a62ba7fa2f7007 \
    --hash=sha256:f37a210b5c04b2d4e4bac494ab15b70196f219a1e1649ddca78560757d4278fb \
    --hash=sha256:f67820b6d33a705dc3c1c457156e51686f7b350ff57f2112e1a9a4dad38ec268 \
    --hash=sha256:f68969978ccf0e6123902f7365aae5b7a9e99169d4b9105c47cf28e788116894 \
    --hash=sha256:f717a0b34460ae1ac0064e91b267c0588ac2c098ffd695992e72cd5462d97a67 \
    --hash=sha256:f9d58ccec8684ca276d5a4e7b0dfacca028336300a8f715d616d9f0ce9ae8096 \
    --hash=sha256:fcc3513a54e656067cbf7b98bab0d6b9534b9eabc666d1f78aad6acdf0962736
zope.interface==4.7.1 \
    --hash=sha256:048b16ac882a05bc7ef534e8b9f15c9d7a6c190e24e8938a19b7617af4ed854a \
    --hash=sha256:05816cf8e7407cf62f2ec95c0a5d69ec4fa5741d9ccd10db9f21691916a9a098 \
    --hash=sha256:065d6a1ac89d35445168813bed45048ed4e67a4cdfc5a68fdb626a770378869f \
    --hash=sha256:14157421f4121a57625002cc4f48ac7521ea238d697c4a4459a884b62132b977 \
    --hash=sha256:18dc895945694f397a0be86be760ff664b790f95d8e7752d5bab80284ff9105d \
    --hash=sha256:1962c9f838bd6ae4075d0014f72697510daefc7e1c7e48b2607df0b6e157989c \
    --hash=sha256:1a67408cacd198c7e6274a19920bb4568d56459e659e23c4915528686ac1763a \
    --hash=sha256:21bf781076dd616bd07cf0223f79d61ab4f45176076f90bc2890e18c48195da4 \
    --hash=sha256:21c0a5d98650aebb84efa16ce2c8df1a46bdc4fe8a9e33237d0ca0b23f416ead \
    --hash=sha256:23cfeea25d1e42ff3bf4f9a0c31e9d5950aa9e7c4b12f0c4bd086f378f7b7a71 \
    --hash=sha256:24b6fce1fb71abf9f4093e3259084efcc0ef479f89356757780685bd2b06ef37 \
    --hash=sha256:24f84ce24eb6b5fcdcb38ad9761524f1ae96f7126abb5e597f8a3973d9921409 \
    --hash=sha256:25e0ef4a824017809d6d8b0ce4ab3288594ba283e4d4f94d8cfb81d73ed65114 \
    --hash=sha256:2e8fdd625e9aba31228e7ddbc36bad5c38dc3ee99a86aa420f89a290bd987ce9 \
    --hash=sha256:2f3bc2f49b67b1bea82b942d25bc958d4f4ea6709b411cb2b6b9718adf7914ce \
    --hash=sha256:35d24be9d04d50da3a6f4d61de028c1dd087045385a0ff374d93ef85af61b584 \
    --hash=sha256:35dbe4e8c73003dff40dfaeb15902910a4360699375e7b47d3c909a83ff27cd0 \
    --hash=sha256:3dfce831b824ab5cf446ed0c350b793ac6fa5fe33b984305cb4c966a86a8fb79 \
    --hash=sha256:3f7866365df5a36a7b8de8056cd1c605648f56f9a226d918ed84c85d25e8d55f \
    --hash=sha256:455cc8c01de3bac6f9c223967cea41f4449f58b4c2e724ec8177382ddd183ab4 \
    --hash=sha256:4bb937e998be9d5e345f486693e477ba79e4344674484001a0b646be1d530487 \
    --hash=sha256:52303a20902ca0888dfb83230ca3ee6fbe63c0ad1dd60aa0bba7958ccff454d8 \
    --hash=sha256:6e0a897d4e09859cc80c6a16a29697406ead752292ace17f1805126a4f63c838 \
    --hash=sha256:6e1816e7c10966330d77af45f77501f9a68818c065dec0ad11d22b50a0e212e7 \
    --hash=sha256:73b5921c5c6ce3358c836461b5470bf675601c96d5e5d8f2a446951470614f67 \
    --hash=sha256:8093cd45cdb5f6c8591cfd1af03d32b32965b0f79b94684cd0c9afdf841982bb \
    --hash=sha256:864b4a94b60db301899cf373579fd9ef92edddbf0fb2cd5ae99f53ef423ccc56 \
    --hash=sha256:8a27b4d3ea9c6d086ce8e7cdb3e8d319b6752e2a03238a388ccc83ccbe165f50 \
    --hash=sha256:91b847969d4784abd855165a2d163f72ac1e58e6dce09a5e46c20e58f19cc96d \
    --hash=sha256:b47b1028be4758c3167e474884ccc079b94835f058984b15c145966c4df64d27 \
    --hash=sha256:b68814a322835d8ad671b7acc23a3b2acecba527bb14f4b53fc925f8a27e44d8 \
    --hash=sha256:bcb50a032c3b6ec7fb281b3a83d2b31ab5246c5b119588725b1350d3a1d9f6a3 \
    --hash=sha256:c56db7d10b25ce8918b6aec6b08ac401842b47e6c136773bfb3b590753f7fb67 \
    --hash=sha256:c94b77a13d4f47883e4f97f9fa00f5feadd38af3e6b3c7be45cfdb0a14c7149b \
    --hash=sha256:db381f6fdaef483ad435f778086ccc4890120aff8df2ba5cfeeac24d280b3145 \
    --hash=sha256:e6487d01c8b7ed86af30ea141fcc4f93f8a7dde26f94177c1ad637c353bd5c07 \
    --hash=sha256:e86923fa728dfba39c5bb6046a450bd4eec8ad949ac404eca728cfce320d1732 \
    --hash=sha256:f6ca36dc1e9eeb46d779869c60001b3065fb670b5775c51421c099ea2a77c3c9 \
    --hash=sha256:fb62f2cbe790a50d95593fb40e8cca261c31a2f5637455ea39440d6457c2ba25
zope.proxy==4.3.3 \
    --hash=sha256:04646ac04ffa9c8e32fb2b5c3cd42995b2548ea14251f3c21ca704afae88e42c \
    --hash=sha256:07b6bceea232559d24358832f1cd2ed344bbf05ca83855a5b9698b5f23c5ed60 \
    --hash=sha256:1ef452cc02e0e2f8e3c917b1a5b936ef3280f2c2ca854ee70ac2164d1655f7e6 \
    --hash=sha256:22bf61857c5977f34d4e391476d40f9a3b8c6ab24fb0cac448d42d8f8b9bf7b2 \
    --hash=sha256:299870e3428cbff1cd9f9b34144e76ecdc1d9e3192a8cf5f1b0258f47a239f58 \
    --hash=sha256:2bfc36bfccbe047671170ea5677efd3d5ab730a55d7e45611d76d495e5b96766 \
    --hash=sha256:32e82d5a640febc688c0789e15ea875bf696a10cf358f049e1ed841f01710a9b \
    --hash=sha256:3b2051bdc4bc3f02fa52483f6381cf40d4d48167645241993f9d7ebbd142ed9b \
    --hash=sha256:3f734bd8a08f5185a64fb6abb8f14dc97ec27a689ca808fb7a83cdd38d745e4f \
    --hash=sha256:3f78dd8de3112df8bbd970f0916ac876dc3fbe63810bd1cf7cc5eec4cbac4f04 \
    --hash=sha256:4eabeb48508953ba1f3590ad0773b8daea9e104eec66d661917e9bbcd7125a67 \
    --hash=sha256:4f05ecc33808187f430f249cb1ccab35c38f570b181f2d380fbe253da94b18d8 \
    --hash=sha256:4f4f4cbf23d3afc1526294a31e7b3eaa0f682cc28ac5366065dc1d6bb18bd7be \
    --hash=sha256:5483d5e70aacd06f0aa3effec9fed597c0b50f45060956eeeb1203c44d4338c3 \
    --hash=sha256:56a5f9b46892b115a75d0a1f2292431ad5988461175826600acc69a24cb3edee \
    --hash=sha256:64bb63af8a06f736927d260efdd4dfc5253d42244f281a8063e4b9eea2ddcbc5 \
    --hash=sha256:653f8cbefcf7c6ac4cece2cdef367c4faa2b7c19795d52bd7cbec11a8739a7c1 \
    --hash=sha256:664211d63306e4bd4eec35bf2b4bd9db61c394037911cf2d1804c43b511a49f1 \
    --hash=sha256:6651e6caed66a8fff0fef1a3e81c0ed2253bf361c0fdc834500488732c5d16e9 \
    --hash=sha256:6c1fba6cdfdf105739d3069cf7b07664f2944d82a8098218ab2300a82d8f40fc \
    --hash=sha256:6e64246e6e9044a4534a69dca1283c6ddab6e757be5e6874f69024329b3aa61f \
    --hash=sha256:838390245c7ec137af4993c0c8052f49d5ec79e422b4451bfa37fee9b9ccaa01 \
    --hash=sha256:856b410a14793069d8ba35f33fff667213ea66f2df25a0024cc72a7493c56d4c \
    --hash=sha256:8b932c364c1d1605a91907a41128ed0ee8a2d326fc0fafb2c55cd46f545f4599 \
    --hash=sha256:9086cf6d20f08dae7f296a78f6c77d1f8d24079d448f023ee0eb329078dd35e1 \
    --hash=sha256:9698533c14afa0548188de4968a7932d1f3f965f3f5ba1474de673596bb875af \
    --hash=sha256:9b12b05dd7c28f5068387c1afee8cb94f9d02501e7ef495a7c5c7e27139b96ad \
    --hash=sha256:a884c7426a5bc6fb7fc71a55ad14e66818e13f05b78b20a6f37175f324b7acb8 \
    --hash=sha256:abe9e7f1a3e76286c5f5baf2bf5162d41dc0310da493b34a2c36555f38d928f7 \
    --hash=sha256:bd6fde63b015a27262be06bd6bbdd895273cc2bdf2d4c7e1c83711d26a8fbace \
    --hash=sha256:bda7c62c954f47b87ed9a89f525eee1b318ec7c2162dfdba76c2ccfa334e0caa \
    --hash=sha256:be8a4908dd3f6e965993c0068b006bdbd0474fbcbd1da4893b49356e73fc1557 \
    --hash=sha256:ced65fc3c7d7205267506d854bb1815bb445899cca9d21d1d4b949070a635546 \
    --hash=sha256:dac4279aa05055d3897ab5e5ee5a7b39db121f91df65a530f8b1ac7f9bd93119 \
    --hash=sha256:e4f1863056e3e4f399c285b67fa816f411a7bfa1c81ef50e186126164e396e59 \
    --hash=sha256:ecd85f68b8cd9ab78a0141e87ea9a53b2f31fd9b1350a1c44da1f7481b5363ef \
    --hash=sha256:ed269b83750413e8fc5c96276372f49ee3fcb7ed61c49fe8e5a67f54459a5a4a \
    --hash=sha256:f19b0b80cba73b204dee68501870b11067711d21d243fb6774256d3ca2e5391f \
    --hash=sha256:ffdafb98db7574f9da84c489a10a5d582079a888cb43c64e9e6b0e3fe1034685

# Contains the requirements for the letsencrypt package.
#
@@ -1489,18 +1533,18 @@ letsencrypt==0.7.0 \
    --hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
    --hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9

certbot==1.14.0 \
    --hash=sha256:67b4d26ceaea6c7f8325d0d45169e7a165a2cabc7122c84bc971ba068ca19cca \
    --hash=sha256:959ea90c6bb8dca38eab9772722cb940972ef6afcd5f15deef08b3c3636841eb
acme==1.14.0 \
    --hash=sha256:4f48c41261202f1a389ec2986b2580b58f53e0d5a1ae2463b34318d78b87fc66 \
    --hash=sha256:61daccfb0343628cbbca551a7fc4c82482113952c21db3fe0c585b7c98fa1c35
certbot-apache==1.14.0 \
    --hash=sha256:b757038db23db707c44630fecb46e99172bd791f0db5a8e623c0842613c4d3d9 \
    --hash=sha256:887fe4a21af2de1e5c2c9428bacba6eb7c1219257bc70f1a1d8447c8a321adb0
certbot-nginx==1.14.0 \
    --hash=sha256:8916a815437988d6c192df9f035bb7a176eab20eee0956677b335d0698d243fb \
    --hash=sha256:cc2a8a0de56d9bb6b2efbda6c80c647dad8db2bb90675cac03ade94bd5fc8597
certbot==1.5.0 \
    --hash=sha256:ec1f01af06b52a6f079f5b02cb70e88f0671a7b13ecb3e45b040563e32c6e53a \
    --hash=sha256:c52017a4f84137e1312c898d6ae69c5f7977d79d2bd4c2df013cbbf39b6539bf
acme==1.5.0 \
    --hash=sha256:66de67b394bb7606f97f2c21507e6eb6a88936db2a940f5c4893025f87e3852a \
    --hash=sha256:b051ff7dd3935b2032c2f8c8386e905d9b658eba9f3455e352650d85bea9c8f0
certbot-apache==1.5.0 \
    --hash=sha256:d2c28be6dcd6c56a8040c8c733e72c1341238b1b47fb59f544eb832b9d5c81ba \
    --hash=sha256:3eec5a49ae4fcf86213f962eb1e11d8a725b65e7dcee18f9b92c7aa73f821764
certbot-nginx==1.5.0 \
    --hash=sha256:3d27fd02ebe15b07ce5fa9525ceeda82aa5fdc45aa064729434faff0442d1f91 \
    --hash=sha256:b38f101588af6d2b8ea7c2e3334f249afbe14461a85add2f1420091d860df983

UNLIKELY_EOF
# -------------------------------------------------------------------------
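The requirement pins embedded above carry one or more --hash digests, so pip rejects any archive whose SHA-256 does not match. A minimal sketch of the same check, assuming an already-downloaded wheel or sdist on disk; the file path and digest arguments are illustrative, not values taken from this diff:

import hashlib
import sys

def matches_pin(path, pinned_sha256):
    """Return True if the archive's SHA-256 digest equals the pinned value."""
    digest = hashlib.sha256()
    with open(path, "rb") as archive:
        for chunk in iter(lambda: archive.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest() == pinned_sha256

if __name__ == "__main__":
    # Hypothetical arguments: a downloaded archive and the expected digest.
    archive_path, pinned = sys.argv[1], sys.argv[2]
    print("OK" if matches_pin(archive_path, pinned) else "hash mismatch")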
@@ -1574,11 +1618,6 @@ maybe_argparse = (
    if sys.version_info < (2, 7, 0) else [])


# Be careful when updating the pinned versions here, in particular for pip.
# Indeed starting from 10.0, pip will build dependencies in isolation if the
# related projects are compliant with PEP 517. This is not something we want
# as of now, so the isolation build will need to be disabled wherever
# pipstrap is used (see https://github.com/certbot/certbot/issues/8256).
PACKAGES = maybe_argparse + [
    # Pip has no dependencies, as it vendors everything:
    ('11/b6/abcb525026a4be042b486df43905d6893fb04f05aac21c32c638e939e447/'

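As the pipstrap comment above notes, callers are expected to turn off PEP 517 build isolation. A hedged sketch of how a caller might invoke pip accordingly, combining --no-build-isolation with hash checking; the requirements path is an assumption, not a file from this diff:

import subprocess
import sys

def install_pinned(requirements_path):
    """Install hash-pinned requirements with PEP 517 build isolation disabled."""
    subprocess.check_call([
        sys.executable, "-m", "pip", "install",
        "--no-build-isolation",   # skip isolated PEP 517 builds, per the comment above
        "--require-hashes",       # enforce the --hash pins in the requirements file
        "--requirement", requirements_path,
    ])

if __name__ == "__main__":
    install_pinned("requirements.txt")  # hypothetical path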
@@ -1,60 +0,0 @@
options {
    directory "/var/cache/bind";

    // Running inside Docker. Bind address on Docker host is 127.0.0.1.
    listen-on { any; };
    listen-on-v6 { any; };

    // We are allowing BIND to service recursive queries, but only in an extremely limited sense
    // where it is entirely disconnected from public DNS:
    // - Iterative queries are disabled. Only forwarding to a non-existent forwarder.
    // - The only recursive answers we can get (that will not be a SERVFAIL) will come from the
    //   RPZ "mock-recursion" zone. Effectively this means we are mocking out the entirety of
    //   public DNS.
    allow-recursion { any; };    // BIND will only answer using RPZ if recursion is enabled
    forwarders { 192.0.2.254; }; // Nobody is listening, this is TEST-NET-1
    forward only;                // Do NOT perform iterative queries from the root zone
    dnssec-validation no;        // Do not bother fetching the root DNSKEY set (performance)
    response-policy {            // All recursive queries will be served from here.
        zone "mock-recursion"
            log yes;
    } recursive-only no          // Allow RPZs to affect authoritative zones too.
      qname-wait-recurse no      // No real recursion.
      nsip-wait-recurse no;      // No real recursion.

    allow-transfer { none; };
    allow-update { none; };
};

key "default-key." {
    algorithm hmac-sha512;
    secret "91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==";
};

zone "mock-recursion" {
    type primary;
    file "/var/lib/bind/rpz.mock-recursion";
    allow-query {
        none;
    };
};

zone "example.com." {
    type primary;
    file "/var/lib/bind/db.example.com";
    journal "/var/cache/bind/db.example.com.jnl";

    update-policy {
        grant default-key zonesub TXT;
    };
};

zone "sub.example.com." {
    type primary;
    file "/var/lib/bind/db.sub.example.com";
    journal "/var/cache/bind/db.sub.example.com.jnl";

    update-policy {
        grant default-key zonesub TXT;
    };
};
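To illustrate the effect of the options block above: any recursive lookup sent to this server is answered locally from the RPZ (or fails), never by real recursion to public DNS. A small dnspython sketch under the assumption that the BIND instance is reachable on 127.0.0.1:53; dnspython itself and that address are not part of this diff:

import dns.exception
import dns.resolver

# Point a resolver at the BIND instance configured above.
resolver = dns.resolver.Resolver(configure=False)
resolver.nameservers = ["127.0.0.1"]  # assumed address of the test BIND server
resolver.port = 53                    # assumed port

try:
    # With forwarding aimed at TEST-NET-1, this can only be answered from the
    # "mock-recursion" RPZ zone; otherwise it fails instead of reaching public DNS.
    answer = resolver.resolve("www.example.org", "A")
    print([rr.to_text() for rr in answer])
except dns.resolver.NXDOMAIN:
    print("name does not exist in the mock zones")
except (dns.resolver.NoAnswer, dns.resolver.NoNameservers, dns.exception.Timeout):
    print("not resolvable without real recursion")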
@@ -1,10 +0,0 @@
# Target DNS server
dns_rfc2136_server = {server_address}
# Target DNS port
dns_rfc2136_port = {server_port}
# TSIG key name
dns_rfc2136_name = default-key.
# TSIG key secret
dns_rfc2136_secret = 91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==
# TSIG key algorithm
dns_rfc2136_algorithm = HMAC-SHA512
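These credentials drive RFC 2136 dynamic updates against the zones above, whose update-policy only grants TXT records via the default-key TSIG key. A sketch of the equivalent update using dnspython, assuming the {server_address}/{server_port} placeholders resolve to 127.0.0.1:53 and that "dummy-validation-token" stands in for a real challenge value:

import dns.query
import dns.rcode
import dns.tsigkeyring
import dns.update

# TSIG key matching the credentials file above.
keyring = dns.tsigkeyring.from_text({
    "default-key.": "91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==",
})

# The zone's update-policy only grants TXT records, which is all DNS-01 needs.
update = dns.update.Update(
    "example.com.",
    keyring=keyring,
    keyname="default-key.",
    keyalgorithm="hmac-sha512",
)
update.add("_acme-challenge.example.com.", 120, "TXT", "dummy-validation-token")

response = dns.query.tcp(update, "127.0.0.1", port=53)  # assumed server and port
print("rcode:", dns.rcode.to_text(response.rcode()))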
@@ -1,11 +0,0 @@
$ORIGIN example.com.
$TTL 3600
example.com. IN SOA ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )

example.com. IN NS ns1
example.com. IN NS ns2

ns1 IN A 192.0.2.2
ns2 IN A 192.0.2.3

@ IN A 192.0.2.1
@@ -1,9 +0,0 @@
$ORIGIN sub.example.com.
$TTL 3600
sub.example.com. IN SOA ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )

sub.example.com. IN NS ns1
sub.example.com. IN NS ns2

ns1 IN A 192.0.2.2
ns2 IN A 192.0.2.3
@@ -1,6 +0,0 @@
$TTL 3600

@   SOA ns1.example.test. dummy.example.test. 1 12h 15m 3w 2h
    NS ns1.example.test.

_acme-challenge.aliased.example IN CNAME _acme-challenge.example.com.
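The last record above aliases the challenge name onto _acme-challenge.example.com, the usual pattern for delegating DNS-01 validation into a zone that accepts dynamic updates. A short dnspython sketch of following such an alias to find where the TXT record actually has to be written; the fully qualified alias name and the resolver address are assumptions, since this zone's origin is not shown in the diff:

import dns.resolver

resolver = dns.resolver.Resolver(configure=False)
resolver.nameservers = ["127.0.0.1"]  # assumed test resolver

# Hypothetical fully qualified form of the aliased record above.
alias = "_acme-challenge.aliased.example.example.test."

# Follow the CNAME to the real challenge name before installing the TXT record.
target = resolver.resolve(alias, "CNAME")[0].target
print("publish the DNS-01 TXT record at", target)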
@@ -1,4 +1,5 @@
#!/usr/bin/env python
from __future__ import print_function
import os
import sys


@@ -1,14 +0,0 @@
This directory contains your keys and certificates.

`privkey.pem`  : the private key for your certificate.
`fullchain.pem`: the certificate file used in most server software.
`chain.pem`    : used for OCSP stapling in Nginx >=1.3.7.
`cert.pem`     : will break many server configurations, and should not be used
                 without reading further documentation (see link below).

WARNING: DO NOT MOVE OR RENAME THESE FILES!
         Certbot expects these files to remain in this location in order
         to function properly!

We recommend not moving these files. For more information, see the Certbot
User Guide at https://certbot.eff.org/docs/using.html#where-are-my-certificates.
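As the README above says, fullchain.pem plus privkey.pem is the pair most servers want. A minimal Python sketch of wiring those two files into a TLS server context; the lineage directory name below is hypothetical, only the file names come from the README:

import ssl

# fullchain.pem carries the leaf certificate plus its chain; privkey.pem is the key.
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
context.load_cert_chain(
    certfile="/etc/letsencrypt/live/example.com/fullchain.pem",  # hypothetical lineage path
    keyfile="/etc/letsencrypt/live/example.com/privkey.pem",
)
# The context can then be handed to context.wrap_socket() or any server framework
# that accepts an ssl.SSLContext.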
@@ -1,18 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIC2zCCAcOgAwIBAgIIBvrEnbPRYu8wDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxMjZjNGIwHhcNMjAxMDEyMjEwNzQw
WhcNMjUxMDEyMjEwNzQwWjAjMSEwHwYDVQQDExhjLmVuY3J5cHRpb24tZXhhbXBs
ZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAARjMhuW0ENPPC33PjB5XsYU
CRw640kPQENIDatcTJaENZIZdqKd6rI6jc+lpbmXot7Zi52clJlSJS+V6oDAt2Lh
o4HYMIHVMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB
BQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUj7Kd3ENqxlPf8B2bIGhsjydX
mPswHwYDVR0jBBgwFoAUEiGxlkRsi+VvcogH5dVD3h1laAcwMQYIKwYBBQUHAQEE
JTAjMCEGCCsGAQUFBzABhhVodHRwOi8vMTI3LjAuMC4xOjQwMDIwIwYDVR0RBBww
GoIYYy5lbmNyeXB0aW9uLWV4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQCl
k0JXsa8y7fg41WWMDhw60bPW77O0FtOmTcnhdI5daYNemQVk+Q5EMaBLQ/oGjgXd
9QXFzXH1PL904YEnSLt+iTpXn++7rQSNzQsdYqw0neWk4f5pEBiN+WORpb6mwobV
ifMtBOkNEHvrJ2Pkci9U1lLwtKD/DSew6QtJU5DSkmH1XdGuMJiubygEIvELtvgq
cP9S368ZvPmPGmKaJQXBiuaR8MTjY/Bkr79aXQMjKbf+mpn7h0POCcePk1DY/rm6
Da+X16lf0hHyQhSUa7Vgyim6rK1/hlw+Z00i+sQCKD9Ih7kXuuGqfSDC33cfO8Tj
o/MXO8lcxkrem5zU5QWP
-----END CERTIFICATE-----
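For completeness, a hedged sketch of inspecting a PEM certificate like the one above with the cryptography package; the file name is an assumption and the library is not part of this diff:

from cryptography import x509

# Load and inspect a PEM-encoded certificate such as the one shown above.
with open("cert.pem", "rb") as pem_file:  # hypothetical file name
    cert = x509.load_pem_x509_certificate(pem_file.read())

print("subject:  ", cert.subject.rfc4514_string())
print("issuer:   ", cert.issuer.rfc4514_string())
print("not after:", cert.not_valid_after)
try:
    san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName)
    print("SANs:     ", san.value.get_values_for_type(x509.DNSName))
except x509.ExtensionNotFound:
    print("no subjectAltName extension")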
Some files were not shown because too many files have changed in this diff.