Compare commits


3 Commits

Author SHA1 Message Date
Brad Warren
fcb85becb9 only test apache 2020-06-04 10:15:09 -07:00
Alex Zorin
ca1573ea32 Merge remote-tracking branch 'upstream/master' into apache_static_ssl_module 2020-06-04 08:59:33 +10:00
Alex Zorin
8af28d83a6 apache: handle statically linked mod_ssl
In #7771, the Apache configurator gained the ability to identify what
version of OpenSSL Apache's ssl_module is linked against. However, the
detection was only functional if the module was built as a DSO (which is
almost always the case).

This commit covers the case where the ssl_module is statically linked
within the Apache binary. It requires the user to specify the path to
the binary (with --apache-bin) and emits a warning if static linking is
detected but no path has been provided.
2020-05-20 21:00:53 +10:00
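The commit above describes detecting a statically linked mod_ssl, gated on a new --apache-bin option. As a rough, stand-alone illustration of that idea (this is not the code in this diff; the helper and its apache_bin parameter are hypothetical), a statically compiled module can be spotted by listing the modules built into the Apache binary with `httpd -l`:

# Illustrative sketch only, not Certbot's implementation. It assumes a
# statically linked mod_ssl shows up in the output of `<apache_bin> -l`
# (the list of modules compiled into the server binary).
import logging
import subprocess

logger = logging.getLogger(__name__)

def ssl_module_is_static(apache_bin=None):
    """Return True if mod_ssl appears to be compiled into the Apache binary.

    :param str apache_bin: path to the Apache binary; a hypothetical
        stand-in for the --apache-bin option mentioned in the commit message.
    """
    if apache_bin is None:
        # Mirrors the behaviour described above: warn when static linking
        # cannot be checked because no binary path was provided.
        logger.warning(
            "ssl_module may be statically linked but no Apache binary was "
            "given (--apache-bin); skipping OpenSSL version detection.")
        return False
    # `httpd -l` prints the modules compiled into the server binary;
    # a statically linked mod_ssl appears there as "mod_ssl.c".
    output = subprocess.run(
        [apache_bin, "-l"], capture_output=True, universal_newlines=True,
        check=False).stdout
    return "mod_ssl.c" in output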
320 changed files with 4586 additions and 7397 deletions

View File

@@ -2,13 +2,12 @@
trigger:
# When changing these triggers, please ensure the documentation under
# "Running tests in CI" is still correct.
- azure-test-*
- test-*
pr: none
variables:
# We don't publish our Docker images in this pipeline, but when building them
# for testing, let's use the nightly tag.
dockerTag: nightly
stages:
- template: templates/stages/test-and-package-stage.yml
jobs:
# Any addition here should be reflected in the advanced and release pipelines.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml

View File

@@ -0,0 +1,18 @@
# Advanced pipeline for running our full test suite on protected branches.
trigger:
- '*.x'
pr: none
# This pipeline is also nightly run on master
schedules:
- cron: "0 4 * * *"
displayName: Nightly build
branches:
include:
- master
always: true
jobs:
# Any addition here should be reflected in the advanced-test and release pipelines.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml

View File

@@ -1,7 +1,8 @@
trigger: none
trigger:
- master
pr:
- master
- '*.x'
jobs:
- template: templates/jobs/standard-tests-jobs.yml
- template: templates/tests-suite.yml

View File

@@ -1,18 +0,0 @@
# Nightly pipeline running each day for master.
trigger: none
pr: none
schedules:
- cron: "30 4 * * *"
displayName: Nightly build
branches:
include:
- master
always: true
variables:
dockerTag: nightly
stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/deploy-stage.yml
- template: templates/stages/notify-failure-stage.yml

View File

@@ -1,18 +1,13 @@
# Release pipeline to run our full test suite, build artifacts, and deploy them
# for GitHub release tags.
# Release pipeline to build and deploy Certbot for Windows for GitHub release tags
trigger:
tags:
include:
- v*
pr: none
variables:
dockerTag: ${{variables['Build.SourceBranchName']}}
stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/changelog-stage.yml
- template: templates/stages/deploy-stage.yml
parameters:
snapReleaseChannel: beta
- template: templates/stages/notify-failure-stage.yml
jobs:
# Any addition here should be reflected in the advanced and advanced-test pipelines.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml
- template: templates/changelog.yml

View File

@@ -0,0 +1,14 @@
jobs:
- job: changelog
pool:
vmImage: vs2017-win2016
steps:
- bash: |
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
displayName: Prepare changelog
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: changelog
displayName: Publish changelog

View File

@@ -0,0 +1,61 @@
jobs:
- job: installer_build
pool:
vmImage: vs2017-win2016
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.7
architecture: x86
addToPath: true
- script: python windows-installer/construct.py
displayName: Build Certbot installer
- task: CopyFiles@2
inputs:
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
contents: '*.exe'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: windows-installer
displayName: Publish Windows installer
- job: installer_run
dependsOn: installer_build
strategy:
matrix:
win2019:
imageName: windows-2019
win2016:
imageName: vs2017-win2016
pool:
vmImage: $(imageName)
steps:
- powershell: |
$currentVersion = $PSVersionTable.PSVersion
if ($currentVersion.Major -ne 5) {
throw "Powershell version is not 5.x"
}
condition: eq(variables['imageName'], 'vs2017-win2016')
displayName: Check Powershell 5.x is used in vs2017-win2016
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: windows-installer
path: $(Build.SourcesDirectory)/bin
displayName: Retrieve Windows installer
- script: |
py -3 -m venv venv
venv\Scripts\python tools\pip_install.py -e certbot-ci
displayName: Prepare Certbot-CI
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
displayName: Run windows installer integration tests
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
displayName: Run certbot integration tests

View File

@@ -1,102 +0,0 @@
jobs:
- job: extended_test
variables:
- name: IMAGE_NAME
value: ubuntu-18.04
- name: PYTHON_VERSION
value: 3.9
- group: certbot-common
strategy:
matrix:
linux-py36:
PYTHON_VERSION: 3.6
TOXENV: py36
linux-py37:
PYTHON_VERSION: 3.7
TOXENV: py37
linux-py38:
PYTHON_VERSION: 3.8
TOXENV: py38
linux-py37-nopin:
PYTHON_VERSION: 3.7
TOXENV: py37
CERTBOT_NO_PIN: 1
linux-boulder-v1-integration-certbot-oldest:
TOXENV: integration-certbot-oldest
ACME_SERVER: boulder-v1
linux-boulder-v2-integration-certbot-oldest:
TOXENV: integration-certbot-oldest
ACME_SERVER: boulder-v2
linux-boulder-v1-integration-nginx-oldest:
TOXENV: integration-nginx-oldest
ACME_SERVER: boulder-v1
linux-boulder-v2-integration-nginx-oldest:
TOXENV: integration-nginx-oldest
ACME_SERVER: boulder-v2
linux-boulder-v1-py27-integration:
PYTHON_VERSION: 2.7
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py27-integration:
PYTHON_VERSION: 2.7
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py36-integration:
PYTHON_VERSION: 3.6
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py36-integration:
PYTHON_VERSION: 3.6
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py37-integration:
PYTHON_VERSION: 3.7
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py37-integration:
PYTHON_VERSION: 3.7
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py38-integration:
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py38-integration:
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py39-integration:
PYTHON_VERSION: 3.9
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py39-integration:
PYTHON_VERSION: 3.9
TOXENV: integration
ACME_SERVER: boulder-v2
nginx-compat:
TOXENV: nginx_compat
linux-integration-rfc2136:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.8
TOXENV: integration-dns-rfc2136
docker-dev:
TOXENV: docker_dev
macos-farmtest-apache2:
# We run one of these test farm tests on macOS to help ensure the
# tests continue to work on the platform.
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.8
TOXENV: test-farm-apache2
farmtest-leauto-upgrades:
PYTHON_VERSION: 3.7
TOXENV: test-farm-leauto-upgrades
farmtest-certonly-standalone:
PYTHON_VERSION: 3.7
TOXENV: test-farm-certonly-standalone
farmtest-sdists:
PYTHON_VERSION: 3.7
TOXENV: test-farm-sdists
pool:
vmImage: $(IMAGE_NAME)
steps:
- template: ../steps/tox-steps.yml

View File

@@ -1,217 +0,0 @@
jobs:
# - job: docker_build
# pool:
# vmImage: ubuntu-18.04
# strategy:
# matrix:
# amd64:
# DOCKER_ARCH: amd64
# # Do not run the heavy non-amd64 builds for test branches
# ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
# arm32v6:
# DOCKER_ARCH: arm32v6
# arm64v8:
# DOCKER_ARCH: arm64v8
# steps:
# - bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
# displayName: Build the Docker images
# # We don't filter for the Docker Hub organization to continue to allow
# # easy testing of these scripts on forks.
# - bash: |
# set -e
# DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
# docker save --output images.tar $DOCKER_IMAGES
# displayName: Save the Docker images
# # If the name of the tar file or artifact changes, the deploy stage will
# # also need to be updated.
# - bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
# displayName: Prepare Docker artifact
# - task: PublishPipelineArtifact@1
# inputs:
# path: $(Build.ArtifactStagingDirectory)
# artifact: docker_$(DOCKER_ARCH)
# displayName: Store Docker artifact
# - job: docker_run
# dependsOn: docker_build
# pool:
# vmImage: ubuntu-18.04
# steps:
# - task: DownloadPipelineArtifact@2
# inputs:
# artifact: docker_amd64
# path: $(Build.SourcesDirectory)
# displayName: Retrieve Docker images
# - bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
# displayName: Load Docker images
# - bash: |
# set -ex
# DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
# for DOCKER_IMAGE in ${DOCKER_IMAGES}
# do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
# done
# displayName: Run integration tests for Docker images
# - job: installer_build
# pool:
# vmImage: vs2017-win2016
# steps:
# - task: UsePythonVersion@0
# inputs:
# versionSpec: 3.7
# architecture: x86
# addToPath: true
# - script: python windows-installer/construct.py
# displayName: Build Certbot installer
# - task: CopyFiles@2
# inputs:
# sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
# contents: '*.exe'
# targetFolder: $(Build.ArtifactStagingDirectory)
# - task: PublishPipelineArtifact@1
# inputs:
# path: $(Build.ArtifactStagingDirectory)
# # If we change the artifact's name, it should also be changed in tools/create_github_release.py
# artifact: windows-installer
# displayName: Publish Windows installer
# - job: installer_run
# dependsOn: installer_build
# strategy:
# matrix:
# win2019:
# imageName: windows-2019
# win2016:
# imageName: vs2017-win2016
# pool:
# vmImage: $(imageName)
# steps:
# - powershell: |
# if ($PSVersionTable.PSVersion.Major -ne 5) {
# throw "Powershell version is not 5.x"
# }
# condition: eq(variables['imageName'], 'vs2017-win2016')
# displayName: Check Powershell 5.x is used in vs2017-win2016
# - task: UsePythonVersion@0
# inputs:
# versionSpec: 3.8
# addToPath: true
# - task: DownloadPipelineArtifact@2
# inputs:
# artifact: windows-installer
# path: $(Build.SourcesDirectory)/bin
# displayName: Retrieve Windows installer
# - script: |
# python -m venv venv
# venv\Scripts\python tools\pipstrap.py
# venv\Scripts\python tools\pip_install.py -e certbot-ci
# env:
# PIP_NO_BUILD_ISOLATION: no
# displayName: Prepare Certbot-CI
# - script: |
# set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
# venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
# displayName: Run windows installer integration tests
# - script: |
# set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
# venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
# displayName: Run certbot integration tests
- job: snaps_build
pool:
vmImage: ubuntu-18.04
timeoutInMinutes: 0
variables:
# Do not run the heavy non-amd64 builds for test branches
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
ARCHS: amd64 arm64 armhf
${{ if startsWith(variables['Build.SourceBranchName'], 'test-') }}:
ARCHS: amd64
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
sudo snap install --classic snapcraft
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadSecureFile@1
name: credentials
inputs:
secureFile: launchpad-credentials
- script: |
set -e
git config --global user.email "$(Build.RequestedForEmail)"
git config --global user.name "$(Build.RequestedFor)"
mkdir -p ~/.local/share/snapcraft/provider/launchpad
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
python3 tools/snap/build_remote.py ALL --archs ${ARCHS} --timeout 19800
displayName: Build snaps
- script: |
set -e
mv *.snap $(Build.ArtifactStagingDirectory)
mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
displayName: Prepare artifacts
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: snaps
displayName: Store snaps artifacts
- job: snap_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-18.04
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends nginx-light snapd
python3 -m venv venv
venv/bin/python tools/pipstrap.py
venv/bin/python tools/pip_install.py -U tox
displayName: Install dependencies
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- script: |
set -e
sudo snap install --dangerous --classic snap/certbot_*_amd64.snap
displayName: Install Certbot snap
- script: |
set -e
venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
displayName: Run tox
- job: snap_dns_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-18.04
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- script: |
set -e
python3 -m venv venv
venv/bin/python tools/pipstrap.py
venv/bin/python tools/pip_install.py -e certbot-ci
displayName: Prepare Certbot-CI
- script: |
set -e
sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
displayName: Test DNS plugins snaps

View File

@@ -1,75 +0,0 @@
jobs:
- job: test
variables:
PYTHON_VERSION: 3.9
strategy:
matrix:
macos-py27:
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 2.7
TOXENV: py27
macos-py39:
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.9
TOXENV: py39
windows-py36:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.6
TOXENV: py36
windows-py37-cover:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.7
TOXENV: py37-cover
windows-integration-certbot:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.7
TOXENV: integration-certbot
linux-oldest-tests-1:
IMAGE_NAME: ubuntu-18.04
TOXENV: py27-{acme,apache,apache-v2,certbot}-oldest
linux-oldest-tests-2:
IMAGE_NAME: ubuntu-18.04
TOXENV: py27-{dns,nginx}-oldest
linux-py27:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 2.7
TOXENV: py27
linux-py36:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: py36
linux-py39-cover:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.9
TOXENV: py39-cover
linux-py37-lint:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.7
TOXENV: lint
linux-py36-mypy:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: mypy
linux-integration:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: pebble
apache-compat:
IMAGE_NAME: ubuntu-18.04
TOXENV: apache_compat
le-modification:
IMAGE_NAME: ubuntu-18.04
TOXENV: modification
apacheconftest:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 2.7
TOXENV: apacheconftest-with-pebble
nginxroundtrip:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 2.7
TOXENV: nginxroundtrip
pool:
vmImage: $(IMAGE_NAME)
steps:
- template: ../steps/tox-steps.yml

View File

@@ -1,19 +0,0 @@
stages:
- stage: Changelog
jobs:
- job: prepare
pool:
vmImage: vs2017-win2016
steps:
# If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
- bash: |
set -e
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
"${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
displayName: Prepare changelog
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
artifact: changelog
displayName: Publish changelog

View File

@@ -1,99 +0,0 @@
parameters:
- name: snapReleaseChannel
type: string
default: edge
values:
- edge
- beta
stages:
- stage: Deploy
jobs:
# This job relies on credentials used to publish the Certbot snaps. This
# credential file was created by running:
#
# snapcraft logout
# snapcraft login (provide the shared snapcraft credentials when prompted)
# snapcraft export-login --channels=beta,edge snapcraft.cfg
#
# Then the file was added as a secure file in Azure pipelines
# with the name snapcraft.cfg by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
# including authorizing the file in all pipelines as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#how-do-i-authorize-a-secure-file-for-use-in-all-pipelines.
#
# This file has a maximum lifetime of one year and the current
# file will expire on 2021-07-28 which is also tracked by
# https://github.com/certbot/certbot/issues/7931. The file will
# need to be updated before then to prevent automated deploys
# from breaking.
#
# Revoking these credentials can be done by changing the password of the
# account used to generate the credentials. See
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
# more info.
- job: publish_snap
pool:
vmImage: ubuntu-18.04
variables:
- group: certbot-common
steps:
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
sudo snap install --classic snapcraft
displayName: Install dependencies
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- task: DownloadSecureFile@1
name: snapcraftCfg
inputs:
secureFile: snapcraft.cfg
- bash: |
set -e
mkdir -p .snapcraft
ln -s $(snapcraftCfg.secureFilePath) .snapcraft/snapcraft.cfg
for SNAP_FILE in snap/*.snap; do
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
done
displayName: Publish to Snap store
- job: publish_docker
pool:
vmImage: ubuntu-18.04
strategy:
matrix:
amd64:
DOCKER_ARCH: amd64
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker_$(DOCKER_ARCH)
path: $(Build.SourcesDirectory)
displayName: Retrieve Docker images
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
displayName: Load Docker images
- task: Docker@2
inputs:
command: login
# The credentials used here are for the shared certbotbot account
# on Docker Hub. The credentials are stored in a service account
# which was created by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
# The name given to this service account must match the value
# given to containerRegistry below. "Grant access to all
# pipelines" should also be checked. To revoke these
# credentials, we can change the password on the certbotbot
# Docker Hub account or remove the account from the
# Certbot organization on Docker Hub.
containerRegistry: docker-hub
displayName: Login to Docker Hub
- bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
displayName: Deploy the Docker images

View File

@@ -1,19 +0,0 @@
stages:
- stage: On_Failure
jobs:
- job: notify_mattermost
variables:
- group: certbot-common
pool:
vmImage: ubuntu-latest
steps:
- bash: |
set -e
MESSAGE="\
---\n\
##### Azure Pipeline
*Repo* $(Build.Repository.ID) - *Pipeline* $(Build.DefinitionName) #$(Build.BuildNumber) - *Branch/PR* $(Build.SourceBranchName)\n\
:warning: __Pipeline has failed__: [Link to the build](https://dev.azure.com/$(Build.Repository.ID)/_build/results?buildId=$(Build.BuildId)&view=results)\n\n\
---"
curl -i -X POST --data-urlencode "payload={\"text\":\"${MESSAGE}\"}" "$(MATTERMOST_URL)"
condition: failed()

View File

@@ -1,6 +0,0 @@
stages:
- stage: TestAndPackage
jobs:
# - template: ../jobs/standard-tests-jobs.yml
# - template: ../jobs/extended-tests-jobs.yml
- template: ../jobs/packaging-jobs.yml

View File

@@ -1,57 +0,0 @@
steps:
- bash: |
set -e
brew install augeas
condition: startswith(variables['IMAGE_NAME'], 'macOS')
displayName: Install MacOS dependencies
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
python-dev \
gcc \
libaugeas0 \
libssl-dev \
libffi-dev \
ca-certificates \
nginx-light \
openssl
sudo systemctl stop nginx
condition: startswith(variables['IMAGE_NAME'], 'ubuntu')
displayName: Install Linux dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: $(PYTHON_VERSION)
addToPath: true
# tools/pip_install.py is used to pin packages to a known working version
# except in tests where the environment variable CERTBOT_NO_PIN is set.
# virtualenv is listed here explicitly to make sure it is upgraded when
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
# set, pip updates dependencies it thinks are already satisfied to avoid some
# problems with its lack of real dependency resolution.
- bash: |
set -e
python tools/pipstrap.py
python tools/pip_install.py -I tox virtualenv
displayName: Install runtime dependencies
- task: DownloadSecureFile@1
name: testFarmPem
inputs:
secureFile: azure-test-farm.pem
condition: contains(variables['TOXENV'], 'test-farm')
- bash: |
set -e
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
env
if [[ "${TOXENV}" == *"oldest"* ]]; then
tools/run_oldest_tests.sh
else
python -m tox
fi
env:
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
AWS_EC2_PEM_FILE: $(testFarmPem.secureFilePath)
displayName: Run tox

View File

@@ -0,0 +1,39 @@
jobs:
- job: test
strategy:
matrix:
macos-py27:
IMAGE_NAME: macOS-10.14
PYTHON_VERSION: 2.7
TOXENV: py27
macos-py38:
IMAGE_NAME: macOS-10.14
PYTHON_VERSION: 3.8
TOXENV: py38
windows-py35:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.5
TOXENV: py35
windows-py37-cover:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.7
TOXENV: py37-cover
windows-integration-certbot:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.7
TOXENV: integration-certbot
PYTEST_ADDOPTS: --numprocesses 4
pool:
vmImage: $(IMAGE_NAME)
steps:
- bash: brew install augeas
condition: startswith(variables['IMAGE_NAME'], 'macOS')
displayName: Install Augeas
- task: UsePythonVersion@0
inputs:
versionSpec: $(PYTHON_VERSION)
addToPath: true
- script: python tools/pip_install.py -U tox coverage
displayName: Install dependencies
- script: python -m tox
displayName: Run tox

View File

@@ -1,18 +0,0 @@
# https://editorconfig.org/
root = true
[*]
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf
[*.py]
indent_style = space
indent_size = 4
charset = utf-8
max_line_length = 100
[*.yaml]
indent_style = space
indent_size = 2

12 .envrc
View File

@@ -1,12 +0,0 @@
# This file is just a nicety for developers who use direnv. When you cd under
# the Certbot repo, Certbot's virtual environment will be automatically
# activated and then deactivated when you cd elsewhere. Developers have to have
# direnv set up and run `direnv allow` to allow this file to execute on their
# system. You can find more information at https://direnv.net/.
. venv3/bin/activate
# direnv doesn't support modifying PS1 so we unset it to squelch the error
# it'll otherwise print about this being done in the activate script. See
# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
# prompt to change like it normally does, see
# https://github.com/direnv/direnv/wiki/Python#restoring-the-ps1.
unset PS1

7 .gitignore vendored
View File

@@ -58,10 +58,3 @@ parts
prime
stage
*.snap
snap-constraints.txt
qemu-*
certbot-dns*/certbot-dns*_amd64*.txt
certbot-dns*/certbot-dns*_arm*.txt
/certbot_amd64*.txt
/certbot_arm*.txt
certbot-dns*/snap

92 .travis.yml Normal file
View File

@@ -0,0 +1,92 @@
language: python
dist: xenial
cache:
directories:
- $HOME/.cache/pip
before_script:
# On Travis, the fastest parallelization for integration tests has proved to be 4.
- 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
# Use Travis retry feature for farm tests since they are flaky
- 'if [[ "$TOXENV" == "travis-test-farm"* ]]; then export TRAVIS_RETRY=travis_retry; fi'
- export TOX_TESTENV_PASSENV=TRAVIS
- 'if [[ "$SNAP" == true ]]; then snap/local/build_and_install.sh; fi'
# Only build pushes to the master branch, PRs, and branches beginning with
# `test-`, `travis-test-`, or of the form `digit(s).digit(s).x` or
# `vdigit(s).digit(s).digit(s)`. As documented at
# https://docs.travis-ci.com/user/customizing-the-build/#safelisting-or-blocklisting-branches,
# this includes tags so pushing tags of the form `vdigit(s).digit(s).digit(s)`
# will also trigger tests. This reduces the number of simultaneous Travis runs,
# which speeds turnaround time on review since there is a cap of on the number
# of simultaneous runs.
branches:
# When changing these branches, please ensure the documentation under
# "Running tests in CI" is still correct.
only:
- master
- /^\d+\.\d+\.x$/ # this matches our point release branches
- /^v\d+\.\d+\.\d+$/ # this matches our release tags
- /^(travis-)?test-.*$/
# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
not-on-master: &not-on-master
if: NOT (type = push AND branch = master)
# Jobs for the extended test suite are executed for cron jobs and pushes to
# non-development branches.
extended-test-suite: &extended-test-suite
if: type = cron OR (type = push AND branch != master)
matrix:
include:
- python: "3.7"
env:
- TOXENV=travis-test-farm-apache2
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
<<: *extended-test-suite
# container-based infrastructure
sudo: false
addons:
apt:
packages: # Keep in sync with letsencrypt-auto-source/pieces/bootstrappers/deb_common.sh and Boulder.
- python-dev
- gcc
- libaugeas0
- libssl-dev
- libffi-dev
- ca-certificates
# For certbot-nginx integration testing
- nginx-light
- openssl
# tools/pip_install.py is used to pin packages to a known working version
# except in tests where the environment variable CERTBOT_NO_PIN is set.
# virtualenv is listed here explicitly to make sure it is upgraded when
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
# set, pip updates dependencies it thinks are already satisfied to avoid some
# problems with its lack of real dependency resolution.
install: 'tools/pip_install.py -I tox virtualenv'
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
# script command. It is set only to `travis_retry` during farm tests, in
# order to trigger the Travis retry feature, and compensate the inherent
# flakiness of these specific tests.
script: '$TRAVIS_RETRY tox'
notifications:
email: false
irc:
if: NOT branch =~ ^(travis-)?test-.*$
channels:
# This is set to a secure variable to prevent forks from sending
# notifications. This value was created by installing
# https://github.com/travis-ci/travis.rb and running
# `travis encrypt "chat.freenode.net#certbot-devel"`.
- secure: "EWW66E2+KVPZyIPR8ViENZwfcup4Gx3/dlimmAZE0WuLwxDCshBBOd3O8Rf6pBokEoZlXM5eDT6XdyJj8n0DLslgjO62pExdunXpbcMwdY7l1ELxX2/UbnDTE6UnPYa09qVBHNG7156Z6yE0x2lH4M9Ykvp0G0cubjPQHylAwo0="
on_cancel: never
on_success: never
on_failure: always

View File

@@ -36,8 +36,7 @@ Authors
* [Blake Griffith](https://github.com/cowlicks)
* [Brad Warren](https://github.com/bmw)
* [Brandon Kraft](https://github.com/kraftbj)
* [Brandon Kreisel](https://github.com/BKreisel)
* [Brian Heim](https://github.com/brianlheim)
* [Brandon Kreisel](https://github.com/kraftbj)
* [Cameron Steel](https://github.com/Tugzrida)
* [Ceesjan Luiten](https://github.com/quinox)
* [Chad Whitacre](https://github.com/whit537)
@@ -61,7 +60,6 @@ Authors
* [Daniel Albers](https://github.com/AID)
* [Daniel Aleksandersen](https://github.com/da2x)
* [Daniel Convissor](https://github.com/convissor)
* [Daniel "Drex" Drexler](https://github.com/aeturnum)
* [Daniel Huang](https://github.com/dhuang)
* [Dave Guarino](https://github.com/daguar)
* [David cz](https://github.com/dave-cz)
@@ -86,7 +84,6 @@ Authors
* [Felix Schwarz](https://github.com/FelixSchwarz)
* [Felix Yan](https://github.com/felixonmars)
* [Filip Ochnik](https://github.com/filipochnik)
* [Florian Klink](https://github.com/flokli)
* [Francois Marier](https://github.com/fmarier)
* [Frank](https://github.com/Frankkkkk)
* [Frederic BLANC](https://github.com/fblanc)
@@ -154,7 +151,6 @@ Authors
* [Luca Olivetti](https://github.com/olivluca)
* [Luke Rogers](https://github.com/lukeroge)
* [Maarten](https://github.com/mrtndwrd)
* [Mads Jensen](https://github.com/atombrella)
* [Maikel Martens](https://github.com/krukas)
* [Malte Janduda](https://github.com/MalteJ)
* [Mantas Mikulėnas](https://github.com/grawity)
@@ -206,7 +202,6 @@ Authors
* [Pierre Jaury](https://github.com/kaiyou)
* [Piotr Kasprzyk](https://github.com/kwadrat)
* [Prayag Verma](https://github.com/pra85)
* [Rasesh Patel](https://github.com/raspat1)
* [Reinaldo de Souza Jr](https://github.com/juniorz)
* [Remi Rampin](https://github.com/remram44)
* [Rémy HUBSCHER](https://github.com/Natim)
@@ -214,7 +209,6 @@ Authors
* [Richard Barnes](https://github.com/r-barnes)
* [Richard Panek](https://github.com/kernelpanek)
* [Robert Buchholz](https://github.com/rbu)
* [Robert Dailey](https://github.com/pahrohfit)
* [Robert Habermann](https://github.com/frennkie)
* [Robert Xiao](https://github.com/nneonneo)
* [Roland Shoemaker](https://github.com/rolandshoemaker)
@@ -240,7 +234,6 @@ Authors
* [Spencer Bliven](https://github.com/sbliven)
* [Stacey Sheldon](https://github.com/solidgoldbomb)
* [Stavros Korokithakis](https://github.com/skorokithakis)
* [Ștefan Talpalaru](https://github.com/stefantalpalaru)
* [Stefan Weil](https://github.com/stweil)
* [Steve Desmond](https://github.com/stevedesmond-ca)
* [sydneyli](https://github.com/sydneyli)

View File

@@ -11,7 +11,7 @@ to the Sphinx generated docs is provided below.
[1] https://github.com/blog/1184-contributing-guidelines
[2] https://docutils.sourceforge.io/docs/user/rst/quickref.html#hyperlink-targets
[2] http://docutils.sourceforge.net/docs/user/rst/quickref.html#hyperlink-targets
-->

View File

@@ -20,10 +20,3 @@ for mod in list(sys.modules):
# preserved (acme.jose.* is josepy.*)
if mod == 'josepy' or mod.startswith('josepy.'):
sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]
if sys.version_info[0] == 2:
warnings.warn(
"Python 2 support will be dropped in the next release of acme. "
"Please upgrade your Python version.",
PendingDeprecationWarning,
) # pragma: no cover

View File

@@ -13,7 +13,6 @@ import josepy as jose
import OpenSSL
import requests
from requests.adapters import HTTPAdapter
from requests.utils import parse_header_links
from requests_toolbelt.adapters.source import SourceAddressAdapter
import six
from six.moves import http_client
@@ -201,7 +200,7 @@ class ClientBase(object):
when = parsedate_tz(retry_after)
if when is not None:
try:
tz_secs = datetime.timedelta(when[-1] if when[-1] is not None else 0)
tz_secs = datetime.timedelta(when[-1] if when[-1] else 0)
return datetime.datetime(*when[:7]) - tz_secs
except (ValueError, OverflowError):
pass
@@ -448,7 +447,7 @@ class Client(ClientBase):
heapq.heapify(waiting)
# mapping between original Authorization Resource and the most
# recently updated one
updated = {authzr: authzr for authzr in authzrs}
updated = dict((authzr, authzr) for authzr in authzrs)
while waiting:
# find the smallest Retry-After, and sleep if necessary
@@ -734,13 +733,11 @@ class ClientV2(ClientBase):
raise errors.ValidationError(failed)
return orderr.update(authorizations=responses)
def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
def finalize_order(self, orderr, deadline):
"""Finalize an order and obtain a certificate.
:param messages.OrderResource orderr: order to finalize
:param datetime.datetime deadline: when to stop polling and timeout
:param bool fetch_alternative_chains: whether to also fetch alternative
certificate chains
:returns: finalized order
:rtype: messages.OrderResource
@@ -757,13 +754,8 @@ class ClientV2(ClientBase):
if body.error is not None:
raise errors.IssuanceError(body.error)
if body.certificate is not None:
certificate_response = self._post_as_get(body.certificate)
orderr = orderr.update(body=body, fullchain_pem=certificate_response.text)
if fetch_alternative_chains:
alt_chains_urls = self._get_links(certificate_response, 'alternate')
alt_chains = [self._post_as_get(url).text for url in alt_chains_urls]
orderr = orderr.update(alternative_fullchains_pem=alt_chains)
return orderr
certificate_response = self._post_as_get(body.certificate).text
return orderr.update(body=body, fullchain_pem=certificate_response)
raise errors.TimeoutError()
def revoke(self, cert, rsn):
@@ -793,20 +785,6 @@ class ClientV2(ClientBase):
new_args = args[:1] + (None,) + args[1:]
return self._post(*new_args, **kwargs)
def _get_links(self, response, relation_type):
"""
Retrieves all Link URIs of relation_type from the response.
:param requests.Response response: The requests HTTP response.
:param str relation_type: The relation type to filter by.
"""
# Can't use response.links directly because it drops multiple links
# of the same relation type, which is possible in RFC8555 responses.
if 'Link' not in response.headers:
return []
links = parse_header_links(response.headers['Link'])
return [l['url'] for l in links
if 'rel' in l and 'url' in l and l['rel'] == relation_type]
class BackwardsCompatibleClientV2(object):
"""ACME client wrapper that tends towards V2-style calls, but
@@ -885,13 +863,11 @@ class BackwardsCompatibleClientV2(object):
return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
return self.client.new_order(csr_pem)
def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
def finalize_order(self, orderr, deadline):
"""Finalize an order and obtain a certificate.
:param messages.OrderResource orderr: order to finalize
:param datetime.datetime deadline: when to stop polling and timeout
:param bool fetch_alternative_chains: whether to also fetch alternative
certificate chains
:returns: finalized order
:rtype: messages.OrderResource
@@ -922,7 +898,7 @@ class BackwardsCompatibleClientV2(object):
chain = crypto_util.dump_pyopenssl_chain(chain).decode()
return orderr.update(fullchain_pem=(cert + chain))
return self.client.finalize_order(orderr, deadline, fetch_alternative_chains)
return self.client.finalize_order(orderr, deadline)
def revoke(self, cert, rsn):
"""Revoke certificate.

View File

@@ -12,5 +12,4 @@ try:
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
from typing import Collection, IO # type: ignore
except ImportError:
# mypy complains because TypingClass is not a module
sys.modules[__name__] = TypingClass() # type: ignore
sys.modules[__name__] = TypingClass()

View File

@@ -206,7 +206,7 @@ class Directory(jose.JSONDeSerializable):
external_account_required = jose.Field('externalAccountRequired', omitempty=True)
def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
super(Directory.Meta, self).__init__(**kwargs)
@property
@@ -315,9 +315,6 @@ class Registration(ResourceBody):
# on new-reg key server ignores 'key' and populates it based on
# JWS.signature.combined.jwk
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
# Contact field implements special behavior to allow messages that clear existing
# contacts while not expecting the `contact` field when loading from json.
# This is implemented in the constructor and *_json methods.
contact = jose.Field('contact', omitempty=True, default=())
agreement = jose.Field('agreement', omitempty=True)
status = jose.Field('status', omitempty=True)
@@ -330,73 +327,24 @@ class Registration(ResourceBody):
@classmethod
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
"""
Create registration resource from contact details.
The `contact` keyword being passed to a Registration object is meaningful, so
this function represents empty iterables in its kwargs by passing on an empty
`tuple`.
"""
# Note if `contact` was in kwargs.
contact_provided = 'contact' in kwargs
# Pop `contact` from kwargs and add formatted email or phone numbers
"""Create registration resource from contact details."""
details = list(kwargs.pop('contact', ()))
if phone is not None:
details.append(cls.phone_prefix + phone)
if email is not None:
details.extend([cls.email_prefix + mail for mail in email.split(',')])
# Insert formatted contact information back into kwargs
# or insert an empty tuple if `contact` provided.
if details or contact_provided:
kwargs['contact'] = tuple(details)
kwargs['contact'] = tuple(details)
if external_account_binding:
kwargs['external_account_binding'] = external_account_binding
return cls(**kwargs)
def __init__(self, **kwargs):
"""Note if the user provides a value for the `contact` member."""
if 'contact' in kwargs:
# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
object.__setattr__(self, '_add_contact', True)
super(Registration, self).__init__(**kwargs)
def _filter_contact(self, prefix):
return tuple(
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
if detail.startswith(prefix))
def _add_contact_if_appropriate(self, jobj):
"""
The `contact` member of Registration objects should not be required when
de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
it should be included in serializations if it was provided.
:param jobj: Dictionary containing this Registrations' data
:type jobj: dict
:returns: Dictionary containing Registrations data to transmit to the server
:rtype: dict
"""
if getattr(self, '_add_contact', False):
jobj['contact'] = self.encode('contact')
return jobj
def to_partial_json(self):
"""Modify josepy.JSONDeserializable.to_partial_json()"""
jobj = super(Registration, self).to_partial_json()
return self._add_contact_if_appropriate(jobj)
def fields_to_partial_json(self):
"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
jobj = super(Registration, self).fields_to_partial_json()
return self._add_contact_if_appropriate(jobj)
@property
def phones(self):
"""All phones found in the ``contact`` field."""
@@ -465,7 +413,7 @@ class ChallengeBody(ResourceBody):
omitempty=True, default=None)
def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
super(ChallengeBody, self).__init__(**kwargs)
def encode(self, name):
@@ -618,11 +566,9 @@ class Revocation(ResourceMixin, jose.JSONObjectWithFields):
class Order(ResourceBody):
"""Order Resource Body.
:ivar identifiers: List of identifiers for the certificate.
:vartype identifiers: `list` of `.Identifier`
:ivar list of .Identifier: List of identifiers for the certificate.
:ivar acme.messages.Status status:
:ivar authorizations: URLs of authorizations.
:vartype authorizations: `list` of `str`
:ivar list of str authorizations: URLs of authorizations.
:ivar str certificate: URL to download certificate as a fullchain PEM.
:ivar str finalize: URL to POST to to request issuance once all
authorizations have "valid" status.
@@ -647,20 +593,15 @@ class OrderResource(ResourceWithURI):
:ivar acme.messages.Order body:
:ivar str csr_pem: The CSR this Order will be finalized with.
:ivar authorizations: Fully-fetched AuthorizationResource objects.
:vartype authorizations: `list` of `acme.messages.AuthorizationResource`
:ivar list of acme.messages.AuthorizationResource authorizations:
Fully-fetched AuthorizationResource objects.
:ivar str fullchain_pem: The fetched contents of the certificate URL
produced once the order was finalized, if it's present.
:ivar alternative_fullchains_pem: The fetched contents of alternative certificate
chain URLs produced once the order was finalized, if present and requested during
finalization.
:vartype alternative_fullchains_pem: `list` of `str`
"""
body = jose.Field('body', decoder=Order.from_json)
csr_pem = jose.Field('csr_pem', omitempty=True)
authorizations = jose.Field('authorizations')
fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
alternative_fullchains_pem = jose.Field('alternative_fullchains_pem', omitempty=True)
@Directory.register
class NewOrder(Order):

View File

@@ -4,4 +4,4 @@ import six
def map_keys(dikt, func):
"""Map dictionary keys."""
return {func(key): value for key, value in six.iteritems(dikt)}
return dict((func(key), value) for key, value in six.iteritems(dikt))

View File

@@ -9,7 +9,7 @@ BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.

View File

@@ -120,7 +120,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally

View File

@@ -65,7 +65,7 @@ if errorlevel 9009 (
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
echo.http://sphinx-doc.org/
exit /b 1
)

View File

@@ -1,11 +1,12 @@
from distutils.version import LooseVersion
from distutils.version import StrictVersion
import sys
from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.11.0.dev0'
version = '1.6.0.dev0'
# Please update tox.ini when modifying dependency version requirements
install_requires = [
@@ -16,8 +17,8 @@ install_requires = [
# 1.1.0+ is required to avoid the warnings described at
# https://github.com/certbot/josepy/issues/13.
'josepy>=1.1.0',
# Connection.set_tlsext_host_name (>=0.13) + matching Xenial requirements (>=0.15.1)
'PyOpenSSL>=0.15.1',
# Connection.set_tlsext_host_name (>=0.13)
'PyOpenSSL>=0.13.1',
'pyrfc3339',
'pytz',
'requests[security]>=2.6.0', # security extras added in 2.4.1
@@ -26,7 +27,7 @@ install_requires = [
'six>=1.9.0', # needed for python_2_unicode_compatible
]
setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
@@ -46,6 +47,22 @@ docs_extras = [
'sphinx_rtd_theme',
]
class PyTest(TestCommand):
user_options = []
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
name='acme',
version=version,
@@ -54,7 +71,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
@@ -63,10 +80,10 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],
@@ -78,4 +95,7 @@ setup(
'dev': dev_extras,
'docs': docs_extras,
},
test_suite='acme',
tests_require=["pytest"],
cmdclass={"test": PyTest},
)

View File

@@ -263,7 +263,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
with mock.patch('acme.client.ClientV2') as mock_client:
client = self._init()
client.finalize_order(mock_orderr, mock_deadline)
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline, False)
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline)
def test_revoke(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
@@ -842,32 +842,6 @@ class ClientV2Test(ClientTestBase):
deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
self.assertRaises(errors.TimeoutError, self.client.finalize_order, self.orderr, deadline)
def test_finalize_order_alt_chains(self):
updated_order = self.order.update(
certificate='https://www.letsencrypt-demo.org/acme/cert/',
)
updated_orderr = self.orderr.update(body=updated_order,
fullchain_pem=CERT_SAN_PEM,
alternative_fullchains_pem=[CERT_SAN_PEM,
CERT_SAN_PEM])
self.response.json.return_value = updated_order.to_json()
self.response.text = CERT_SAN_PEM
self.response.headers['Link'] ='<https://example.com/acme/cert/1>;rel="alternate", ' + \
'<https://example.com/dir>;rel="index", ' + \
'<https://example.com/acme/cert/2>;title="foo";rel="alternate"'
deadline = datetime.datetime(9999, 9, 9)
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
self.net.post.assert_any_call('https://example.com/acme/cert/1',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
self.net.post.assert_any_call('https://example.com/acme/cert/2',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
self.assertEqual(resp, updated_orderr)
del self.response.headers['Link']
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
self.assertEqual(resp, updated_orderr.update(alternative_fullchains_pem=[]))
def test_revoke(self):
self.client.revoke(messages_test.CERT, self.rsn)
self.net.post.assert_called_once_with(
@@ -1342,7 +1316,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
# test should fail if the default adapter type is changed by requests
net = ClientNetwork(key=None, alg=None)
session = requests.Session()
for scheme in session.adapters:
for scheme in session.adapters.keys():
client_network_adapter = net.session.adapters.get(scheme)
default_adapter = session.adapters.get(scheme)
self.assertEqual(client_network_adapter.__class__, default_adapter.__class__)

View File

@@ -108,11 +108,11 @@ class ConstantTest(unittest.TestCase):
def test_equality(self):
const_a_prime = self.MockConstant('a')
self.assertNotEqual(self.const_a, self.const_b)
self.assertEqual(self.const_a, const_a_prime)
self.assertFalse(self.const_a == self.const_b)
self.assertTrue(self.const_a == const_a_prime)
self.assertNotEqual(self.const_a, self.const_b)
self.assertEqual(self.const_a, const_a_prime)
self.assertTrue(self.const_a != self.const_b)
self.assertFalse(self.const_a != const_a_prime)
class DirectoryTest(unittest.TestCase):
@@ -254,19 +254,6 @@ class RegistrationTest(unittest.TestCase):
from acme.messages import Registration
hash(Registration.from_json(self.jobj_from))
def test_default_not_transmitted(self):
from acme.messages import NewRegistration
empty_new_reg = NewRegistration()
new_reg_with_contact = NewRegistration(contact=())
self.assertEqual(empty_new_reg.contact, ())
self.assertEqual(new_reg_with_contact.contact, ())
self.assertTrue('contact' not in empty_new_reg.to_partial_json())
self.assertTrue('contact' not in empty_new_reg.fields_to_partial_json())
self.assertTrue('contact' in new_reg_with_contact.to_partial_json())
self.assertTrue('contact' in new_reg_with_contact.fields_to_partial_json())
class UpdateRegistrationTest(unittest.TestCase):
"""Tests for acme.messages.UpdateRegistration."""

View File

@@ -225,8 +225,7 @@ def _get_runtime_cfg(command):
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
env=util.env_no_snap_for_external_calls())
universal_newlines=True)
stdout, stderr = proc.communicate()
except (OSError, ValueError):

View File

@@ -6,7 +6,7 @@ Authors:
Raphael Pinson <raphink@gmail.com>
About: Reference
Online Apache configuration manual: https://httpd.apache.org/docs/trunk/
Online Apache configuration manual: http://httpd.apache.org/docs/trunk/
About: License
This file is licensed under the LGPL v2+.

View File

@@ -9,6 +9,7 @@ import re
import socket
import time
import six
import zope.component
import zope.interface
try:
@@ -327,9 +328,6 @@ class ApacheConfigurator(common.Installer):
if self.version < (2, 2):
raise errors.NotSupportedError(
"Apache Version {0} not supported.".format(str(self.version)))
elif self.version < (2, 4):
logger.warning('Support for Apache 2.2 is deprecated and will be removed in a '
'future release.')
# Recover from previous crash before Augeas initialization to have the
# correct parse tree from the get go.
@@ -466,6 +464,21 @@ class ApacheConfigurator(common.Installer):
metadata=metadata
)
def _wildcard_domain(self, domain):
"""
Checks if domain is a wildcard domain
:param str domain: Domain to check
:returns: If the domain is wildcard domain
:rtype: bool
"""
if isinstance(domain, six.text_type):
wildcard_marker = u"*."
else:
wildcard_marker = b"*."
return domain.startswith(wildcard_marker)
def deploy_cert(self, domain, cert_path, key_path,
chain_path=None, fullchain_path=None):
"""Deploys certificate to specified virtual host.
@@ -500,7 +513,7 @@ class ApacheConfigurator(common.Installer):
:rtype: `list` of :class:`~certbot_apache._internal.obj.VirtualHost`
"""
if util.is_wildcard_domain(domain):
if self._wildcard_domain(domain):
if domain in self._wildcard_vhosts:
# Vhosts for a wildcard domain were already selected
return self._wildcard_vhosts[domain]
@@ -1449,7 +1462,7 @@ class ApacheConfigurator(common.Installer):
if not line.lower().lstrip().startswith("rewriterule"):
return False
# According to: https://httpd.apache.org/docs/2.4/rewrite/flags.html
# According to: http://httpd.apache.org/docs/2.4/rewrite/flags.html
# The syntax of a RewriteRule is:
# RewriteRule pattern target [Flag1,Flag2,Flag3]
# i.e. target is required, so it must exist.

View File

@@ -169,7 +169,7 @@ class ApacheHttp01(common.ChallengePerformer):
def _set_up_challenges(self):
if not os.path.isdir(self.challenge_dir):
old_umask = filesystem.umask(0o022)
old_umask = os.umask(0o022)
try:
filesystem.makedirs(self.challenge_dir, 0o755)
except OSError as exception:
@@ -177,7 +177,7 @@ class ApacheHttp01(common.ChallengePerformer):
raise errors.PluginError(
"Couldn't create root for http-01 challenge")
finally:
filesystem.umask(old_umask)
os.umask(old_umask)
responses = []
for achall in self.achalls:

View File

@@ -731,6 +731,7 @@ class ApacheParser(object):
privileged users.
https://apr.apache.org/docs/apr/2.0/apr__fnmatch_8h_source.html
http://apache2.sourcearchive.com/documentation/2.2.16-6/apr__fnmatch_8h_source.html
:param str clean_fn_match: Apache style filename match, like globs
@@ -798,7 +799,7 @@ class ApacheParser(object):
def _parsed_by_parser_paths(self, filep, paths):
"""Helper function that searches through provided paths and returns
True if file path is found in the set"""
for directory in paths:
for directory in paths.keys():
for filename in paths[directory]:
if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
return True

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.6.0
certbot[dev]==1.1.0

View File

@@ -1,24 +1,25 @@
from distutils.version import LooseVersion
from distutils.version import StrictVersion
import sys
from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.11.0.dev0'
version = '1.6.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.6.0',
'certbot>=1.1.0',
'python-augeas',
'setuptools',
'zope.component',
'zope.interface',
]
setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
@@ -31,6 +32,21 @@ dev_extras = [
'apacheconfig>=0.3.2',
]
class PyTest(TestCommand):
user_options = []
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
name='certbot-apache',
version=version,
@@ -39,7 +55,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -50,10 +66,10 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
@@ -73,4 +89,7 @@ setup(
'apache = certbot_apache._internal.entrypoint:ENTRYPOINT',
],
},
test_suite='certbot_apache',
tests_require=["pytest"],
cmdclass={"test": PyTest},
)

View File

@@ -52,7 +52,7 @@ function Cleanup() {
# if our environment asks us to enable modules, do our best!
if [ "$1" = --debian-modules ] ; then
sudo apt-get install -y apache2
sudo apt-get install -y libapache2-mod-wsgi-py3
sudo apt-get install -y libapache2-mod-wsgi
sudo apt-get install -y libapache2-mod-macro
for mod in ssl rewrite macro wsgi deflate userdir version mime setenvif ; do

View File

@@ -140,7 +140,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertTrue("TEST2" in self.config.parser.variables)
self.assertTrue("TEST2" in self.config.parser.variables.keys())
self.assertTrue("mod_another.c" in self.config.parser.modules)
def test_get_virtual_hosts(self):
@@ -172,11 +172,11 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
mock_osi.return_value = ("centos", "7")
self.config.parser.update_runtime_variables()
self.assertTrue("mock_define" in self.config.parser.variables)
self.assertTrue("mock_define_too" in self.config.parser.variables)
self.assertTrue("mock_value" in self.config.parser.variables)
self.assertTrue("mock_define" in self.config.parser.variables.keys())
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
self.assertTrue("mock_value" in self.config.parser.variables.keys())
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
@mock.patch("certbot_apache._internal.configurator.util.run_script")

View File

@@ -1337,6 +1337,13 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.enable_mod,
"whatever")
def test_wildcard_domain(self):
# pylint: disable=protected-access
cases = {u"*.example.org": True, b"*.x.example.org": True,
u"a.example.org": False, b"a.x.example.org": False}
for key in cases:
self.assertEqual(self.config._wildcard_domain(key), cases[key])
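For illustration, a hypothetical re-implementation that is consistent with the four cases exercised by `test_wildcard_domain` above (this is not Certbot's actual `_wildcard_domain`):

```python
def wildcard_domain_sketch(domain):
    """Return True if the leftmost DNS label is a bare '*' (accepts str or bytes)."""
    if isinstance(domain, bytes):
        domain = domain.decode("utf-8")
    return domain.split(".")[0] == "*"

cases = {u"*.example.org": True, b"*.x.example.org": True,
         u"a.example.org": False, b"a.x.example.org": False}
assert all(wildcard_domain_sketch(domain) == expected for domain, expected in cases.items())
```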
def test_choose_vhosts_wildcard(self):
# pylint: disable=protected-access
mock_path = "certbot_apache._internal.display_ops.select_vhost_multiple"
@@ -1350,10 +1357,10 @@ class MultipleVhostsTest(util.ApacheTest):
# And the actual returned values
self.assertEqual(len(vhs), 1)
self.assertEqual(vhs[0].name, "certbot.demo")
self.assertTrue(vhs[0].name == "certbot.demo")
self.assertTrue(vhs[0].ssl)
self.assertNotEqual(vhs[0], self.vh_truth[3])
self.assertFalse(vhs[0] == self.vh_truth[3])
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.make_vhost_ssl")
def test_choose_vhosts_wildcard_no_ssl(self, mock_makessl):
@@ -1464,10 +1471,10 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.parser.aug.match = mock_match
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 2)
self.assertEqual(vhs[0], self.vh_truth[1])
self.assertTrue(vhs[0] == self.vh_truth[1])
# mock_vhost should have replaced the vh_truth[0], because its filepath
# isn't a symlink
self.assertEqual(vhs[1], mock_vhost)
self.assertTrue(vhs[1] == mock_vhost)
class AugeasVhostsTest(util.ApacheTest):

View File

@@ -412,9 +412,9 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
ancestor=self.block,
filepath="/path/to/whatever",
metadata=self.metadata)
self.assertNotEqual(self.block, ne_block)
self.assertNotEqual(self.directive, ne_directive)
self.assertNotEqual(self.comment, ne_comment)
self.assertFalse(self.block == ne_block)
self.assertFalse(self.directive == ne_directive)
self.assertFalse(self.comment == ne_comment)
def test_parsed_paths(self):
mock_p = mock.MagicMock(return_value=['/path/file.conf',

View File

@@ -134,7 +134,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertTrue("TEST2" in self.config.parser.variables)
self.assertTrue("TEST2" in self.config.parser.variables.keys())
self.assertTrue("mod_another.c" in self.config.parser.modules)
@mock.patch("certbot_apache._internal.configurator.util.run_script")
@@ -172,11 +172,11 @@ class MultipleVhostsTestFedora(util.ApacheTest):
mock_osi.return_value = ("fedora", "29")
self.config.parser.update_runtime_variables()
self.assertTrue("mock_define" in self.config.parser.variables)
self.assertTrue("mock_define_too" in self.config.parser.variables)
self.assertTrue("mock_value" in self.config.parser.variables)
self.assertTrue("mock_define" in self.config.parser.variables.keys())
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
self.assertTrue("mock_value" in self.config.parser.variables.keys())
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
@mock.patch("certbot_apache._internal.configurator.util.run_script")

View File

@@ -91,7 +91,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_modules"):
self.config.parser.update_runtime_variables()
for define in defines:
self.assertTrue(define in self.config.parser.variables)
self.assertTrue(define in self.config.parser.variables.keys())
@mock.patch("certbot_apache._internal.apache_util.parse_from_subprocess")
def test_no_binary_configdump(self, mock_subprocess):

View File

@@ -27,14 +27,14 @@ class VirtualHostTest(unittest.TestCase):
"certbot_apache._internal.obj.Addr(('127.0.0.1', '443'))")
def test_eq(self):
self.assertEqual(self.vhost1b, self.vhost1)
self.assertNotEqual(self.vhost1, self.vhost2)
self.assertTrue(self.vhost1b == self.vhost1)
self.assertFalse(self.vhost1 == self.vhost2)
self.assertEqual(str(self.vhost1b), str(self.vhost1))
self.assertNotEqual(self.vhost1b, 1234)
self.assertFalse(self.vhost1b == 1234)
def test_ne(self):
self.assertNotEqual(self.vhost1, self.vhost2)
self.assertEqual(self.vhost1, self.vhost1b)
self.assertTrue(self.vhost1 != self.vhost2)
self.assertFalse(self.vhost1 != self.vhost1b)
def test_conflicts(self):
from certbot_apache._internal.obj import Addr
@@ -128,13 +128,13 @@ class AddrTest(unittest.TestCase):
self.assertTrue(self.addr1.conflicts(self.addr2))
def test_equal(self):
self.assertEqual(self.addr1, self.addr2)
self.assertNotEqual(self.addr, self.addr1)
self.assertNotEqual(self.addr, 123)
self.assertTrue(self.addr1 == self.addr2)
self.assertFalse(self.addr == self.addr1)
self.assertFalse(self.addr == 123)
def test_not_equal(self):
self.assertEqual(self.addr1, self.addr2)
self.assertNotEqual(self.addr, self.addr1)
self.assertFalse(self.addr1 != self.addr2)
self.assertTrue(self.addr != self.addr1)
if __name__ == "__main__":
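This hunk toggles between `assertEqual`/`assertNotEqual` and `assertTrue(a == b)`/`assertFalse(a == b)`. Both styles test the same condition; the practical difference is the failure message, as the standalone sketch below (not from the repository) notes in its comments.

```python
import unittest

class EqualityMessageDemo(unittest.TestCase):
    def test_styles(self):
        vhost_a = {"name": "certbot.demo", "ssl": True}
        vhost_b = {"name": "certbot.demo", "ssl": True}
        # On failure, assertEqual prints a diff of the two values,
        # while assertTrue(a == b) only reports "False is not true".
        self.assertEqual(vhost_a, vhost_b)
        self.assertTrue(vhost_a == vhost_b)

if __name__ == "__main__":
    unittest.main()
```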

View File

@@ -26,6 +26,8 @@ class ApacheTest(unittest.TestCase):
config_root="debian_apache_2_4/multiple_vhosts/apache2",
vhost_root="debian_apache_2_4/multiple_vhosts/apache2/sites-available"):
# pylint: disable=arguments-differ
super(ApacheTest, self).setUp()
self.temp_dir, self.config_dir, self.work_dir = common.dir_setup(
test_dir=test_dir,
pkg=__name__)

View File

@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
fi
VENV_BIN="$VENV_PATH/bin"
BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
LE_AUTO_VERSION="1.10.1"
LE_AUTO_VERSION="1.5.0"
BASENAME=$(basename $0)
USAGE="Usage: $BASENAME [OPTIONS]
A self-updating wrapper script for the Certbot ACME client. When run, updates
@@ -258,7 +258,7 @@ DeprecationBootstrap() {
MIN_PYTHON_2_VERSION="2.7"
MIN_PYVER2=$(echo "$MIN_PYTHON_2_VERSION" | sed 's/\.//')
MIN_PYTHON_3_VERSION="3.6"
MIN_PYTHON_3_VERSION="3.5"
MIN_PYVER3=$(echo "$MIN_PYTHON_3_VERSION" | sed 's/\.//')
# Sets LE_PYTHON to Python version string and PYVER to the first two
# digits of the python version.
@@ -799,10 +799,17 @@ BootstrapMageiaCommon() {
# that function. If Bootstrap is set to a function that doesn't install any
# packages BOOTSTRAP_VERSION is not set.
if [ -f /etc/debian_version ]; then
DEPRECATED_OS=1
Bootstrap() {
BootstrapMessage "Debian-based OSes"
BootstrapDebCommon
}
BOOTSTRAP_VERSION="BootstrapDebCommon $BOOTSTRAP_DEB_COMMON_VERSION"
elif [ -f /etc/mageia-release ]; then
# Mageia has both /etc/mageia-release and /etc/redhat-release
DEPRECATED_OS=1
Bootstrap() {
ExperimentalBootstrap "Mageia" BootstrapMageiaCommon
}
BOOTSTRAP_VERSION="BootstrapMageiaCommon $BOOTSTRAP_MAGEIA_COMMON_VERSION"
elif [ -f /etc/redhat-release ]; then
# Run DeterminePythonVersion to decide on the basis of available Python versions
# whether to use 2.x or 3.x on RedHat-like systems.
@@ -877,11 +884,31 @@ elif [ -f /etc/redhat-release ]; then
LE_PYTHON="$prev_le_python"
elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
DEPRECATED_OS=1
Bootstrap() {
BootstrapMessage "openSUSE-based OSes"
BootstrapSuseCommon
}
BOOTSTRAP_VERSION="BootstrapSuseCommon $BOOTSTRAP_SUSE_COMMON_VERSION"
elif [ -f /etc/arch-release ]; then
DEPRECATED_OS=1
Bootstrap() {
if [ "$DEBUG" = 1 ]; then
BootstrapMessage "Archlinux"
BootstrapArchCommon
else
error "Please use pacman to install letsencrypt packages:"
error "# pacman -S certbot certbot-apache"
error
error "If you would like to use the virtualenv way, please run the script again with the"
error "--debug flag."
exit 1
fi
}
BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
elif [ -f /etc/manjaro-release ]; then
DEPRECATED_OS=1
Bootstrap() {
ExperimentalBootstrap "Manjaro Linux" BootstrapArchCommon
}
BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
elif [ -f /etc/gentoo-release ]; then
DEPRECATED_OS=1
elif uname | grep -iq FreeBSD ; then
@@ -894,9 +921,19 @@ elif [ -f /etc/issue ] && grep -iq "Amazon Linux" /etc/issue ; then
}
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
elif [ -f /etc/product ] && grep -q "Joyent Instance" /etc/product ; then
DEPRECATED_OS=1
Bootstrap() {
ExperimentalBootstrap "Joyent SmartOS Zone" BootstrapSmartOS
}
BOOTSTRAP_VERSION="BootstrapSmartOS $BOOTSTRAP_SMARTOS_VERSION"
else
DEPRECATED_OS=1
Bootstrap() {
error "Sorry, I don't know how to bootstrap Certbot on your operating system!"
error
error "You will need to install OS dependencies, configure virtualenv, and run pip install manually."
error "Please see https://letsencrypt.readthedocs.org/en/latest/contributing.html#prerequisites"
error "for more info."
exit 1
}
fi
# We handle this case after determining the normal bootstrap version to allow
@@ -1228,40 +1265,45 @@ if [ "$1" = "--le-auto-phase2" ]; then
# pip install hashin
# hashin -r dependency-requirements.txt cryptography==1.5.2
# ```
ConfigArgParse==1.2.3 \
--hash=sha256:edd17be986d5c1ba2e307150b8e5f5107aba125f3574dddd02c85d5cdcfd37dc
certifi==2020.4.5.1 \
--hash=sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304 \
--hash=sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519
cffi==1.14.0 \
--hash=sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff \
--hash=sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b \
--hash=sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac \
--hash=sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0 \
--hash=sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384 \
--hash=sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26 \
--hash=sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6 \
--hash=sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b \
--hash=sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e \
--hash=sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd \
--hash=sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2 \
--hash=sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66 \
--hash=sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc \
--hash=sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8 \
--hash=sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55 \
--hash=sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4 \
--hash=sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5 \
--hash=sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d \
--hash=sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78 \
--hash=sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa \
--hash=sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793 \
--hash=sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f \
--hash=sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a \
--hash=sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f \
--hash=sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30 \
--hash=sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f \
--hash=sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3 \
--hash=sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c
ConfigArgParse==1.0 \
--hash=sha256:bf378245bc9cdc403a527e5b7406b991680c2a530e7e81af747880b54eb57133
certifi==2019.11.28 \
--hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \
--hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f
cffi==1.13.2 \
--hash=sha256:0b49274afc941c626b605fb59b59c3485c17dc776dc3cc7cc14aca74cc19cc42 \
--hash=sha256:0e3ea92942cb1168e38c05c1d56b0527ce31f1a370f6117f1d490b8dcd6b3a04 \
--hash=sha256:135f69aecbf4517d5b3d6429207b2dff49c876be724ac0c8bf8e1ea99df3d7e5 \
--hash=sha256:19db0cdd6e516f13329cba4903368bff9bb5a9331d3410b1b448daaadc495e54 \
--hash=sha256:2781e9ad0e9d47173c0093321bb5435a9dfae0ed6a762aabafa13108f5f7b2ba \
--hash=sha256:291f7c42e21d72144bb1c1b2e825ec60f46d0a7468f5346841860454c7aa8f57 \
--hash=sha256:2c5e309ec482556397cb21ede0350c5e82f0eb2621de04b2633588d118da4396 \
--hash=sha256:2e9c80a8c3344a92cb04661115898a9129c074f7ab82011ef4b612f645939f12 \
--hash=sha256:32a262e2b90ffcfdd97c7a5e24a6012a43c61f1f5a57789ad80af1d26c6acd97 \
--hash=sha256:3c9fff570f13480b201e9ab69453108f6d98244a7f495e91b6c654a47486ba43 \
--hash=sha256:415bdc7ca8c1c634a6d7163d43fb0ea885a07e9618a64bda407e04b04333b7db \
--hash=sha256:42194f54c11abc8583417a7cf4eaff544ce0de8187abaf5d29029c91b1725ad3 \
--hash=sha256:4424e42199e86b21fc4db83bd76909a6fc2a2aefb352cb5414833c030f6ed71b \
--hash=sha256:4a43c91840bda5f55249413037b7a9b79c90b1184ed504883b72c4df70778579 \
--hash=sha256:599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346 \
--hash=sha256:5c4fae4e9cdd18c82ba3a134be256e98dc0596af1e7285a3d2602c97dcfa5159 \
--hash=sha256:5ecfa867dea6fabe2a58f03ac9186ea64da1386af2159196da51c4904e11d652 \
--hash=sha256:62f2578358d3a92e4ab2d830cd1c2049c9c0d0e6d3c58322993cc341bdeac22e \
--hash=sha256:6471a82d5abea994e38d2c2abc77164b4f7fbaaf80261cb98394d5793f11b12a \
--hash=sha256:6d4f18483d040e18546108eb13b1dfa1000a089bcf8529e30346116ea6240506 \
--hash=sha256:71a608532ab3bd26223c8d841dde43f3516aa5d2bf37b50ac410bb5e99053e8f \
--hash=sha256:74a1d8c85fb6ff0b30fbfa8ad0ac23cd601a138f7509dc617ebc65ef305bb98d \
--hash=sha256:7b93a885bb13073afb0aa73ad82059a4c41f4b7d8eb8368980448b52d4c7dc2c \
--hash=sha256:7d4751da932caaec419d514eaa4215eaf14b612cff66398dd51129ac22680b20 \
--hash=sha256:7f627141a26b551bdebbc4855c1157feeef18241b4b8366ed22a5c7d672ef858 \
--hash=sha256:8169cf44dd8f9071b2b9248c35fc35e8677451c52f795daa2bb4643f32a540bc \
--hash=sha256:aa00d66c0fab27373ae44ae26a66a9e43ff2a678bf63a9c7c1a9a4d61172827a \
--hash=sha256:ccb032fda0873254380aa2bfad2582aedc2959186cce61e3a17abc1a55ff89c3 \
--hash=sha256:d754f39e0d1603b5b24a7f8484b22d2904fa551fe865fd0d4c3332f078d20d4e \
--hash=sha256:d75c461e20e29afc0aee7172a0950157c704ff0dd51613506bd7d82b718e7410 \
--hash=sha256:dcd65317dd15bc0451f3e01c80da2216a31916bdcffd6221ca1202d96584aa25 \
--hash=sha256:e570d3ab32e2c2861c4ebe6ffcad6a8abf9347432a37608fe1fbd157b3f0036b \
--hash=sha256:fd43a88e045cf992ed09fa724b5315b790525f2676883a6ea64e3263bae6549d
chardet==3.0.4 \
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
@@ -1289,66 +1331,67 @@ cryptography==2.8 \
--hash=sha256:df6b4dca2e11865e6cfbfb708e800efb18370f5a46fd601d3755bc7f85b3a8a2 \
--hash=sha256:ecadccc7ba52193963c0475ac9f6fa28ac01e01349a2ca48509667ef41ffd2cf \
--hash=sha256:fb81c17e0ebe3358486cd8cc3ad78adbae58af12fc2bf2bc0bb84e8090fa5ce8
distro==1.5.0 \
--hash=sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92 \
--hash=sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799
enum34==1.1.10; python_version < '3.4' \
--hash=sha256:a98a201d6de3f2ab3db284e70a33b0f896fbf35f8086594e8c9e74b909058d53 \
--hash=sha256:c3858660960c984d6ab0ebad691265180da2b43f07e061c0f8dca9ef3cffd328 \
--hash=sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248
distro==1.4.0 \
--hash=sha256:362dde65d846d23baee4b5c058c8586f219b5a54be1cf5fc6ff55c4578392f57 \
--hash=sha256:eedf82a470ebe7d010f1872c17237c79ab04097948800029994fa458e52fb4b4
# Package enum34 needs to be explicitly limited to Python2.x, in order to avoid
# certbot-auto failures on Python 3.6+ which enum34 doesn't support. See #5456.
enum34==1.1.6 ; python_version < '3.4' \
--hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
--hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
--hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
--hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1
funcsigs==1.0.2 \
--hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
--hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50
idna==2.9 \
--hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \
--hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa
idna==2.8 \
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
ipaddress==1.0.23 \
--hash=sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc \
--hash=sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2
josepy==1.3.0 \
--hash=sha256:c341ffa403399b18e9eae9012f804843045764d1390f9cb4648980a7569b1619 \
--hash=sha256:e54882c64be12a2a76533f73d33cba9e331950fda9e2731e843490b774e7a01c
josepy==1.2.0 \
--hash=sha256:8ea15573203f28653c00f4ac0142520777b1c59d9eddd8da3f256c6ba3cac916 \
--hash=sha256:9cec9a839fe9520f0420e4f38e7219525daccce4813296627436fe444cd002d3
mock==1.3.0 \
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6 \
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb
parsedatetime==2.5 \
--hash=sha256:3b835fc54e472c17ef447be37458b400e3fefdf14bb1ffdedb5d2c853acf4ba1 \
--hash=sha256:d2e9ddb1e463de871d32088a3f3cea3dc8282b1b2800e081bd0ef86900451667
pbr==5.4.5 \
--hash=sha256:07f558fece33b05caf857474a366dfcc00562bca13dd8b47b2b3e22d9f9bf55c \
--hash=sha256:579170e23f8e0c2f24b0de612f71f648eccb79fb1322c814ae6b3c07b5ba23e8
pbr==5.4.4 \
--hash=sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b \
--hash=sha256:61aa52a0f18b71c5cc58232d2cf8f8d09cd67fcad60b742a60124cb8d6951488
pyOpenSSL==19.1.0 \
--hash=sha256:621880965a720b8ece2f1b2f54ea2071966ab00e2970ad2ce11d596102063504 \
--hash=sha256:9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507
pyRFC3339==1.1 \
--hash=sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4 \
--hash=sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a
pycparser==2.20 \
--hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \
--hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705
pyparsing==2.4.7 \
--hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \
--hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b
pycparser==2.19 \
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3
pyparsing==2.4.6 \
--hash=sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f \
--hash=sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec
python-augeas==0.5.0 \
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2
pytz==2020.1 \
--hash=sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed \
--hash=sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048
requests==2.23.0 \
--hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \
--hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6
pytz==2019.3 \
--hash=sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d \
--hash=sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be
requests==2.22.0 \
--hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \
--hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31
requests-toolbelt==0.9.1 \
--hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
--hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0
six==1.15.0 \
--hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \
--hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced
urllib3==1.25.9 \
--hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \
--hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115
zope.component==4.6.1 \
--hash=sha256:bfbe55d4a93e70a78b10edc3aad4de31bb8860919b7cbd8d66f717f7d7b279ac \
--hash=sha256:d9c7c27673d787faff8a83797ce34d6ebcae26a370e25bddb465ac2182766aca
six==1.14.0 \
--hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \
--hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c
urllib3==1.25.8 \
--hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \
--hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc
zope.component==4.6 \
--hash=sha256:ec2afc5bbe611dcace98bb39822c122d44743d635dafc7315b9aef25097db9e6
zope.deferredimport==4.3.1 \
--hash=sha256:57b2345e7b5eef47efcd4f634ff16c93e4265de3dcf325afc7315ade48d909e1 \
--hash=sha256:9a0c211df44aa95f1c4e6d2626f90b400f56989180d3ef96032d708da3d23e0a
@@ -1358,129 +1401,126 @@ zope.deprecation==4.4.0 \
zope.event==4.4 \
--hash=sha256:69c27debad9bdacd9ce9b735dad382142281ac770c4a432b533d6d65c4614bcf \
--hash=sha256:d8e97d165fd5a0997b45f5303ae11ea3338becfe68c401dd88ffd2113fe5cae7
zope.hookable==5.0.1 \
--hash=sha256:0194b9b9e7f614abba60c90b231908861036578297515d3d6508eb10190f266d \
--hash=sha256:0c2977473918bdefc6fa8dfb311f154e7f13c6133957fe649704deca79b92093 \
--hash=sha256:17b8bdb3b77e03a152ca0d5ca185a7ae0156f5e5a2dbddf538676633a1f7380f \
--hash=sha256:29d07681a78042cdd15b268ae9decffed9ace68a53eebeb61d65ae931d158841 \
--hash=sha256:36fb1b35d1150267cb0543a1ddd950c0bc2c75ed0e6e92e3aaa6ac2e29416cb7 \
--hash=sha256:3aed60c2bb5e812bbf9295c70f25b17ac37c233f30447a96c67913ba5073642f \
--hash=sha256:3cac1565cc768911e72ca9ec4ddf5c5109e1fef0104f19f06649cf1874943b60 \
--hash=sha256:3d4bc0cc4a37c3cd3081063142eeb2125511db3c13f6dc932d899c512690378e \
--hash=sha256:3f73096f27b8c28be53ffb6604f7b570fbbb82f273c6febe5f58119009b59898 \
--hash=sha256:522d1153d93f2d48aa0bd9fb778d8d4500be2e4dcf86c3150768f0e3adbbc4ef \
--hash=sha256:523d2928fb7377bbdbc9af9c0b14ad73e6eaf226349f105733bdae27efd15b5a \
--hash=sha256:5848309d4fc5c02150a45e8f8d2227e5bfda386a508bbd3160fed7c633c5a2fa \
--hash=sha256:6781f86e6d54a110980a76e761eb54590630fd2af2a17d7edf02a079d2646c1d \
--hash=sha256:6fd27921ebf3aaa945fa25d790f1f2046204f24dba4946f82f5f0a442577c3e9 \
--hash=sha256:70d581862863f6bf9e175e85c9d70c2d7155f53fb04dcdb2f73cf288ca559a53 \
--hash=sha256:81867c23b0dc66c8366f351d00923f2bc5902820a24c2534dfd7bf01a5879963 \
--hash=sha256:81db29edadcbb740cd2716c95a297893a546ed89db1bfe9110168732d7f0afdd \
--hash=sha256:86bd12624068cea60860a0759af5e2c3adc89c12aef6f71cf12f577e28deefe3 \
--hash=sha256:9c184d8f9f7a76e1ced99855ccf390ffdd0ec3765e5cbf7b9cada600accc0a1e \
--hash=sha256:acc789e8c29c13555e43fe4bf9fcd15a65512c9645e97bbaa5602e3201252b02 \
--hash=sha256:afaa740206b7660d4cc3b8f120426c85761f51379af7a5b05451f624ad12b0af \
--hash=sha256:b5f5fa323f878bb16eae68ea1ba7f6c0419d4695d0248bed4b18f51d7ce5ab85 \
--hash=sha256:bd89e0e2c67bf4ac3aca2a19702b1a37269fb1923827f68324ac2e7afd6e3406 \
--hash=sha256:c212de743283ec0735db24ec6ad913758df3af1b7217550ff270038062afd6ae \
--hash=sha256:ca553f524293a0bdea05e7f44c3e685e4b7b022cb37d87bc4a3efa0f86587a8d \
--hash=sha256:cab67065a3db92f636128d3157cc5424a145f82d96fb47159c539132833a6d36 \
--hash=sha256:d3b3b3eedfdbf6b02898216e85aa6baf50207f4378a2a6803d6d47650cd37031 \
--hash=sha256:d9f4a5a72f40256b686d31c5c0b1fde503172307beb12c1568296e76118e402c \
--hash=sha256:df5067d87aaa111ed5d050e1ee853ba284969497f91806efd42425f5348f1c06 \
--hash=sha256:e2587644812c6138f05b8a41594a8337c6790e3baf9a01915e52438c13fc6bef \
--hash=sha256:e27fd877662db94f897f3fd532ef211ca4901eb1a70ba456f15c0866a985464a \
--hash=sha256:e427ebbdd223c72e06ba94c004bb04e996c84dec8a0fa84e837556ae145c439e \
--hash=sha256:e583ad4309c203ef75a09d43434cf9c2b4fa247997ecb0dcad769982c39411c7 \
--hash=sha256:e760b2bc8ece9200804f0c2b64d10147ecaf18455a2a90827fbec4c9d84f3ad5 \
--hash=sha256:ea9a9cc8bcc70e18023f30fa2f53d11ae069572a162791224e60cd65df55fb69 \
--hash=sha256:ecb3f17dce4803c1099bd21742cd126b59817a4e76a6544d31d2cca6e30dbffd \
--hash=sha256:ed794e3b3de42486d30444fb60b5561e724ee8a2d1b17b0c2e0f81e3ddaf7a87 \
--hash=sha256:ee885d347279e38226d0a437b6a932f207f691c502ee565aba27a7022f1285df \
--hash=sha256:fd5e7bc5f24f7e3d490698f7b854659a9851da2187414617cd5ed360af7efd63 \
--hash=sha256:fe45f6870f7588ac7b2763ff1ce98cce59369717afe70cc353ec5218bc854bcc
zope.interface==5.1.0 \
--hash=sha256:0103cba5ed09f27d2e3de7e48bb320338592e2fabc5ce1432cf33808eb2dfd8b \
--hash=sha256:14415d6979356629f1c386c8c4249b4d0082f2ea7f75871ebad2e29584bd16c5 \
--hash=sha256:1ae4693ccee94c6e0c88a4568fb3b34af8871c60f5ba30cf9f94977ed0e53ddd \
--hash=sha256:1b87ed2dc05cb835138f6a6e3595593fea3564d712cb2eb2de963a41fd35758c \
--hash=sha256:269b27f60bcf45438e8683269f8ecd1235fa13e5411de93dae3b9ee4fe7f7bc7 \
--hash=sha256:27d287e61639d692563d9dab76bafe071fbeb26818dd6a32a0022f3f7ca884b5 \
--hash=sha256:39106649c3082972106f930766ae23d1464a73b7d30b3698c986f74bf1256a34 \
--hash=sha256:40e4c42bd27ed3c11b2c983fecfb03356fae1209de10686d03c02c8696a1d90e \
--hash=sha256:461d4339b3b8f3335d7e2c90ce335eb275488c587b61aca4b305196dde2ff086 \
--hash=sha256:4f98f70328bc788c86a6a1a8a14b0ea979f81ae6015dd6c72978f1feff70ecda \
--hash=sha256:558a20a0845d1a5dc6ff87cd0f63d7dac982d7c3be05d2ffb6322a87c17fa286 \
--hash=sha256:562dccd37acec149458c1791da459f130c6cf8902c94c93b8d47c6337b9fb826 \
--hash=sha256:5e86c66a6dea8ab6152e83b0facc856dc4d435fe0f872f01d66ce0a2131b7f1d \
--hash=sha256:60a207efcd8c11d6bbeb7862e33418fba4e4ad79846d88d160d7231fcb42a5ee \
--hash=sha256:645a7092b77fdbc3f68d3cc98f9d3e71510e419f54019d6e282328c0dd140dcd \
--hash=sha256:6874367586c020705a44eecdad5d6b587c64b892e34305bb6ed87c9bbe22a5e9 \
--hash=sha256:74bf0a4f9091131de09286f9a605db449840e313753949fe07c8d0fe7659ad1e \
--hash=sha256:7b726194f938791a6691c7592c8b9e805fc6d1b9632a833b9c0640828cd49cbc \
--hash=sha256:8149ded7f90154fdc1a40e0c8975df58041a6f693b8f7edcd9348484e9dc17fe \
--hash=sha256:8cccf7057c7d19064a9e27660f5aec4e5c4001ffcf653a47531bde19b5aa2a8a \
--hash=sha256:911714b08b63d155f9c948da2b5534b223a1a4fc50bb67139ab68b277c938578 \
--hash=sha256:a5f8f85986197d1dd6444763c4a15c991bfed86d835a1f6f7d476f7198d5f56a \
--hash=sha256:a744132d0abaa854d1aad50ba9bc64e79c6f835b3e92521db4235a1991176813 \
--hash=sha256:af2c14efc0bb0e91af63d00080ccc067866fb8cbbaca2b0438ab4105f5e0f08d \
--hash=sha256:b054eb0a8aa712c8e9030065a59b5e6a5cf0746ecdb5f087cca5ec7685690c19 \
--hash=sha256:b0becb75418f8a130e9d465e718316cd17c7a8acce6fe8fe07adc72762bee425 \
--hash=sha256:b1d2ed1cbda2ae107283befd9284e650d840f8f7568cb9060b5466d25dc48975 \
--hash=sha256:ba4261c8ad00b49d48bbb3b5af388bb7576edfc0ca50a49c11dcb77caa1d897e \
--hash=sha256:d1fe9d7d09bb07228650903d6a9dc48ea649e3b8c69b1d263419cc722b3938e8 \
--hash=sha256:d7804f6a71fc2dda888ef2de266727ec2f3915373d5a785ed4ddc603bbc91e08 \
--hash=sha256:da2844fba024dd58eaa712561da47dcd1e7ad544a257482392472eae1c86d5e5 \
--hash=sha256:dcefc97d1daf8d55199420e9162ab584ed0893a109f45e438b9794ced44c9fd0 \
--hash=sha256:dd98c436a1fc56f48c70882cc243df89ad036210d871c7427dc164b31500dc11 \
--hash=sha256:e74671e43ed4569fbd7989e5eecc7d06dc134b571872ab1d5a88f4a123814e9f \
--hash=sha256:eb9b92f456ff3ec746cd4935b73c1117538d6124b8617bc0fe6fda0b3816e345 \
--hash=sha256:ebb4e637a1fb861c34e48a00d03cffa9234f42bef923aec44e5625ffb9a8e8f9 \
--hash=sha256:ef739fe89e7f43fb6494a43b1878a36273e5924869ba1d866f752c5812ae8d58 \
--hash=sha256:f40db0e02a8157d2b90857c24d89b6310f9b6c3642369852cdc3b5ac49b92afc \
--hash=sha256:f68bf937f113b88c866d090fea0bc52a098695173fc613b055a17ff0cf9683b6 \
--hash=sha256:fb55c182a3f7b84c1a2d6de5fa7b1a05d4660d866b91dbf8d74549c57a1499e8
zope.proxy==4.3.5 \
--hash=sha256:00573dfa755d0703ab84bb23cb6ecf97bb683c34b340d4df76651f97b0bab068 \
--hash=sha256:092049280f2848d2ba1b57b71fe04881762a220a97b65288bcb0968bb199ec30 \
--hash=sha256:0cbd27b4d3718b5ec74fc65ffa53c78d34c65c6fd9411b8352d2a4f855220cf1 \
--hash=sha256:17fc7e16d0c81f833a138818a30f366696653d521febc8e892858041c4d88785 \
--hash=sha256:19577dfeb70e8a67249ba92c8ad20589a1a2d86a8d693647fa8385408a4c17b0 \
--hash=sha256:207aa914576b1181597a1516e1b90599dc690c095343ae281b0772e44945e6a4 \
--hash=sha256:219a7db5ed53e523eb4a4769f13105118b6d5b04ed169a283c9775af221e231f \
--hash=sha256:2b50ea79849e46b5f4f2b0247a3687505d32d161eeb16a75f6f7e6cd81936e43 \
--hash=sha256:5903d38362b6c716e66bbe470f190579c530a5baf03dbc8500e5c2357aa569a5 \
--hash=sha256:5c24903675e271bd688c6e9e7df5775ac6b168feb87dbe0e4bcc90805f21b28f \
--hash=sha256:5ef6bc5ed98139e084f4e91100f2b098a0cd3493d4e76f9d6b3f7b95d7ad0f06 \
--hash=sha256:61b55ae3c23a126a788b33ffb18f37d6668e79a05e756588d9e4d4be7246ab1c \
--hash=sha256:63ddb992931a5e616c87d3d89f5a58db086e617548005c7f9059fac68c03a5cc \
--hash=sha256:6943da9c09870490dcfd50c4909c0cc19f434fa6948f61282dc9cb07bcf08160 \
--hash=sha256:6ad40f85c1207803d581d5d75e9ea25327cd524925699a83dfc03bf8e4ba72b7 \
--hash=sha256:6b44433a79bdd7af0e3337bd7bbcf53dd1f9b0fa66bf21bcb756060ce32a96c1 \
--hash=sha256:6bbaa245015d933a4172395baad7874373f162955d73612f0b66b6c2c33b6366 \
--hash=sha256:7007227f4ea85b40a2f5e5a244479f6a6dfcf906db9b55e812a814a8f0e2c28d \
--hash=sha256:74884a0aec1f1609190ec8b34b5d58fb3b5353cf22b96161e13e0e835f13518f \
--hash=sha256:7d25fe5571ddb16369054f54cdd883f23de9941476d97f2b92eb6d7d83afe22d \
--hash=sha256:7e162bdc5e3baad26b2262240be7d2bab36991d85a6a556e48b9dfb402370261 \
--hash=sha256:814d62678dc3a30f4aa081982d830b7c342cf230ffc9d030b020cb154eeebf9e \
--hash=sha256:8878a34c5313ee52e20aa50b03138af8d472bae465710fb954d133a9bfd3c38d \
--hash=sha256:a66a0d94e5b081d5d695e66d6667e91e74d79e273eee95c1747717ba9cb70792 \
--hash=sha256:a69f5cbf4addcfdf03dda564a671040127a6b7c34cf9fe4973582e68441b63fa \
--hash=sha256:b00f9f0c334d07709d3f73a7cb8ae63c6ca1a90c790a63b5e7effa666ef96021 \
--hash=sha256:b6ed71e4a7b4690447b626f499d978aa13197a0e592950e5d7020308f6054698 \
--hash=sha256:bdf5041e5851526e885af579d2f455348dba68d74f14a32781933569a327fddf \
--hash=sha256:be034360dd34e62608419f86e799c97d389c10a0e677a25f236a971b2f40dac9 \
--hash=sha256:cc8f590a5eed30b314ae6b0232d925519ade433f663de79cc3783e4b10d662ba \
--hash=sha256:cd7a318a15fe6cc4584bf3c4426f092ed08c0fd012cf2a9173114234fe193e11 \
--hash=sha256:cf19b5f63a59c20306e034e691402b02055c8f4e38bf6792c23cad489162a642 \
--hash=sha256:cfc781ce442ec407c841e9aa51d0e1024f72b6ec34caa8fdb6ef9576d549acf2 \
--hash=sha256:dea9f6f8633571e18bc20cad83603072e697103a567f4b0738d52dd0211b4527 \
--hash=sha256:e4a86a1d5eb2cce83c5972b3930c7c1eac81ab3508464345e2b8e54f119d5505 \
--hash=sha256:e7106374d4a74ed9ff00c46cc00f0a9f06a0775f8868e423f85d4464d2333679 \
--hash=sha256:e98a8a585b5668aa9e34d10f7785abf9545fe72663b4bfc16c99a115185ae6a5 \
--hash=sha256:f64840e68483316eb58d82c376ad3585ca995e69e33b230436de0cdddf7363f9 \
--hash=sha256:f8f4b0a9e6683e43889852130595c8854d8ae237f2324a053cdd884de936aa9b \
--hash=sha256:fc45a53219ed30a7f670a6d8c98527af0020e6fd4ee4c0a8fb59f147f06d816c
zope.hookable==5.0.0 \
--hash=sha256:0992a0dd692003c09fb958e1480cebd1a28f2ef32faa4857d864f3ca8e9d6952 \
--hash=sha256:0f325838dbac827a1e2ed5d482c1f2656b6844dc96aa098f7727e76395fcd694 \
--hash=sha256:22a317ba00f61bac99eac1a5e330be7cb8c316275a21269ec58aa396b602af0c \
--hash=sha256:25531cb5e7b35e8a6d1d6eddef624b9a22ce5dcf8f4448ef0f165acfa8c3fc21 \
--hash=sha256:30890892652766fc80d11f078aca9a5b8150bef6b88aba23799581a53515c404 \
--hash=sha256:342d682d93937e5b8c232baffb32a87d5eee605d44f74566657c64a239b7f342 \
--hash=sha256:46b2fddf1f5aeb526e02b91f7e62afbb9fff4ffd7aafc97cdb00a0d717641567 \
--hash=sha256:523318ff96df9b8d378d997c00c5d4cbfbff68dc48ff5ee5addabdb697d27528 \
--hash=sha256:53aa02eb8921d4e667c69d76adeed8fe426e43870c101cb08dcd2f3468aff742 \
--hash=sha256:62e79e8fdde087cb20822d7874758f5acbedbffaf3c0fbe06309eb8a41ee4e06 \
--hash=sha256:74bf2f757f7385b56dc3548adae508d8b3ef952d600b4b12b88f7d1706b05dcc \
--hash=sha256:751ee9d89eb96e00c1d7048da9725ce392a708ed43406416dc5ed61e4d199764 \
--hash=sha256:7b83bc341e682771fe810b360cd5d9c886a948976aea4b979ff214e10b8b523b \
--hash=sha256:81eeeb27dbb0ddaed8070daee529f0d1bfe4f74c7351cce2aaca3ea287c4cc32 \
--hash=sha256:856509191e16930335af4d773c0fc31a17bae8991eb6f167a09d5eddf25b56cc \
--hash=sha256:8853e81fd07b18fa9193b19e070dc0557848d9945b1d2dac3b7782543458c87d \
--hash=sha256:94506a732da2832029aecdfe6ea07eb1b70ee06d802fff34e1b3618fe7cdf026 \
--hash=sha256:95ad874a8cc94e786969215d660143817f745225579bfe318c4676e218d3147c \
--hash=sha256:9758ec9174966ffe5c499b6c3d149f80aa0a9238020006a2b87c6af5963fcf48 \
--hash=sha256:a169823e331da939aa7178fc152e65699aeb78957e46c6f80ccb50ee4c3616c2 \
--hash=sha256:a67878a798f6ca292729a28c2226592b3d000dc6ee7825d31887b553686c7ac7 \
--hash=sha256:a9a6d9eb2319a09905670810e2de971d6c49013843700b4975e2fc0afe96c8db \
--hash=sha256:b3e118b58a3d2301960e6f5f25736d92f6b9f861728d3b8c26d69f54d8a157d2 \
--hash=sha256:ca6705c2a1fb5059a4efbe9f5426be4cdf71b3c9564816916fc7aa7902f19ede \
--hash=sha256:cf711527c9d4ae72085f137caffb4be74fc007ffb17cd103628c7d5ba17e205f \
--hash=sha256:d087602a6845ebe9d5a1c5a949fedde2c45f372d77fbce4f7fe44b68b28a1d03 \
--hash=sha256:d1080e1074ddf75ad6662a9b34626650759c19a9093e1a32a503d37e48da135b \
--hash=sha256:db9c60368aff2b7e6c47115f3ad9bd6e96aa298b12ed5f8cb13f5673b30be565 \
--hash=sha256:dbeb127a04473f5a989169eb400b67beb921c749599b77650941c21fe39cb8d9 \
--hash=sha256:dca336ca3682d869d291d7cd18284f6ff6876e4244eb1821430323056b000e2c \
--hash=sha256:dd69a9be95346d10c853b6233fcafe3c0315b89424b378f2ad45170d8e161568 \
--hash=sha256:dd79f8fae5894f1ee0a0042214685f2d039341250c994b825c10a4cd075d80f6 \
--hash=sha256:e647d850aa1286d98910133cee12bd87c354f7b7bb3f3cd816a62ba7fa2f7007 \
--hash=sha256:f37a210b5c04b2d4e4bac494ab15b70196f219a1e1649ddca78560757d4278fb \
--hash=sha256:f67820b6d33a705dc3c1c457156e51686f7b350ff57f2112e1a9a4dad38ec268 \
--hash=sha256:f68969978ccf0e6123902f7365aae5b7a9e99169d4b9105c47cf28e788116894 \
--hash=sha256:f717a0b34460ae1ac0064e91b267c0588ac2c098ffd695992e72cd5462d97a67 \
--hash=sha256:f9d58ccec8684ca276d5a4e7b0dfacca028336300a8f715d616d9f0ce9ae8096 \
--hash=sha256:fcc3513a54e656067cbf7b98bab0d6b9534b9eabc666d1f78aad6acdf0962736
zope.interface==4.7.1 \
--hash=sha256:048b16ac882a05bc7ef534e8b9f15c9d7a6c190e24e8938a19b7617af4ed854a \
--hash=sha256:05816cf8e7407cf62f2ec95c0a5d69ec4fa5741d9ccd10db9f21691916a9a098 \
--hash=sha256:065d6a1ac89d35445168813bed45048ed4e67a4cdfc5a68fdb626a770378869f \
--hash=sha256:14157421f4121a57625002cc4f48ac7521ea238d697c4a4459a884b62132b977 \
--hash=sha256:18dc895945694f397a0be86be760ff664b790f95d8e7752d5bab80284ff9105d \
--hash=sha256:1962c9f838bd6ae4075d0014f72697510daefc7e1c7e48b2607df0b6e157989c \
--hash=sha256:1a67408cacd198c7e6274a19920bb4568d56459e659e23c4915528686ac1763a \
--hash=sha256:21bf781076dd616bd07cf0223f79d61ab4f45176076f90bc2890e18c48195da4 \
--hash=sha256:21c0a5d98650aebb84efa16ce2c8df1a46bdc4fe8a9e33237d0ca0b23f416ead \
--hash=sha256:23cfeea25d1e42ff3bf4f9a0c31e9d5950aa9e7c4b12f0c4bd086f378f7b7a71 \
--hash=sha256:24b6fce1fb71abf9f4093e3259084efcc0ef479f89356757780685bd2b06ef37 \
--hash=sha256:24f84ce24eb6b5fcdcb38ad9761524f1ae96f7126abb5e597f8a3973d9921409 \
--hash=sha256:25e0ef4a824017809d6d8b0ce4ab3288594ba283e4d4f94d8cfb81d73ed65114 \
--hash=sha256:2e8fdd625e9aba31228e7ddbc36bad5c38dc3ee99a86aa420f89a290bd987ce9 \
--hash=sha256:2f3bc2f49b67b1bea82b942d25bc958d4f4ea6709b411cb2b6b9718adf7914ce \
--hash=sha256:35d24be9d04d50da3a6f4d61de028c1dd087045385a0ff374d93ef85af61b584 \
--hash=sha256:35dbe4e8c73003dff40dfaeb15902910a4360699375e7b47d3c909a83ff27cd0 \
--hash=sha256:3dfce831b824ab5cf446ed0c350b793ac6fa5fe33b984305cb4c966a86a8fb79 \
--hash=sha256:3f7866365df5a36a7b8de8056cd1c605648f56f9a226d918ed84c85d25e8d55f \
--hash=sha256:455cc8c01de3bac6f9c223967cea41f4449f58b4c2e724ec8177382ddd183ab4 \
--hash=sha256:4bb937e998be9d5e345f486693e477ba79e4344674484001a0b646be1d530487 \
--hash=sha256:52303a20902ca0888dfb83230ca3ee6fbe63c0ad1dd60aa0bba7958ccff454d8 \
--hash=sha256:6e0a897d4e09859cc80c6a16a29697406ead752292ace17f1805126a4f63c838 \
--hash=sha256:6e1816e7c10966330d77af45f77501f9a68818c065dec0ad11d22b50a0e212e7 \
--hash=sha256:73b5921c5c6ce3358c836461b5470bf675601c96d5e5d8f2a446951470614f67 \
--hash=sha256:8093cd45cdb5f6c8591cfd1af03d32b32965b0f79b94684cd0c9afdf841982bb \
--hash=sha256:864b4a94b60db301899cf373579fd9ef92edddbf0fb2cd5ae99f53ef423ccc56 \
--hash=sha256:8a27b4d3ea9c6d086ce8e7cdb3e8d319b6752e2a03238a388ccc83ccbe165f50 \
--hash=sha256:91b847969d4784abd855165a2d163f72ac1e58e6dce09a5e46c20e58f19cc96d \
--hash=sha256:b47b1028be4758c3167e474884ccc079b94835f058984b15c145966c4df64d27 \
--hash=sha256:b68814a322835d8ad671b7acc23a3b2acecba527bb14f4b53fc925f8a27e44d8 \
--hash=sha256:bcb50a032c3b6ec7fb281b3a83d2b31ab5246c5b119588725b1350d3a1d9f6a3 \
--hash=sha256:c56db7d10b25ce8918b6aec6b08ac401842b47e6c136773bfb3b590753f7fb67 \
--hash=sha256:c94b77a13d4f47883e4f97f9fa00f5feadd38af3e6b3c7be45cfdb0a14c7149b \
--hash=sha256:db381f6fdaef483ad435f778086ccc4890120aff8df2ba5cfeeac24d280b3145 \
--hash=sha256:e6487d01c8b7ed86af30ea141fcc4f93f8a7dde26f94177c1ad637c353bd5c07 \
--hash=sha256:e86923fa728dfba39c5bb6046a450bd4eec8ad949ac404eca728cfce320d1732 \
--hash=sha256:f6ca36dc1e9eeb46d779869c60001b3065fb670b5775c51421c099ea2a77c3c9 \
--hash=sha256:fb62f2cbe790a50d95593fb40e8cca261c31a2f5637455ea39440d6457c2ba25
zope.proxy==4.3.3 \
--hash=sha256:04646ac04ffa9c8e32fb2b5c3cd42995b2548ea14251f3c21ca704afae88e42c \
--hash=sha256:07b6bceea232559d24358832f1cd2ed344bbf05ca83855a5b9698b5f23c5ed60 \
--hash=sha256:1ef452cc02e0e2f8e3c917b1a5b936ef3280f2c2ca854ee70ac2164d1655f7e6 \
--hash=sha256:22bf61857c5977f34d4e391476d40f9a3b8c6ab24fb0cac448d42d8f8b9bf7b2 \
--hash=sha256:299870e3428cbff1cd9f9b34144e76ecdc1d9e3192a8cf5f1b0258f47a239f58 \
--hash=sha256:2bfc36bfccbe047671170ea5677efd3d5ab730a55d7e45611d76d495e5b96766 \
--hash=sha256:32e82d5a640febc688c0789e15ea875bf696a10cf358f049e1ed841f01710a9b \
--hash=sha256:3b2051bdc4bc3f02fa52483f6381cf40d4d48167645241993f9d7ebbd142ed9b \
--hash=sha256:3f734bd8a08f5185a64fb6abb8f14dc97ec27a689ca808fb7a83cdd38d745e4f \
--hash=sha256:3f78dd8de3112df8bbd970f0916ac876dc3fbe63810bd1cf7cc5eec4cbac4f04 \
--hash=sha256:4eabeb48508953ba1f3590ad0773b8daea9e104eec66d661917e9bbcd7125a67 \
--hash=sha256:4f05ecc33808187f430f249cb1ccab35c38f570b181f2d380fbe253da94b18d8 \
--hash=sha256:4f4f4cbf23d3afc1526294a31e7b3eaa0f682cc28ac5366065dc1d6bb18bd7be \
--hash=sha256:5483d5e70aacd06f0aa3effec9fed597c0b50f45060956eeeb1203c44d4338c3 \
--hash=sha256:56a5f9b46892b115a75d0a1f2292431ad5988461175826600acc69a24cb3edee \
--hash=sha256:64bb63af8a06f736927d260efdd4dfc5253d42244f281a8063e4b9eea2ddcbc5 \
--hash=sha256:653f8cbefcf7c6ac4cece2cdef367c4faa2b7c19795d52bd7cbec11a8739a7c1 \
--hash=sha256:664211d63306e4bd4eec35bf2b4bd9db61c394037911cf2d1804c43b511a49f1 \
--hash=sha256:6651e6caed66a8fff0fef1a3e81c0ed2253bf361c0fdc834500488732c5d16e9 \
--hash=sha256:6c1fba6cdfdf105739d3069cf7b07664f2944d82a8098218ab2300a82d8f40fc \
--hash=sha256:6e64246e6e9044a4534a69dca1283c6ddab6e757be5e6874f69024329b3aa61f \
--hash=sha256:838390245c7ec137af4993c0c8052f49d5ec79e422b4451bfa37fee9b9ccaa01 \
--hash=sha256:856b410a14793069d8ba35f33fff667213ea66f2df25a0024cc72a7493c56d4c \
--hash=sha256:8b932c364c1d1605a91907a41128ed0ee8a2d326fc0fafb2c55cd46f545f4599 \
--hash=sha256:9086cf6d20f08dae7f296a78f6c77d1f8d24079d448f023ee0eb329078dd35e1 \
--hash=sha256:9698533c14afa0548188de4968a7932d1f3f965f3f5ba1474de673596bb875af \
--hash=sha256:9b12b05dd7c28f5068387c1afee8cb94f9d02501e7ef495a7c5c7e27139b96ad \
--hash=sha256:a884c7426a5bc6fb7fc71a55ad14e66818e13f05b78b20a6f37175f324b7acb8 \
--hash=sha256:abe9e7f1a3e76286c5f5baf2bf5162d41dc0310da493b34a2c36555f38d928f7 \
--hash=sha256:bd6fde63b015a27262be06bd6bbdd895273cc2bdf2d4c7e1c83711d26a8fbace \
--hash=sha256:bda7c62c954f47b87ed9a89f525eee1b318ec7c2162dfdba76c2ccfa334e0caa \
--hash=sha256:be8a4908dd3f6e965993c0068b006bdbd0474fbcbd1da4893b49356e73fc1557 \
--hash=sha256:ced65fc3c7d7205267506d854bb1815bb445899cca9d21d1d4b949070a635546 \
--hash=sha256:dac4279aa05055d3897ab5e5ee5a7b39db121f91df65a530f8b1ac7f9bd93119 \
--hash=sha256:e4f1863056e3e4f399c285b67fa816f411a7bfa1c81ef50e186126164e396e59 \
--hash=sha256:ecd85f68b8cd9ab78a0141e87ea9a53b2f31fd9b1350a1c44da1f7481b5363ef \
--hash=sha256:ed269b83750413e8fc5c96276372f49ee3fcb7ed61c49fe8e5a67f54459a5a4a \
--hash=sha256:f19b0b80cba73b204dee68501870b11067711d21d243fb6774256d3ca2e5391f \
--hash=sha256:ffdafb98db7574f9da84c489a10a5d582079a888cb43c64e9e6b0e3fe1034685
# Contains the requirements for the letsencrypt package.
#
@@ -1493,18 +1533,18 @@ letsencrypt==0.7.0 \
--hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
--hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9
certbot==1.10.1 \
--hash=sha256:011ac980fa21b9f29e02c9b8d8b86e8a4bf4670b51b6ad91656e401e9d2d2231 \
--hash=sha256:0d9ee3fc09e0d03b2d1b1f1c4916e61ecfc6904b4216ddef4e6a5ca1424d9cb7
acme==1.10.1 \
--hash=sha256:752d598e54e98ad1e874de53fd50c61044f1b566d6deb790db5676ce9c573546 \
--hash=sha256:fcbb559aedc96b404edf593e78517dcd7291984d5a37036c3fc77f3c5c122fd8
certbot-apache==1.10.1 \
--hash=sha256:f077b4b7f166627ef5e0921fe7cde57700670fc86e9ad9dbdfaf2c573cc0f2fa \
--hash=sha256:97ed637b4c7b03820db6c69aa90145dc989933351d46a3d62baf6b71674f0a10
certbot-nginx==1.10.1 \
--hash=sha256:7c36459021f8a1ec3b6c062e4c4fc866bfaa1dbf26ccd29e043dd6848003be08 \
--hash=sha256:c0bbeccf85f46b728fd95e6bb8c2649d32d3383d7f47ea4b9c312d12bf04d2f0
certbot==1.5.0 \
--hash=sha256:ec1f01af06b52a6f079f5b02cb70e88f0671a7b13ecb3e45b040563e32c6e53a \
--hash=sha256:c52017a4f84137e1312c898d6ae69c5f7977d79d2bd4c2df013cbbf39b6539bf
acme==1.5.0 \
--hash=sha256:66de67b394bb7606f97f2c21507e6eb6a88936db2a940f5c4893025f87e3852a \
--hash=sha256:b051ff7dd3935b2032c2f8c8386e905d9b658eba9f3455e352650d85bea9c8f0
certbot-apache==1.5.0 \
--hash=sha256:d2c28be6dcd6c56a8040c8c733e72c1341238b1b47fb59f544eb832b9d5c81ba \
--hash=sha256:3eec5a49ae4fcf86213f962eb1e11d8a725b65e7dcee18f9b92c7aa73f821764
certbot-nginx==1.5.0 \
--hash=sha256:3d27fd02ebe15b07ce5fa9525ceeda82aa5fdc45aa064729434faff0442d1f91 \
--hash=sha256:b38f101588af6d2b8ea7c2e3334f249afbe14461a85add2f1420091d860df983
UNLIKELY_EOF
# -------------------------------------------------------------------------
@@ -1578,11 +1618,6 @@ maybe_argparse = (
if sys.version_info < (2, 7, 0) else [])
# Be careful when updating the pinned versions here, in particular for pip.
# Indeed starting from 10.0, pip will build dependencies in isolation if the
# related projects are compliant with PEP 517. This is not something we want
# as of now, so the isolation build will need to be disabled wherever
# pipstrap is used (see https://github.com/certbot/certbot/issues/8256).
PACKAGES = maybe_argparse + [
# Pip has no dependencies, as it vendors everything:
('11/b6/abcb525026a4be042b486df43905d6893fb04f05aac21c32c638e939e447/'

View File

@@ -1,60 +0,0 @@
options {
directory "/var/cache/bind";
// Running inside Docker. Bind address on Docker host is 127.0.0.1.
listen-on { any; };
listen-on-v6 { any; };
// We are allowing BIND to service recursive queries, but only in an extremely limited sense
// where it is entirely disconnected from public DNS:
// - Iterative queries are disabled. Only forwarding to a non-existent forwarder.
// - The only recursive answers we can get (that will not be a SERVFAIL) will come from the
// RPZ "mock-recursion" zone. Effectively this means we are mocking out the entirety of
// public DNS.
allow-recursion { any; }; // BIND will only answer using RPZ if recursion is enabled
forwarders { 192.0.2.254; }; // Nobody is listening, this is TEST-NET-1
forward only; // Do NOT perform iterative queries from the root zone
dnssec-validation no; // Do not bother fetching the root DNSKEY set (performance)
response-policy { // All recursive queries will be served from here.
zone "mock-recursion"
log yes;
} recursive-only no // Allow RPZs to affect authoritative zones too.
qname-wait-recurse no // No real recursion.
nsip-wait-recurse no; // No real recursion.
allow-transfer { none; };
allow-update { none; };
};
key "default-key." {
algorithm hmac-sha512;
secret "91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==";
};
zone "mock-recursion" {
type primary;
file "/var/lib/bind/rpz.mock-recursion";
allow-query {
none;
};
};
zone "example.com." {
type primary;
file "/var/lib/bind/db.example.com";
journal "/var/cache/bind/db.example.com.jnl";
update-policy {
grant default-key zonesub TXT;
};
};
zone "sub.example.com." {
type primary;
file "/var/lib/bind/db.sub.example.com";
journal "/var/cache/bind/db.sub.example.com.jnl";
update-policy {
grant default-key zonesub TXT;
};
};

View File

@@ -1,10 +0,0 @@
# Target DNS server
dns_rfc2136_server = {server_address}
# Target DNS port
dns_rfc2136_port = {server_port}
# TSIG key name
dns_rfc2136_name = default-key.
# TSIG key secret
dns_rfc2136_secret = 91CgOwzihr0nAVEHKFXJPQCbuBBbBI19Ks5VAweUXgbF40NWTD83naeg3c5y2MPdEiFRXnRLJxL6M+AfHCGLNw==
# TSIG key algorithm
dns_rfc2136_algorithm = HMAC-SHA512

View File

@@ -1,11 +0,0 @@
$ORIGIN example.com.
$TTL 3600
example.com. IN SOA ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )
example.com. IN NS ns1
example.com. IN NS ns2
ns1 IN A 192.0.2.2
ns2 IN A 192.0.2.3
@ IN A 192.0.2.1

View File

@@ -1,9 +0,0 @@
$ORIGIN sub.example.com.
$TTL 3600
sub.example.com. IN SOA ns1.example.com. admin.example.com. ( 2020091025 7200 3600 1209600 3600 )
sub.example.com. IN NS ns1
sub.example.com. IN NS ns2
ns1 IN A 192.0.2.2
ns2 IN A 192.0.2.3

View File

@@ -1,6 +0,0 @@
$TTL 3600
@ SOA ns1.example.test. dummy.example.test. 1 12h 15m 3w 2h
NS ns1.example.test.
_acme-challenge.aliased.example IN CNAME _acme-challenge.example.com.

View File

@@ -1,14 +0,0 @@
This directory contains your keys and certificates.
`privkey.pem` : the private key for your certificate.
`fullchain.pem`: the certificate file used in most server software.
`chain.pem` : used for OCSP stapling in Nginx >=1.3.7.
`cert.pem` : will break many server configurations, and should not be used
without reading further documentation (see link below).
WARNING: DO NOT MOVE OR RENAME THESE FILES!
Certbot expects these files to remain in this location in order
to function properly!
We recommend not moving these files. For more information, see the Certbot
User Guide at https://certbot.eff.org/docs/using.html#where-are-my-certificates.

View File

@@ -1,18 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIC2zCCAcOgAwIBAgIIBvrEnbPRYu8wDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxMjZjNGIwHhcNMjAxMDEyMjEwNzQw
WhcNMjUxMDEyMjEwNzQwWjAjMSEwHwYDVQQDExhjLmVuY3J5cHRpb24tZXhhbXBs
ZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAARjMhuW0ENPPC33PjB5XsYU
CRw640kPQENIDatcTJaENZIZdqKd6rI6jc+lpbmXot7Zi52clJlSJS+V6oDAt2Lh
o4HYMIHVMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB
BQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUj7Kd3ENqxlPf8B2bIGhsjydX
mPswHwYDVR0jBBgwFoAUEiGxlkRsi+VvcogH5dVD3h1laAcwMQYIKwYBBQUHAQEE
JTAjMCEGCCsGAQUFBzABhhVodHRwOi8vMTI3LjAuMC4xOjQwMDIwIwYDVR0RBBww
GoIYYy5lbmNyeXB0aW9uLWV4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQCl
k0JXsa8y7fg41WWMDhw60bPW77O0FtOmTcnhdI5daYNemQVk+Q5EMaBLQ/oGjgXd
9QXFzXH1PL904YEnSLt+iTpXn++7rQSNzQsdYqw0neWk4f5pEBiN+WORpb6mwobV
ifMtBOkNEHvrJ2Pkci9U1lLwtKD/DSew6QtJU5DSkmH1XdGuMJiubygEIvELtvgq
cP9S368ZvPmPGmKaJQXBiuaR8MTjY/Bkr79aXQMjKbf+mpn7h0POCcePk1DY/rm6
Da+X16lf0hHyQhSUa7Vgyim6rK1/hlw+Z00i+sQCKD9Ih7kXuuGqfSDC33cfO8Tj
o/MXO8lcxkrem5zU5QWP
-----END CERTIFICATE-----

View File

@@ -1,20 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDUDCCAjigAwIBAgIIbi787yVrcMAwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE
AxMVUGViYmxlIFJvb3QgQ0EgMGM1MjI1MCAXDTIwMTAxMjIwMjI0NloYDzIwNTAx
MDEyMjEyMjQ2WjAoMSYwJAYDVQQDEx1QZWJibGUgSW50ZXJtZWRpYXRlIENBIDEy
NmM0YjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALGeVk1BMJraeqRq
mJ2+hgso8VOAv2s2CVxUJjIVcn7f2adE8NyTsSQ1brlsnKCUYUw7yLTQH0izLQRB
qKVIDFkUqo5/FuTJ2QlfA2EwBL8J7s/7L7vj3L0DiVpwgxPSyFEwdl/Y5y7ofsX5
CIhCFcaMAmTIuKLiSfCJjGwkbEMuolm+lO8Mikxxc/JtDVUC479ugU7PU9O09bMH
nm+sD6Bgd+KMoPkCCCoeShJS9X3Ziq9HGc7Z6nhM/zirFARt2XkonEdAZ8br01zY
MRiY9txhlWQ7mUkOtzOSoEuYJNoUbvMUf0+tNzto26WRyF7dJmh7lTBsYrvAwUTx
PzNyst0CAwEAAaOBgzCBgDAOBgNVHQ8BAf8EBAMCAoQwHQYDVR0lBBYwFAYIKwYB
BQUHAwEGCCsGAQUFBwMCMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFBIhsZZE
bIvlb3KIB+XVQ94dZWgHMB8GA1UdIwQYMBaAFOaKTaXg37vKgRt7d79YOjAoAtJT
MA0GCSqGSIb3DQEBCwUAA4IBAQAU2mZii7PH2pkw2lNM0QqPbcW/UYyvFoUeM8Aq
uCtsI2s+oxCJTqzfLsA0N8NY4nHLQ5wAlNJfJekngni8hbmJTKU4JFTMe7kLQO8P
fJbk0pTzhhHVQw7CVwB6Pwq3u2m/JV+d6xDIDc+AVkuEl19ZJU0rTWyooClfFLZV
EdZmEiUtA3PGlxoYwYhoGHYlhFxsoFONhCsBEdN7k7FKtFGVxN7oc5SKmKp0YZTW
fcrEtrdNThATO4ymhCC2zh33NI/MT1O74fpaAc2k6LcTl57MKiLfTYX4LTL6v9JG
9tlNqjFVRRmzEbtXTPcCb+w9g1VqoOGok7mGXYLTYtShCuvE
-----END CERTIFICATE-----

View File

@@ -1,38 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIC2zCCAcOgAwIBAgIILlmGtZhUFEwwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxMjZjNGIwHhcNMjAxMDEyMjA1MDM0
WhcNMjUxMDEyMjA1MDM0WjAjMSEwHwYDVQQDExhjLmVuY3J5cHRpb24tZXhhbXBs
ZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAARHEzR8JPWrEmpmgM+F2bk5
9mT0u6CjzmJG0QpbaqprLiG5NGpW84VQ5TFCrmC4KxYfigCfMhfHRNfFYvNUK3V/
o4HYMIHVMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYB
BQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1CsVL+bPnzaxxQ5jUENmQJIO
lKwwHwYDVR0jBBgwFoAUEiGxlkRsi+VvcogH5dVD3h1laAcwMQYIKwYBBQUHAQEE
JTAjMCEGCCsGAQUFBzABhhVodHRwOi8vMTI3LjAuMC4xOjQwMDIwIwYDVR0RBBww
GoIYYy5lbmNyeXB0aW9uLWV4YW1wbGUuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQBn
2D8loC7pfk28JYpFLr5lmFKJWWmtLGlpsWDj61fVjtTfGKLziJz+MM6il4Y3hIz5
58qiFK0ue0M63dIBJ33N+XxSEXon4Q0gy/zRWfH9jtPJ3FwfjkU/RT9PAUClYi0G
ptNWnTmgQkNzousbcAtRNXuuShH3856vhUnwkX+xM+cbIDi1JVmFjcGrEEQJ0rUF
mv2ZTyfbWbUs3v4rReETi2NVzr1Ql6J+ByNcMvHODzFy3t0L6yelAw2ca1I+c9HU
+Z0tnp/ykR7eXNuVLivok8UBf5OC413lh8ZO5g+Bgzh/LdtkUuavg1MYtEX0H6mX
9U7y3nVI8WEbPGf+HDeu
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIDUDCCAjigAwIBAgIIbi787yVrcMAwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE
AxMVUGViYmxlIFJvb3QgQ0EgMGM1MjI1MCAXDTIwMTAxMjIwMjI0NloYDzIwNTAx
MDEyMjEyMjQ2WjAoMSYwJAYDVQQDEx1QZWJibGUgSW50ZXJtZWRpYXRlIENBIDEy
NmM0YjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALGeVk1BMJraeqRq
mJ2+hgso8VOAv2s2CVxUJjIVcn7f2adE8NyTsSQ1brlsnKCUYUw7yLTQH0izLQRB
qKVIDFkUqo5/FuTJ2QlfA2EwBL8J7s/7L7vj3L0DiVpwgxPSyFEwdl/Y5y7ofsX5
CIhCFcaMAmTIuKLiSfCJjGwkbEMuolm+lO8Mikxxc/JtDVUC479ugU7PU9O09bMH
nm+sD6Bgd+KMoPkCCCoeShJS9X3Ziq9HGc7Z6nhM/zirFARt2XkonEdAZ8br01zY
MRiY9txhlWQ7mUkOtzOSoEuYJNoUbvMUf0+tNzto26WRyF7dJmh7lTBsYrvAwUTx
PzNyst0CAwEAAaOBgzCBgDAOBgNVHQ8BAf8EBAMCAoQwHQYDVR0lBBYwFAYIKwYB
BQUHAwEGCCsGAQUFBwMCMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFBIhsZZE
bIvlb3KIB+XVQ94dZWgHMB8GA1UdIwQYMBaAFOaKTaXg37vKgRt7d79YOjAoAtJT
MA0GCSqGSIb3DQEBCwUAA4IBAQAU2mZii7PH2pkw2lNM0QqPbcW/UYyvFoUeM8Aq
uCtsI2s+oxCJTqzfLsA0N8NY4nHLQ5wAlNJfJekngni8hbmJTKU4JFTMe7kLQO8P
fJbk0pTzhhHVQw7CVwB6Pwq3u2m/JV+d6xDIDc+AVkuEl19ZJU0rTWyooClfFLZV
EdZmEiUtA3PGlxoYwYhoGHYlhFxsoFONhCsBEdN7k7FKtFGVxN7oc5SKmKp0YZTW
fcrEtrdNThATO4ymhCC2zh33NI/MT1O74fpaAc2k6LcTl57MKiLfTYX4LTL6v9JG
9tlNqjFVRRmzEbtXTPcCb+w9g1VqoOGok7mGXYLTYtShCuvE
-----END CERTIFICATE-----

View File

@@ -1,5 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgNgefv2dad4U1VYEi
0WkdHuqywi5QXAe30OwNTTGjhbihRANCAARHEzR8JPWrEmpmgM+F2bk59mT0u6Cj
zmJG0QpbaqprLiG5NGpW84VQ5TFCrmC4KxYfigCfMhfHRNfFYvNUK3V/
-----END PRIVATE KEY-----

View File

@@ -1,14 +0,0 @@
This directory contains your keys and certificates.
`privkey.pem` : the private key for your certificate.
`fullchain.pem`: the certificate file used in most server software.
`chain.pem` : used for OCSP stapling in Nginx >=1.3.7.
`cert.pem` : will break many server configurations, and should not be used
without reading further documentation (see link below).
WARNING: DO NOT MOVE OR RENAME THESE FILES!
Certbot expects these files to remain in this location in order
to function properly!
We recommend not moving these files. For more information, see the Certbot
User Guide at https://certbot.eff.org/docs/using.html#where-are-my-certificates.

View File

@@ -1 +0,0 @@
../../archive/c.encryption-example.com/cert.pem

View File

@@ -1 +0,0 @@
../../archive/c.encryption-example.com/chain.pem

View File

@@ -1 +0,0 @@
../../archive/c.encryption-example.com/fullchain.pem

View File

@@ -1 +0,0 @@
../../archive/c.encryption-example.com/privkey.pem

View File

@@ -1,17 +0,0 @@
# renew_before_expiry = 30 days
version = 1.10.0.dev0
archive_dir = sample-config/archive/c.encryption-example.com
cert = sample-config/live/c.encryption-example.com/cert.pem
privkey = sample-config/live/c.encryption-example.com/privkey.pem
chain = sample-config/live/c.encryption-example.com/chain.pem
fullchain = sample-config/live/c.encryption-example.com/fullchain.pem
# Options used in the renewal process
[renewalparams]
authenticator = apache
installer = apache
account = 48d6b9e8d767eccf7e4d877d6ffa81e3
key_type = ecdsa
config_dir = sample-config-ec
elliptic_curve = secp256r1
manual_public_ip_logging_ok = True

View File

@@ -2,11 +2,6 @@
import io
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.serialization import load_pem_private_key
try:
import grp
POSIX_MODE = True
@@ -21,33 +16,6 @@ SYSTEM_SID = 'S-1-5-18'
ADMINS_SID = 'S-1-5-32-544'
def assert_elliptic_key(key, curve):
"""
Asserts that the key at the given path is an EC key using the given curve.
:param key: path to key
:param curve: name of the expected elliptic curve
"""
with open(key, 'rb') as file:
privkey1 = file.read()
key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())
assert isinstance(key, EllipticCurvePrivateKey)
assert isinstance(key.curve, curve)
def assert_rsa_key(key):
"""
Asserts that the key at the given path is an RSA key.
:param key: path to key
"""
with open(key, 'rb') as file:
privkey1 = file.read()
key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())
assert isinstance(key, RSAPrivateKey)
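The `assert_elliptic_key` / `assert_rsa_key` helpers in this hunk boil down to loading a PEM key with `cryptography` and checking its type. A minimal standalone sketch of that check (the key path is an assumption):

```python
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.serialization import load_pem_private_key

with open("privkey1.pem", "rb") as pem_file:  # path is an assumption
    key = load_pem_private_key(pem_file.read(), password=None, backend=default_backend())

if isinstance(key, EllipticCurvePrivateKey):
    print("EC key on curve", key.curve.name)   # e.g. "secp256r1"
elif isinstance(key, RSAPrivateKey):
    print("RSA key of", key.key_size, "bits")  # e.g. 2048
```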
def assert_hook_execution(probe_path, probe_content):
"""
Assert that a certbot hook has been executed

View File

@@ -9,15 +9,10 @@ import shutil
import subprocess
import time
from cryptography.hazmat.primitives.asymmetric.ec import SECP256R1, SECP384R1
from cryptography.x509 import NameOID
import pytest
from certbot_integration_tests.certbot_tests import context as certbot_context
from certbot_integration_tests.certbot_tests.assertions import assert_cert_count_for_lineage
from certbot_integration_tests.certbot_tests.assertions import assert_elliptic_key
from certbot_integration_tests.certbot_tests.assertions import assert_rsa_key
from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_owner
from certbot_integration_tests.certbot_tests.assertions import assert_equals_group_permissions
from certbot_integration_tests.certbot_tests.assertions import assert_equals_world_read_permissions
@@ -292,7 +287,7 @@ def test_renew_with_changed_private_key_complexity(context):
assert_cert_count_for_lineage(context.config_dir, certname, 1)
context.certbot(['renew'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
assert os.stat(key2).st_size > 3000
@@ -424,93 +419,20 @@ def test_reuse_key(context):
assert len({cert1, cert2, cert3}) == 3
def test_incorrect_key_type(context):
with pytest.raises(subprocess.CalledProcessError):
context.certbot(['--key-type="failwhale"'])
def test_ecdsa(context):
"""Test issuance for ECDSA CSR based request (legacy supported mode)."""
"""Test certificate issuance with ECDSA key."""
key_path = join(context.workspace, 'privkey-p384.pem')
csr_path = join(context.workspace, 'csr-p384.der')
cert_path = join(context.workspace, 'cert-p384.pem')
chain_path = join(context.workspace, 'chain-p384.pem')
misc.generate_csr(
[context.get_domain('ecdsa')],
key_path, csr_path,
key_type=misc.ECDSA_KEY_TYPE
)
context.certbot([
'auth', '--csr', csr_path, '--cert-path', cert_path,
'--chain-path', chain_path,
])
misc.generate_csr([context.get_domain('ecdsa')], key_path, csr_path, key_type=misc.ECDSA_KEY_TYPE)
context.certbot(['auth', '--csr', csr_path, '--cert-path', cert_path, '--chain-path', chain_path])
certificate = misc.read_certificate(cert_path)
assert 'ASN1 OID: secp384r1' in certificate
def test_default_key_type(context):
"""Test default key type is RSA"""
certname = context.get_domain('renew')
context.certbot([
'certonly',
'--cert-name', certname, '-d', certname
])
filename = join(context.config_dir, 'archive/{0}/privkey1.pem').format(certname)
assert_rsa_key(filename)
def test_default_curve_type(context):
"""test that the curve used when not specifying any is secp256r1"""
certname = context.get_domain('renew')
context.certbot([
'--key-type', 'ecdsa', '--cert-name', certname, '-d', certname
])
key1 = join(context.config_dir, 'archive/{0}/privkey1.pem'.format(certname))
assert_elliptic_key(key1, SECP256R1)
def test_renew_with_ec_keys(context):
"""Test proper renew with updated private key complexity."""
certname = context.get_domain('renew')
context.certbot([
'certonly',
'--cert-name', certname,
'--key-type', 'ecdsa', '--elliptic-curve', 'secp256r1',
'--force-renewal', '-d', certname,
])
key1 = join(context.config_dir, "archive", certname, 'privkey1.pem')
assert 200 < os.stat(key1).st_size < 250 # ec keys of 256 bits are ~225 bytes
assert_elliptic_key(key1, SECP256R1)
assert_cert_count_for_lineage(context.config_dir, certname, 1)
context.certbot(['renew', '--elliptic-curve', 'secp384r1'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
assert_elliptic_key(key2, SECP384R1)
assert 280 < os.stat(key2).st_size < 320 # ec keys of 384 bits are ~310 bytes
# We expect this command to fail: without --key-type specified, Certbot must
# error out rather than change an existing certificate's key type without
# explicit user consent (given by specifying both --cert-name and --key-type).
with pytest.raises(subprocess.CalledProcessError):
context.certbot([
'certonly',
'--force-renewal',
'-d', certname
])
# We expect the previous requirement that both --cert-name and --key-type
# be set not to apply to the renew subcommand.
context.certbot(['renew', '--force-renewal', '--key-type', 'rsa'])
assert_cert_count_for_lineage(context.config_dir, certname, 3)
key3 = join(context.config_dir, 'archive', certname, 'privkey3.pem')
assert_rsa_key(key3)
def test_ocsp_must_staple(context):
"""Test that OCSP Must-Staple is correctly set in the generated certificate."""
if context.acme_server == 'pebble':
@@ -620,8 +542,7 @@ def test_revoke_multiple_lineages(context):
'revoke', '--cert-path', join(context.config_dir, 'live', cert1, 'cert.pem')
])
with open(join(context.workspace, 'logs', 'letsencrypt.log'), 'r') as f:
assert 'Not deleting revoked certs due to overlapping archive dirs' in f.read()
assert 'Not deleting revoked certs due to overlapping archive dirs' in output
def test_wildcard_certificates(context):
@@ -707,31 +628,3 @@ def test_dry_run_deactivate_authzs(context):
context.certbot(args)
with open(join(context.workspace, 'logs', 'letsencrypt.log'), 'r') as f:
assert log_line in f.read(), 'Second order should have been recreated due to authz reuse'
def test_preferred_chain(context):
"""Test that --preferred-chain results in the correct chain.pem being produced"""
try:
issuers = misc.get_acme_issuers(context)
except NotImplementedError:
pytest.skip('This ACME server does not support alternative issuers.')
names = [i.issuer.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value \
for i in issuers]
domain = context.get_domain('preferred-chain')
cert_path = join(context.config_dir, 'live', domain, 'chain.pem')
conf_path = join(context.config_dir, 'renewal', '{}.conf'.format(domain))
for (requested, expected) in [(n, n) for n in names] + [('nonexistent', names[0])]:
args = ['certonly', '--cert-name', domain, '-d', domain,
'--preferred-chain', requested, '--force-renewal']
context.certbot(args)
dumped = misc.read_certificate(cert_path)
assert 'Issuer: CN={}'.format(expected) in dumped, \
'Expected chain issuer to be {} when preferring {}'.format(expected, requested)
with open(conf_path, 'r') as f:
assert 'preferred_chain = {}'.format(requested) in f.read(), \
'Expected preferred_chain to be set in renewal config'
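Outside the test harness, the behaviour exercised above corresponds roughly to the following direct CLI call (a sketch only: the domain and chain name are placeholders, not values from this diff):
import subprocess

subprocess.check_call([
    'certbot', 'certonly', '--cert-name', 'example.com', '-d', 'example.com',
    '--preferred-chain', 'ISRG Root X1', '--force-renewal',
])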

View File

@@ -12,8 +12,6 @@ import subprocess
import sys
from certbot_integration_tests.utils import acme_server as acme_lib
from certbot_integration_tests.utils import dns_server as dns_lib
from certbot_integration_tests.utils.dns_server import DNSServer
def pytest_addoption(parser):
@@ -25,10 +23,6 @@ def pytest_addoption(parser):
choices=['boulder-v1', 'boulder-v2', 'pebble'],
help='select the ACME server to use (boulder-v1, boulder-v2, '
'pebble), defaulting to pebble')
parser.addoption('--dns-server', default='challtestsrv',
choices=['bind', 'challtestsrv'],
help='select the DNS server to use (bind, challtestsrv), '
'defaulting to challtestsrv')
def pytest_configure(config):
@@ -38,7 +32,7 @@ def pytest_configure(config):
"""
if not hasattr(config, 'slaveinput'): # If true, this is the primary node
with _print_on_err():
_setup_primary_node(config)
config.acme_xdist = _setup_primary_node(config)
def pytest_configure_node(node):
@@ -47,7 +41,6 @@ def pytest_configure_node(node):
:param node: current worker node
"""
node.slaveinput['acme_xdist'] = node.config.acme_xdist
node.slaveinput['dns_xdist'] = node.config.dns_xdist
@contextlib.contextmanager
@@ -68,18 +61,12 @@ def _print_on_err():
def _setup_primary_node(config):
"""
Setup the environment for integration tests.
This function will:
Will:
- check runtime compatibility (Docker, docker-compose, Nginx)
- create a temporary workspace and the persistent GIT repositories space
- configure and start a DNS server using Docker, if configured
- configure and start paralleled ACME CA servers using Docker
- transfer ACME CA and DNS servers configurations to pytest nodes using env variables
This function modifies `config` by injecting the ACME CA and DNS server configurations,
in addition to cleanup functions for those servers.
:param config: Configuration of the pytest primary node. Is modified by this function.
- transfer ACME CA servers configurations to pytest nodes using env variables
:param config: Configuration of the pytest primary node
"""
# Check for runtime compatibility: some tools are required to be available in PATH
if 'boulder' in config.option.acme_server:
@@ -99,26 +86,11 @@ def _setup_primary_node(config):
workers = ['primary'] if not config.option.numprocesses\
else ['gw{0}'.format(i) for i in range(config.option.numprocesses)]
# If a non-default DNS server is configured, start it and feed it to the ACME server
dns_server = None
acme_dns_server = None
if config.option.dns_server == 'bind':
dns_server = dns_lib.DNSServer(workers)
config.add_cleanup(dns_server.stop)
print('DNS xdist config:\n{0}'.format(dns_server.dns_xdist))
dns_server.start()
acme_dns_server = '{}:{}'.format(
dns_server.dns_xdist['address'],
dns_server.dns_xdist['port']
)
# By calling setup_acme_server we ensure that all necessary acme server instances will be
# fully started. This runtime is reflected by the acme_xdist returned.
acme_server = acme_lib.ACMEServer(config.option.acme_server, workers,
dns_server=acme_dns_server)
acme_server = acme_lib.ACMEServer(config.option.acme_server, workers)
config.add_cleanup(acme_server.stop)
print('ACME xdist config:\n{0}'.format(acme_server.acme_xdist))
acme_server.start()
config.acme_xdist = acme_server.acme_xdist
config.dns_xdist = dns_server.dns_xdist if dns_server else None
return acme_server.acme_xdist
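On the consumer side, the xdist hand-off described in the docstring above is read back the same way the rfc2136 context shown later in this diff does it; a sketch of a worker-safe accessor:
def get_acme_xdist(request):
    """Return the ACME server configuration injected by the primary node."""
    if hasattr(request.config, 'slaveinput'):  # pytest-xdist worker node
        return request.config.slaveinput['acme_xdist']
    return request.config.acme_xdist  # primary node (no workers)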

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""General purpose nginx test configuration generator."""
import getpass
@@ -43,8 +42,6 @@ events {{
worker_connections 1024;
}}
# “This comment contains valid Unicode”.
http {{
# Set an array of temp, cache and log file options that will otherwise default to
# restricted locations accessible only to root.
@@ -54,61 +51,61 @@ http {{
#scgi_temp_path {nginx_root}/scgi_temp;
#uwsgi_temp_path {nginx_root}/uwsgi_temp;
access_log {nginx_root}/error.log;
# This should be turned off in a Virtualbox VM, as it can cause some
# interesting issues with data corruption in delivered files.
sendfile off;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
#include /etc/nginx/mime.types;
index index.html index.htm index.php;
log_format main '$remote_addr - $remote_user [$time_local] $status '
'"$request" $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
default_type application/octet-stream;
server {{
# IPv4.
listen {http_port} {default_server};
# IPv6.
listen [::]:{http_port} {default_server};
server_name nginx.{wtf_prefix}.wtf nginx2.{wtf_prefix}.wtf;
root {nginx_webroot};
location / {{
# First attempt to serve request as file, then as directory, then fall
# back to index.html.
try_files $uri $uri/ /index.html;
}}
}}
server {{
listen {http_port};
listen [::]:{http_port};
server_name nginx3.{wtf_prefix}.wtf;
root {nginx_webroot};
location /.well-known/ {{
return 404;
}}
return 301 https://$host$request_uri;
}}
server {{
listen {other_port};
listen [::]:{other_port};
server_name nginx4.{wtf_prefix}.wtf nginx5.{wtf_prefix}.wtf;
}}
server {{
listen {http_port};
listen [::]:{http_port};

View File

@@ -1,64 +0,0 @@
from contextlib import contextmanager
from pytest import skip
from pkg_resources import resource_filename
import tempfile
from certbot_integration_tests.certbot_tests import context as certbot_context
from certbot_integration_tests.utils import certbot_call
class IntegrationTestsContext(certbot_context.IntegrationTestsContext):
"""Integration test context for certbot-dns-rfc2136"""
def __init__(self, request):
super(IntegrationTestsContext, self).__init__(request)
self.request = request
self._dns_xdist = None
if hasattr(request.config, 'slaveinput'): # Worker node
self._dns_xdist = request.config.slaveinput['dns_xdist']
else: # Primary node
self._dns_xdist = request.config.dns_xdist
def certbot_test_rfc2136(self, args):
"""
Main command to execute certbot using the RFC2136 DNS authenticator.
:param list args: list of arguments to pass to Certbot
"""
command = ['--authenticator', 'dns-rfc2136', '--dns-rfc2136-propagation-seconds', '2']
command.extend(args)
return certbot_call.certbot_test(
command, self.directory_url, self.http_01_port, self.tls_alpn_01_port,
self.config_dir, self.workspace, force_renew=True)
@contextmanager
def rfc2136_credentials(self, label='default'):
# type: (str) -> str
"""
Produces the contents of a certbot-dns-rfc2136 credentials file.
:param str label: which RFC2136 credential to use
:yields: Path to credentials file
:rtype: str
"""
src_file = resource_filename('certbot_integration_tests',
'assets/bind-config/rfc2136-credentials-{}.ini.tpl'
.format(label))
contents = None
with open(src_file, 'r') as f:
contents = f.read().format(
server_address=self._dns_xdist['address'],
server_port=self._dns_xdist['port']
)
with tempfile.NamedTemporaryFile('w+', prefix='rfc2136-creds-{}'.format(label),
suffix='.ini', dir=self.workspace) as f:
f.write(contents)
f.flush()
yield f.name
def skip_if_no_bind9_server(self):
"""Skips the test if there was no RFC2136-capable DNS server configured
in the test environment"""
if not self._dns_xdist:
skip('No RFC2136-capable DNS server is configured')

View File

@@ -1,25 +0,0 @@
"""Module executing integration tests against Certbot with the RFC2136 DNS authenticator."""
import pytest
from certbot_integration_tests.rfc2136_tests import context as rfc2136_context
@pytest.fixture()
def context(request):
# Fixture request is a built-in pytest fixture describing current test request.
integration_test_context = rfc2136_context.IntegrationTestsContext(request)
try:
yield integration_test_context
finally:
integration_test_context.cleanup()
@pytest.mark.parametrize('domain', [('example.com'), ('sub.example.com')])
def test_get_certificate(domain, context):
context.skip_if_no_bind9_server()
with context.rfc2136_credentials() as creds:
context.certbot_test_rfc2136([
'certonly', '--dns-rfc2136-credentials', creds,
'-d', domain, '-d', '*.{}'.format(domain)
])

View File

@@ -2,12 +2,10 @@
"""Module to setup an ACME CA server environment able to run multiple tests in parallel"""
from __future__ import print_function
import argparse
import errno
import json
import os
from os.path import join
import re
import shutil
import subprocess
import sys
@@ -34,14 +32,13 @@ class ACMEServer(object):
ACMEServer is also a context manager, and so can be used to ensure ACME server is started/stopped
upon context enter/exit.
"""
def __init__(self, acme_server, nodes, http_proxy=True, stdout=False, dns_server=None):
def __init__(self, acme_server, nodes, http_proxy=True, stdout=False):
"""
Create an ACMEServer instance.
:param str acme_server: the type of acme server used (boulder-v1, boulder-v2 or pebble)
:param list nodes: list of node names that will be setup by pytest xdist
:param bool http_proxy: if False do not start the HTTP proxy
:param bool stdout: if True stream all subprocesses stdout to standard stdout
:param str dns_server: if set, Pebble/Boulder will use it to resolve domains
"""
self._construct_acme_xdist(acme_server, nodes)
@@ -50,7 +47,6 @@ class ACMEServer(object):
self._workspace = tempfile.mkdtemp()
self._processes = []
self._stdout = sys.stdout if stdout else open(os.devnull, 'w')
self._dns_server = dns_server
def start(self):
"""Start the test stack"""
@@ -134,25 +130,19 @@ class ACMEServer(object):
environ['PEBBLE_VA_NOSLEEP'] = '1'
environ['PEBBLE_WFE_NONCEREJECT'] = '0'
environ['PEBBLE_AUTHZREUSE'] = '100'
environ['PEBBLE_ALTERNATE_ROOTS'] = str(PEBBLE_ALTERNATE_ROOTS)
if self._dns_server:
dns_server = self._dns_server
else:
dns_server = '127.0.0.1:8053'
self._launch_process(
[challtestsrv_path, '-management', ':{0}'.format(CHALLTESTSRV_PORT),
'-defaultIPv6', '""', '-defaultIPv4', '127.0.0.1', '-http01', '""',
'-tlsalpn01', '""', '-https01', '""'])
self._launch_process(
[pebble_path, '-config', pebble_config_path, '-dnsserver', dns_server, '-strict'],
[pebble_path, '-config', pebble_config_path, '-dnsserver', '127.0.0.1:8053', '-strict'],
env=environ)
# pebble_ocsp_server is imported here and not at the top of module in order to avoid a
# useless ImportError, in the case where cryptography dependency is too old to support ocsp,
# but Boulder is used instead of Pebble, so pebble_ocsp_server is not used. This is the
# typical situation of integration-certbot-oldest tox testenv.
self._launch_process(
[challtestsrv_path, '-management', ':{0}'.format(CHALLTESTSRV_PORT), '-defaultIPv6', '""',
'-defaultIPv4', '127.0.0.1', '-http01', '""', '-tlsalpn01', '""', '-https01', '""'])
# pebble_ocsp_server is imported here and not at the top of module in order to avoid a useless
# ImportError, in the case where cryptography dependency is too old to support ocsp, but
# Boulder is used instead of Pebble, so pebble_ocsp_server is not used. This is the typical
# situation of integration-certbot-oldest tox testenv.
from certbot_integration_tests.utils import pebble_ocsp_server
self._launch_process([sys.executable, pebble_ocsp_server.__file__])
@@ -176,15 +166,6 @@ class ACMEServer(object):
os.rename(join(instance_path, 'test/rate-limit-policies-b.yml'),
join(instance_path, 'test/rate-limit-policies.yml'))
if self._dns_server:
# Change Boulder config to use the provided DNS server
for suffix in ["", "-remote-a", "-remote-b"]:
with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'r') as f:
config = json.loads(f.read())
config['va']['dnsResolvers'] = [self._dns_server]
with open(join(instance_path, 'test/config/va{}.json'.format(suffix)), 'w') as f:
f.write(json.dumps(config, indent=2, separators=(',', ': ')))
try:
# Launch the Boulder server
self._launch_process(['docker-compose', 'up', '--force-recreate'], cwd=instance_path)
@@ -193,11 +174,10 @@ class ACMEServer(object):
print('=> Waiting for boulder instance to respond...')
misc.check_until_timeout(self.acme_xdist['directory_url'], attempts=300)
if not self._dns_server:
# Configure challtestsrv to answer any A record request with ip of the docker host.
response = requests.post('http://localhost:{0}/set-default-ipv4'.format(CHALLTESTSRV_PORT),
json={'ip': '10.77.77.1'})
response.raise_for_status()
# Configure challtestsrv to answer any A record request with ip of the docker host.
response = requests.post('http://localhost:{0}/set-default-ipv4'.format(CHALLTESTSRV_PORT),
json={'ip': '10.77.77.1'})
response.raise_for_status()
except BaseException:
# If we failed to set up boulder, print its logs.
print('=> Boulder setup failed. Boulder logs are:')
@@ -227,19 +207,14 @@ class ACMEServer(object):
def main():
parser = argparse.ArgumentParser(
description='CLI tool to start a local instance of Pebble or Boulder CA server.')
parser.add_argument('--server-type', '-s',
choices=['pebble', 'boulder-v1', 'boulder-v2'], default='pebble',
help='type of CA server to start: can be Pebble or Boulder '
'(in ACMEv1 or ACMEv2 mode), Pebble is used if not set.')
parser.add_argument('--dns-server', '-d',
help='specify the DNS server as `IP:PORT` to use by '
'Pebble; if not specified, a local mock DNS server will be used to '
'resolve domains to localhost.')
args = parser.parse_args()
args = sys.argv[1:]
server_type = args[0] if args else 'pebble'
possible_values = ('pebble', 'boulder-v1', 'boulder-v2')
if server_type not in possible_values:
raise ValueError('Invalid server value {0}, should be one of {1}'
.format(server_type, possible_values))
acme_server = ACMEServer(args.server_type, [], http_proxy=False, stdout=True, dns_server=args.dns_server)
acme_server = ACMEServer(server_type, [], http_proxy=False, stdout=True)
try:
with acme_server as acme_xdist:

View File

@@ -7,4 +7,3 @@ BOULDER_V2_DIRECTORY_URL = 'http://localhost:4001/directory'
PEBBLE_DIRECTORY_URL = 'https://localhost:14000/dir'
PEBBLE_MANAGEMENT_URL = 'https://localhost:15000'
MOCK_OCSP_SERVER_PORT = 4002
PEBBLE_ALTERNATE_ROOTS = 2

View File

@@ -1,144 +0,0 @@
#!/usr/bin/env python
"""Module to setup an RFC2136-capable DNS server"""
from __future__ import print_function
import os
import os.path
from pkg_resources import resource_filename
import shutil
import socket
import subprocess
import sys
import tempfile
import time
BIND_DOCKER_IMAGE = 'internetsystemsconsortium/bind9:9.16'
BIND_BIND_ADDRESS = ('127.0.0.1', 45953)
# A TCP DNS message which is a query for '. CH A' transaction ID 0xcb37. This is used
# by _wait_until_ready to check that BIND is responding without depending on dnspython.
BIND_TEST_QUERY = bytearray.fromhex('0011cb37000000010000000000000000010003')
class DNSServer(object):
"""
DNSServer configures and handles the lifetime of an RFC2136-capable server.
DNSServer provides access to the dns_xdist parameter, listing the address and port
to use for each pytest node.
At this time, DNSServer should only be used with a single node, but it may be expanded
in the future to support parallelization (https://github.com/certbot/certbot/issues/8455).
"""
def __init__(self, nodes, show_output=False):
"""
Create an DNSServer instance.
:param list nodes: list of node names that will be setup by pytest xdist
:param bool show_output: if True, print the output of the DNS server
"""
self.bind_root = tempfile.mkdtemp()
self.process = None
self.dns_xdist = {
'address': BIND_BIND_ADDRESS[0],
'port': BIND_BIND_ADDRESS[1]
}
# Unfortunately the BIND9 image forces everything to stderr with -g and we can't
# modify the verbosity.
self._output = sys.stderr if show_output else open(os.devnull, 'w')
def start(self):
"""Start the DNS server"""
try:
self._configure_bind()
self._start_bind()
except:
self.stop()
raise
def stop(self):
"""Stop the DNS server, and clean its resources"""
if self.process:
try:
self.process.terminate()
self.process.wait()
except BaseException as e:
print("BIND9 did not stop cleanly: {}".format(e), file=sys.stderr)
shutil.rmtree(self.bind_root, ignore_errors=True)
if self._output != sys.stderr:
self._output.close()
def _configure_bind(self):
"""Configure the BIND9 server based on the prebaked configuration"""
bind_conf_src = resource_filename('certbot_integration_tests', 'assets/bind-config')
for dir in ('conf', 'zones'):
shutil.copytree(os.path.join(bind_conf_src, dir), os.path.join(self.bind_root, dir))
def _start_bind(self):
"""Launch the BIND9 server as a Docker container"""
addr_str = '{}:{}'.format(BIND_BIND_ADDRESS[0], BIND_BIND_ADDRESS[1])
self.process = subprocess.Popen([
'docker', 'run', '--rm',
'-p', '{}:53/udp'.format(addr_str),
'-p', '{}:53/tcp'.format(addr_str),
'-v', '{}/conf:/etc/bind'.format(self.bind_root),
'-v', '{}/zones:/var/lib/bind'.format(self.bind_root),
BIND_DOCKER_IMAGE
], stdout=self._output, stderr=self._output)
if self.process.poll():
raise("BIND9 server stopped unexpectedly")
try:
self._wait_until_ready()
except:
# The container might be running even if we think it isn't
self.stop()
raise
def _wait_until_ready(self, attempts=30):
# type: (int) -> None
"""
Polls the DNS server over TCP until it gets a response, or until
it runs out of attempts and raises a ValueError.
The DNS response message must match the txn_id of the DNS query message,
but otherwise the contents are ignored.
:param int attempts: The number of attempts to make.
"""
for _ in range(attempts):
if self.process.poll():
raise ValueError('BIND9 server stopped unexpectedly')
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(5.0)
try:
sock.connect(BIND_BIND_ADDRESS)
sock.sendall(BIND_TEST_QUERY)
buf = sock.recv(1024)
# We should receive a DNS message with the same tx_id
if buf and len(buf) > 4 and buf[2:4] == BIND_TEST_QUERY[2:4]:
return
# If we got a response but it wasn't the one we wanted, wait a little
time.sleep(1)
except:
# If there was a network error, wait a little
time.sleep(1)
pass
finally:
sock.close()
raise ValueError(
'Gave up waiting for DNS server {} to respond'.format(BIND_BIND_ADDRESS))
def __enter__(self):
self.start()
return self.dns_xdist
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop()
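The class shown above is used as a context manager; a minimal standalone sketch of that usage (assumes Docker is available locally):
from certbot_integration_tests.utils.dns_server import DNSServer

# The context manager starts the BIND9 container on enter and cleans up on exit.
with DNSServer(['primary'], show_output=True) as dns_xdist:
    print('RFC2136-capable DNS server at {address}:{port}'.format(**dns_xdist))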

View File

@@ -19,30 +19,16 @@ from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import Encoding
from cryptography.hazmat.primitives.serialization import NoEncryption
from cryptography.hazmat.primitives.serialization import PrivateFormat
from cryptography.x509 import load_pem_x509_certificate
from OpenSSL import crypto
import pkg_resources
import requests
from six.moves import SimpleHTTPServer
from six.moves import socketserver
from certbot_integration_tests.utils.constants import \
PEBBLE_ALTERNATE_ROOTS, PEBBLE_MANAGEMENT_URL
RSA_KEY_TYPE = 'rsa'
ECDSA_KEY_TYPE = 'ecdsa'
def _suppress_x509_verification_warnings():
try:
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
except ImportError:
# Handle old versions of request with vendorized urllib3
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def check_until_timeout(url, attempts=30):
"""
Wait and block until given url responds with status 200, or raise an exception
@@ -51,7 +37,14 @@ def check_until_timeout(url, attempts=30):
:param int attempts: the number of times to try to connect to the URL
:raise ValueError: exception raised if unable to reach the URL
"""
_suppress_x509_verification_warnings()
try:
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
except ImportError:
# Handle old versions of request with vendorized urllib3
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
for _ in range(attempts):
time.sleep(1)
try:
@@ -279,21 +272,16 @@ def load_sample_data_path(workspace):
shutil.copytree(original, copied, symlinks=True)
if os.name == 'nt':
# Fix the symlinks on Windows if GIT is not configured to create them upon checkout
for lineage in [
'a.encryption-example.com',
'b.encryption-example.com',
'c.encryption-example.com',
]:
# Fix the symlinks on Windows since GIT is not creating them upon checkout
for lineage in ['a.encryption-example.com', 'b.encryption-example.com']:
current_live = os.path.join(copied, 'live', lineage)
for name in os.listdir(current_live):
if name != 'README':
current_file = os.path.join(current_live, name)
if not os.path.islink(current_file):
with open(current_file) as file_h:
src = file_h.read()
os.unlink(current_file)
os.symlink(os.path.join(current_live, src), current_file)
with open(current_file) as file_h:
src = file_h.read()
os.unlink(current_file)
os.symlink(os.path.join(current_live, src), current_file)
return copied
@@ -311,23 +299,3 @@ def echo(keyword, path=None):
.format(keyword))
return '{0} -c "from __future__ import print_function; print(\'{1}\')"{2}'.format(
os.path.basename(sys.executable), keyword, ' >> "{0}"'.format(path) if path else '')
def get_acme_issuers(context):
"""Gets the list of one or more issuer certificates from the ACME server used by the
context.
:param context: the testing context.
:return: the `list of x509.Certificate` representing the list of issuers.
"""
# TODO: in fact, Boulder has alternate chains in config-next/, just not yet in config/.
if context.acme_server != "pebble":
raise NotImplementedError()
_suppress_x509_verification_warnings()
issuers = []
for i in range(PEBBLE_ALTERNATE_ROOTS + 1):
request = requests.get(PEBBLE_MANAGEMENT_URL + '/intermediates/{}'.format(i), verify=False)
issuers.append(load_pem_x509_certificate(request.content, default_backend()))
return issuers

View File

@@ -1,4 +1,4 @@
from distutils.version import LooseVersion
from distutils.version import StrictVersion
import sys
from setuptools import __version__ as setuptools_version
@@ -26,7 +26,7 @@ install_requires = [
# However environment markers are supported only with setuptools >= 36.2.
# So this dependency is not added for old Linux distributions with old setuptools,
# in order to allow these systems to build certbot from sources.
if LooseVersion(setuptools_version) >= LooseVersion('36.2'):
if StrictVersion(setuptools_version) >= StrictVersion('36.2'):
install_requires.append("pywin32>=224 ; sys_platform == 'win32'")
elif 'bdist_wheel' in sys.argv[1:]:
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
@@ -40,7 +40,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
@@ -49,10 +49,10 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],

View File

@@ -1,45 +0,0 @@
"""
General conftest for pytest execution of all integration tests living
in the snap_installer_integration tests package.
As stated by the pytest documentation, a conftest module is used to set
a directory-specific configuration using built-in pytest hooks.
See https://docs.pytest.org/en/latest/reference.html#hook-reference
"""
import glob
import os
def pytest_addoption(parser):
"""
Standard pytest hook to add options to the pytest parser.
:param parser: current pytest parser that will be used on the CLI
"""
parser.addoption('--snap-folder', required=True,
help='set the folder path where snaps to test are located')
parser.addoption('--snap-arch', default='amd64',
help='set the architecture do test (default: amd64)')
parser.addoption('--allow-persistent-changes', action='store_true',
help='must be set to confirm that the test is allowed to make persistent changes on this machine')
def pytest_configure(config):
"""
Standard pytest hook used to add a configuration logic for each node of a pytest run.
:param config: the current pytest configuration
"""
if not config.option.allow_persistent_changes:
raise RuntimeError('This integration test would install the Certbot snap on your machine. '
'Please run it again with the `--allow-persistent-changes` flag set to acknowledge.')
def pytest_generate_tests(metafunc):
"""
Generate (multiple) parametrized calls to a test function.
"""
if "dns_snap_path" in metafunc.fixturenames:
snap_arch = metafunc.config.getoption('snap_arch')
snap_folder = metafunc.config.getoption('snap_folder')
snap_dns_path_list = glob.glob(os.path.join(snap_folder,
'certbot-dns-*_{0}.snap'.format(snap_arch)))
metafunc.parametrize("dns_snap_path", snap_dns_path_list)

View File

@@ -1,46 +0,0 @@
#!/usr/bin/env python3
import pytest
import subprocess
import glob
import os
import re
@pytest.fixture(autouse=True, scope="module")
def install_certbot_snap(request):
with pytest.raises(Exception):
subprocess.check_call(['certbot', '--version'])
try:
snap_folder = request.config.getoption("snap_folder")
snap_arch = request.config.getoption("snap_arch")
snap_path = glob.glob(os.path.join(snap_folder, 'certbot_*_{0}.snap'.format(snap_arch)))[0]
subprocess.check_call(['snap', 'install', '--classic', '--dangerous', snap_path])
subprocess.check_call(['certbot', '--version'])
yield
finally:
subprocess.call(['snap', 'remove', 'certbot'])
def test_dns_plugin_install(dns_snap_path):
"""
Test that each DNS plugin Certbot snap can be installed
and is usable with the Certbot snap.
"""
plugin_name = re.match(r'^certbot-(dns-\w+)_.*\.snap$',
os.path.basename(dns_snap_path)).group(1)
snap_name = 'certbot-{0}'.format(plugin_name)
assert plugin_name not in subprocess.check_output(['certbot', 'plugins', '--prepare'],
universal_newlines=True)
try:
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
subprocess.check_call(['snap', 'set', 'certbot', 'trust-plugin-with-root=ok'])
subprocess.check_call(['snap', 'connect', 'certbot:plugin', snap_name])
assert plugin_name in subprocess.check_output(['certbot', 'plugins', '--prepare'],
universal_newlines=True)
subprocess.check_call(['snap', 'connect', snap_name + ':certbot-metadata',
'certbot:certbot-metadata'])
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
finally:
subprocess.call(['snap', 'remove', snap_name])

View File

@@ -9,6 +9,8 @@ See https://docs.pytest.org/en/latest/reference.html#hook-reference
from __future__ import print_function
import os
import pytest
ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))

View File

@@ -1,18 +1,47 @@
FROM debian:buster
FROM debian:stretch
MAINTAINER Brad Warren <bmw@eff.org>
RUN apt-get update && \
apt install python3-dev python3-venv gcc libaugeas0 libssl-dev \
libffi-dev ca-certificates openssl -y
# no need to mkdir anything:
# https://docs.docker.com/reference/builder/#copy
# If <dest> doesn't exist, it is created along with all missing
# directories in its path.
WORKDIR /opt/certbot/src
# TODO: Install non-default Python versions for tox.
# TODO: Install Apache/Nginx for plugin development.
COPY letsencrypt-auto-source /opt/certbot/src/letsencrypt-auto-source
RUN /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto --os-packages-only
# We copy all contents of the build directory to allow us to easily use
# things like tools/venv3.py which expects all of our packages to be available.
COPY . .
# the above is not likely to change, so by putting it further up the
# Dockerfile we make sure we cache as much as possible
RUN tools/venv3.py
ENV PATH /opt/certbot/src/venv3/bin:$PATH
COPY certbot/setup.py certbot/README.rst certbot/CHANGELOG.md certbot/MANIFEST.in linter_plugin.py tox.cover.py tox.ini .pylintrc /opt/certbot/src/
# all of the above files are necessary for setup.py; however, the package source
# code directory has to be copied separately to a subdirectory...
# https://docs.docker.com/reference/builder/#copy: "If <src> is a
# directory, the entire contents of the directory are copied,
# including filesystem metadata. Note: The directory itself is not
# copied, just its contents." Order again matters, three files are far
# more likely to be cached than the whole project directory
COPY certbot /opt/certbot/src/certbot/
COPY acme /opt/certbot/src/acme/
COPY certbot-apache /opt/certbot/src/certbot-apache/
COPY certbot-nginx /opt/certbot/src/certbot-nginx/
COPY certbot-compatibility-test /opt/certbot/src/certbot-compatibility-test/
COPY tools /opt/certbot/src/tools
RUN VIRTUALENV_NO_DOWNLOAD=1 virtualenv -p python2 /opt/certbot/venv && \
/opt/certbot/venv/bin/pip install -U setuptools && \
/opt/certbot/venv/bin/pip install -U pip
ENV PATH /opt/certbot/venv/bin:$PATH
RUN /opt/certbot/venv/bin/python \
/opt/certbot/src/tools/pip_install_editable.py \
/opt/certbot/src/acme \
/opt/certbot/src/certbot \
/opt/certbot/src/certbot-apache \
/opt/certbot/src/certbot-nginx \
/opt/certbot/src/certbot-compatibility-test
# install in editable mode (-e) to save space: it's not possible to
# "rm -rf /opt/certbot/src" (it's stays in the underlaying image);

View File

@@ -57,7 +57,7 @@ class Proxy(configurators_common.Proxy):
def _prepare_configurator(self):
"""Prepares the Apache plugin for testing"""
for k in entrypoint.ENTRYPOINT.OS_DEFAULTS:
for k in entrypoint.ENTRYPOINT.OS_DEFAULTS.keys():
setattr(self.le_config, "apache_" + k,
entrypoint.ENTRYPOINT.OS_DEFAULTS[k])

View File

@@ -69,10 +69,11 @@ class Proxy(object):
shutil.copy(cert_path, cert)
key = os.path.join(cert_and_key_dir, "key")
shutil.copy(key_path, key)
chain = None
if chain_path:
chain = os.path.join(cert_and_key_dir, "chain")
shutil.copy(chain_path, chain)
else:
chain = None
return cert, key, chain

View File

@@ -102,10 +102,8 @@ def _create_achalls(plugin):
prefs = plugin.get_chall_pref(domain)
for chall_type in prefs:
if chall_type == challenges.HTTP01:
# challenges.HTTP01.TOKEN_SIZE is a float but os.urandom
# expects an integer.
chall = challenges.HTTP01(
token=os.urandom(int(challenges.HTTP01.TOKEN_SIZE)))
token=os.urandom(challenges.HTTP01.TOKEN_SIZE))
challb = acme_util.chall_to_challb(
chall, messages.STATUS_PENDING)
achall = achallenges.KeyAuthorizationAnnotatedChallenge(
@@ -139,7 +137,7 @@ def test_deploy_cert(plugin, temp_dir, domains):
"""Tests deploy_cert returning True if the tests are successful"""
cert = crypto_util.gen_ss_cert(util.KEY, domains)
cert_path = os.path.join(temp_dir, "cert.pem")
with open(cert_path, "wb") as f:
with open(cert_path, "w") as f:
f.write(OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM, cert))
@@ -275,7 +273,7 @@ def _dirs_are_unequal(dir1, dir2):
logger.error(str(dircmp.diff_files))
return True
for subdir in dircmp.subdirs.values():
for subdir in dircmp.subdirs.itervalues():
dircmps.append(subdir)
return False

View File

@@ -1,17 +1,13 @@
-----BEGIN CERTIFICATE-----
MIICqDCCAZACCQCRC1UKg2WfRTANBgkqhkiG9w0BAQsFADAWMRQwEgYDVQQDDAtl
eGFtcGxlLmNvbTAeFw0yMDA4MTkyMzM5MjdaFw0yMDA5MTgyMzM5MjdaMBYxFDAS
BgNVBAMMC2V4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
AQEA5tViHnJx4y+BbCb8Qz9uxsnqp1ynONR7ET/XL+M/jQ4xPeJg4L2uZ3YnogPc
WdEoey17WXBg3KRqKfg+7PqIdGqVeonSCfXhD1HoGJRsThSUJ2fK3uoQ+zGgJTWR
FYWa8Cb6xsuq0xaYtw2jaJBp+697Np60PWs4pY5FkadT50wZ0TYDnYt3NSAdn+Pt
j3cpI4ocZZ2FLiOFn+UFOaRcetGtpnU1QwvmygD9tiL7kJ55B4CWGEv6DMRQk/UE
eMUETzse1NkVlaxQ1TCd5iAfBTluiV30EpmmWa+OsXJWxCK+EEOkXD1r3CdXAldY
nRYxJrn4udrFe69QX95wiRZNXwIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCJvtDC
875CK7SKNf006gSciXsNPNSVORGPjc/5OQ23baK4iPhxftI4LGZN8773N14jWp3E
QnQLL1gZ9/G+98SlI5lm97a4m4XZyNaULbmQwRKgI22H0F1AWbvsG0SppjnhVlJ+
93ZUqSQBXgbXelFHSsNfk1AB6Kvo6+UvS8s0vkz7SfkPOZGx0b+3RJSJZnZHvYih
ggudN/jJggSgRrb+F6lpaelJE9pZsznJFb9R7mFI33AGBpQWV4r3p1ZbM1vGMqGc
4PGBzDzi28BhLBplSOPZZxqRiINQzGiQ5T2SfN06usr7EafFr6+7YKNhgrCdlVjU
thzJ5MgHZgALNXsh
MIICATCCAWoCCQCvMbKu4FHZ6zANBgkqhkiG9w0BAQsFADBFMQswCQYDVQQGEwJB
VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0
cyBQdHkgTHRkMB4XDTE1MDcyMzIzMjc1MFoXDTE2MDcyMjIzMjc1MFowRTELMAkG
A1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0
IFdpZGdpdHMgUHR5IEx0ZDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAws3o
y46PMLM9Gr68pbex0MhdPr7Cq4rRe9BBpnOuHFdF35Ak0aPrzFwVzLlGOir94U11
e5JYJDWJi+4FwLBRkOAfanjJ5GJ9BnEHSOdbtO+sv9uhbt+7iYOOUOngKSiJyUrM
i1THAE+B1CenxZ1KHRQCke708zkK8jVuxLeIAOMCAwEAATANBgkqhkiG9w0BAQsF
AAOBgQCC3LUP3MHk+IBmwHHZAZCX+6p4lop9SP6y6rDpWgnqEEeb9oFleHi2Rvzq
7gxl6nS5AsaSzfAygJ3zWKTwVAZyU4GOQ8QTK+nHk3+LO1X4cDbUlQfm5+YuwKDa
4LFKeovmrK6BiMLIc1J+MxUjLfCeVHYSdkZULTVXue0zif0BUA==
-----END CERTIFICATE-----

View File

@@ -18,7 +18,7 @@ class Validator(object):
def certificate(self, cert, name, alt_host=None, port=443):
"""Verifies the certificate presented at name is cert"""
if alt_host is None:
host = socket.gethostbyname(name).encode()
host = socket.gethostbyname(name)
elif isinstance(alt_host, six.binary_type):
host = alt_host
else:

View File

@@ -61,6 +61,7 @@ server {
server {
listen 80;
server_name random1413.example.org www.random1413.example.org;
server_name random28524.example.org www.random28524.example.org;
server_name random25266.example.org www.random25266.example.org;
server_name random26791.example.org www.random26791.example.org;

View File

@@ -30,6 +30,7 @@ server {
server_name www.random3140.example.org;
server_name random28398.example.org;
server_name random23689.example.org www.random23689.example.org;
server_name random25863.example.org www.random25863.example.org;
rewrite ^ http://random3140.example.org$request_uri permanent;
}

View File

@@ -29,5 +29,6 @@ server {
server {
server_name www.random1413.example.org;
server_name random28524.example.org www.random28524.example.org;
rewrite ^ http://random1413.example.org$request_uri permanent;
}

View File

@@ -1,11 +1,11 @@
from distutils.version import LooseVersion
from distutils.version import StrictVersion
import sys
from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
version = '1.11.0.dev0'
version = '1.6.0.dev0'
install_requires = [
'certbot',
@@ -15,7 +15,7 @@ install_requires = [
'zope.interface',
]
setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
@@ -38,7 +38,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
@@ -47,10 +47,10 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],

View File

@@ -21,8 +21,8 @@ Credentials
-----------
Use of this plugin requires a configuration file containing Cloudflare API
credentials, obtained from your
`Cloudflare dashboard <https://dash.cloudflare.com/?to=/:account/profile/api-tokens>`_.
credentials, obtained from your Cloudflare
`account page <https://dash.cloudflare.com/profile/api-tokens>`_.
Previously, Cloudflare's "Global API Key" was used for authentication, however
this key can access the entire Cloudflare API for all domains in your account,
@@ -31,8 +31,11 @@ meaning it could cause a lot of damage if leaked.
Cloudflare's newer API Tokens can be restricted to specific domains and
operations, and are therefore now the recommended authentication option.
The Token needed by Certbot requires ``Zone:DNS:Edit`` permissions for only the
zones you need certificates for.
However, due to some shortcomings in Cloudflare's implementation of Tokens,
Tokens created for Certbot currently require ``Zone:Zone:Read`` and ``Zone:DNS:Edit``
permissions for **all** zones in your account. While this is not ideal, your Token
will still have fewer permissions than the Global key, so it's still worth doing.
Hopefully Cloudflare will improve this in the future.
Using Cloudflare Tokens also requires at least version 2.3.1 of the ``cloudflare``
python module. If the version that automatically installed with this plugin is

View File

@@ -14,7 +14,7 @@ from certbot.plugins import dns_common
logger = logging.getLogger(__name__)
ACCOUNT_URL = 'https://dash.cloudflare.com/?to=/:account/profile/api-tokens'
ACCOUNT_URL = 'https://dash.cloudflare.com/profile/api-tokens'
@zope.interface.implementer(interfaces.IAuthenticator)
@@ -118,7 +118,7 @@ class _CloudflareClient(object):
code = int(e)
hint = None
if code == 1009:
if code == 9109:
hint = 'Does your API token have "Zone:DNS:Edit" permissions?'
logger.error('Encountered CloudFlareAPIError adding TXT record: %d %s', e, e)
@@ -210,22 +210,11 @@ class _CloudflareClient(object):
logger.debug('Found zone_id of %s for %s using name %s', zone_id, domain, zone_name)
return zone_id
if msg is not None:
if 'com.cloudflare.api.account.zone.list' in msg:
raise errors.PluginError('Unable to determine zone_id for {0} using zone names: '
'{1}. Please confirm that the domain name has been '
'entered correctly and your Cloudflare Token has access '
'to the domain.'.format(domain, zone_name_guesses))
else:
raise errors.PluginError('Unable to determine zone_id for {0} using zone names: '
'{1}. The error from Cloudflare was: {2} {3}.'
.format(domain, zone_name_guesses, code, msg))
else:
raise errors.PluginError('Unable to determine zone_id for {0} using zone names: '
'{1}. Please confirm that the domain name has been '
'entered correctly and is already associated with the '
'supplied Cloudflare account.'
.format(domain, zone_name_guesses))
raise errors.PluginError('Unable to determine zone_id for {0} using zone names: {1}. '
'Please confirm that the domain name has been entered correctly '
'and is already associated with the supplied Cloudflare account.{2}'
.format(domain, zone_name_guesses, ' The error from Cloudflare was:'
' {0} {1}'.format(code, msg) if code is not None else ''))
def _find_txt_record_id(self, zone_id, record_name, record_content):
"""

View File

@@ -93,7 +93,7 @@ todo_include_todos = False
# a list of builtin themes.
#
# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally

View File

@@ -22,7 +22,7 @@ if errorlevel 9009 (
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
echo.http://sphinx-doc.org/
exit /b 1
)

View File

@@ -1,33 +1,24 @@
from distutils.version import LooseVersion
import os
from distutils.version import StrictVersion
import sys
from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.11.0.dev0'
version = '1.6.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.1.0',
'cloudflare>=1.5.1',
'setuptools',
'zope.interface',
]
if not os.environ.get('SNAP_BUILD'):
install_requires.extend([
'acme>=0.29.0',
'certbot>=1.1.0',
])
elif 'bdist_wheel' in sys.argv[1:]:
raise RuntimeError('Unset SNAP_BUILD when building wheels '
'to include certbot dependencies.')
if os.environ.get('SNAP_BUILD'):
install_requires.append('packaging')
setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
setuptools_known_environment_markers = (StrictVersion(setuptools_version) >= StrictVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
@@ -41,6 +32,20 @@ docs_extras = [
'sphinx_rtd_theme',
]
class PyTest(TestCommand):
user_options = []
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''
def run_tests(self):
import shlex
# import here, because outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
name='certbot-dns-cloudflare',
version=version,
@@ -49,7 +54,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -60,10 +65,10 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
@@ -83,4 +88,7 @@ setup(
'dns-cloudflare = certbot_dns_cloudflare._internal.dns_cloudflare:Authenticator',
],
},
tests_require=["pytest"],
test_suite='certbot_dns_cloudflare',
cmdclass={"test": PyTest},
)

View File

@@ -133,7 +133,7 @@ class CloudflareClientTest(unittest.TestCase):
def test_add_txt_record_error(self):
self.cf.zones.get.return_value = [{'id': self.zone_id}]
self.cf.zones.dns_records.post.side_effect = CloudFlare.exceptions.CloudFlareAPIError(1009, '', '')
self.cf.zones.dns_records.post.side_effect = CloudFlare.exceptions.CloudFlareAPIError(9109, '', '')
self.assertRaises(
errors.PluginError,
@@ -175,12 +175,6 @@ class CloudflareClientTest(unittest.TestCase):
self.cloudflare_client.add_txt_record,
DOMAIN, self.record_name, self.record_content, self.record_ttl)
self.cf.zones.get.side_effect = CloudFlare.exceptions.CloudFlareAPIError(0, 'com.cloudflare.api.account.zone.list', '')
self.assertRaises(
errors.PluginError,
self.cloudflare_client.add_txt_record,
DOMAIN, self.record_name, self.record_content, self.record_ttl)
def test_del_txt_record(self):
self.cf.zones.get.return_value = [{'id': self.zone_id}]
self.cf.zones.dns_records.get.return_value = [{'id': self.record_id}]

View File

@@ -93,7 +93,7 @@ todo_include_todos = False
# a list of builtin themes.
#
# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally

Some files were not shown because too many files have changed in this diff.