Compare commits


6 Commits

Author SHA1 Message Date
Brad Warren
cc85d309a9 let apacheconftest handle deps 2018-07-05 18:38:09 -07:00
Brad Warren
c1b4f21325 Revert "We don't need to run dpkg -s in before_install."
This reverts commit e5d35099a7.
2018-07-05 18:37:27 -07:00
Brad Warren
29a75eb8a7 Upgrade Python 3.6 tests to 3.7.
Let's continue the approach of testing on the oldest and newest versions of Python 3. We will continue testing on Python 3.6 in the nightly tests.
2018-07-05 17:52:34 -07:00
Brad Warren
309a70c3fe Remove augeas sources.
We only needed it for Ubuntu Precise, which is dead, and it doesn't work in Ubuntu Xenial.
2018-07-05 17:51:23 -07:00
Brad Warren
e5d35099a7 We don't need to run dpkg -s in before_install. 2018-07-05 17:51:05 -07:00
Brad Warren
9fade9c85c Remove apacheconftest packages.
The apacheconftests handle installing Apache dependencies, so let's remove it from the general case.
2018-07-05 17:48:19 -07:00
490 changed files with 9597 additions and 20212 deletions


@@ -1,18 +0,0 @@
coverage:
status:
project:
default: off
linux:
flags: linux
# Fixed target instead of auto set by #7173, can
# be removed when flags in Codecov are added back.
target: 97.5
threshold: 0.1
base: auto
windows:
flags: windows
# Fixed target instead of auto set by #7173, can
# be removed when flags in Codecov are added back.
target: 97.6
threshold: 0.1
base: auto

.github/stale.yml vendored (35 changed lines)

@@ -1,35 +0,0 @@
# Configuration for https://github.com/marketplace/stale
# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 365
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
# When changing this value, be sure to also update markComment below.
daysUntilClose: 30
# Ignore issues with an assignee (defaults to false)
exemptAssignees: true
# Label to use when marking as stale
staleLabel: needs-update
# Comment to post when marking as stale. Set to `false` to disable
markComment: >
We've made a lot of changes to Certbot since this issue was opened. If you
still have this issue with an up-to-date version of Certbot, can you please
add a comment letting us know? This helps us to better see what issues are
still affecting our users. If there is no activity in the next 30 days, this
issue will be automatically closed.
# Comment to post when closing a stale Issue or Pull Request.
closeComment: >
This issue has been closed due to lack of activity, but if you think it
should be reopened, please open a new issue with a link to this one and we'll
take a look.
# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 1
# Don't mark pull requests as stale.
only: issues

.gitignore vendored (9 changed lines)

@@ -6,8 +6,7 @@ dist*/
/venv*/
/kgs/
/.tox/
/releases*/
/log*
/releases/
letsencrypt.log
certbot.log
letsencrypt-auto-source/letsencrypt-auto.sig.lzma.base64
@@ -40,12 +39,6 @@ tests/letstest/venv/
# pytest cache
.cache
.mypy_cache/
.pytest_cache/
# docker files
.docker
# certbot tests
.certbot_test_workspace
**/assets/pebble*
**/assets/challtestsrv*


@@ -251,7 +251,7 @@ ignored-modules=pkg_resources,confargparse,argparse,six.moves,six.moves.urllib
# List of class names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=Field,Header,JWS,closing
ignored-classes=SQLObject
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
@@ -304,7 +304,7 @@ max-args=6
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=(unused)?_.*|dummy
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15


@@ -4,11 +4,64 @@ cache:
directories:
- $HOME/.cache/pip
before_install:
- '([ $TRAVIS_OS_NAME == linux ] && dpkg -s libaugeas0) || (brew update && brew install augeas python3 && brew upgrade python && brew link python)'
before_script:
- 'if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then ulimit -n 1024 ; fi'
# On Travis, the fastest parallelization for integration tests has proved to be 4.
- 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
- export TOX_TESTENV_PASSENV=TRAVIS
- 'if [ $TRAVIS_OS_NAME = osx ] ; then ulimit -n 1024 ; fi'
matrix:
include:
- python: "2.7"
env: TOXENV=py27_install BOULDER_INTEGRATION=v1
sudo: required
services: docker
- python: "2.7"
env: TOXENV=py27_install BOULDER_INTEGRATION=v2
sudo: required
services: docker
- python: "2.7"
env: TOXENV=cover FYI="this also tests py27"
- sudo: required
env: TOXENV=nginx_compat
services: docker
before_install:
addons:
- python: "2.7"
env: TOXENV=lint
- python: "3.4"
env: TOXENV=mypy
- python: "3.5"
env: TOXENV=mypy
- python: "2.7"
env: TOXENV='py27-{acme,apache,certbot,dns,nginx}-oldest'
sudo: required
services: docker
- python: "3.4"
env: TOXENV=py34
sudo: required
services: docker
- python: "3.7"
dist: xenial
env: TOXENV=py37
sudo: required
services: docker
- sudo: required
env: TOXENV=apache_compat
services: docker
before_install:
addons:
- sudo: required
env: TOXENV=le_auto_trusty
services: docker
before_install:
addons:
- python: "2.7"
env: TOXENV=apacheconftest
sudo: required
- python: "2.7"
env: TOXENV=nginxroundtrip
# Only build pushes to the master branch, PRs, and branches beginning with
# `test-` or of the form `digit(s).digit(s).x`. This reduces the number of
@@ -16,242 +69,10 @@ before_script:
# is a cap on the number of simultaneous runs.
branches:
only:
# apache-parser-v2 is a temporary branch for doing work related to
# rewriting the parser in the Apache plugin.
- apache-parser-v2
- master
- /^\d+\.\d+\.x$/
- /^test-.*$/
# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
not-on-master: &not-on-master
if: NOT (type = push AND branch = master)
# Jobs for the extended test suite are executed for cron jobs and pushes to
# non-development branches. See the explanation for apache-parser-v2 above.
extended-test-suite: &extended-test-suite
if: type = cron OR (type = push AND branch NOT IN (apache-parser-v2, master))
matrix:
include:
# Main test suite
- python: "2.7"
env: ACME_SERVER=pebble TOXENV=integration
sudo: required
services: docker
<<: *not-on-master
# This job is always executed, including on master
- python: "2.7"
env: TOXENV=py27-cover FYI="py27 tests + code coverage"
- python: "2.7"
env: TOXENV=lint
<<: *not-on-master
- python: "3.4"
env: TOXENV=mypy
<<: *not-on-master
- python: "3.5"
env: TOXENV=mypy
<<: *not-on-master
- python: "2.7"
# Ubuntu Trusty or older must be used because the oldest version of
# cryptography we support cannot be compiled against the version of
# OpenSSL in Xenial or newer.
dist: trusty
env: TOXENV='py27-{acme,apache,certbot,dns,nginx}-oldest'
sudo: required
services: docker
<<: *not-on-master
- python: "3.4"
env: TOXENV=py34
sudo: required
services: docker
<<: *not-on-master
- python: "3.7"
dist: xenial
env: TOXENV=py37
sudo: required
services: docker
<<: *not-on-master
- sudo: required
env: TOXENV=apache_compat
services: docker
before_install:
addons:
<<: *not-on-master
- sudo: required
env: TOXENV=le_auto_xenial
services: docker
<<: *not-on-master
- python: "2.7"
env: TOXENV=apacheconftest-with-pebble
sudo: required
services: docker
<<: *not-on-master
- python: "2.7"
env: TOXENV=nginxroundtrip
<<: *not-on-master
# Extended test suite on cron jobs and pushes to tested branches other than master
- sudo: required
env: TOXENV=nginx_compat
services: docker
before_install:
addons:
<<: *extended-test-suite
- python: "2.7"
env:
- TOXENV=travis-test-farm-apache2
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
<<: *extended-test-suite
- python: "2.7"
env:
- TOXENV=travis-test-farm-leauto-upgrades
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
git:
depth: false # This is needed to have the history to checkout old versions of certbot-auto.
<<: *extended-test-suite
- python: "2.7"
env:
- TOXENV=travis-test-farm-certonly-standalone
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
<<: *extended-test-suite
- python: "2.7"
env:
- TOXENV=travis-test-farm-sdists
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
<<: *extended-test-suite
- python: "3.7"
dist: xenial
env: TOXENV=py37 CERTBOT_NO_PIN=1
<<: *extended-test-suite
- python: "2.7"
env: ACME_SERVER=boulder-v1 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "2.7"
env: ACME_SERVER=boulder-v2 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "2.7"
env: ACME_SERVER=boulder-v1 TOXENV=integration-certbot-oldest
sudo: required
services: docker
<<: *extended-test-suite
- python: "2.7"
env: ACME_SERVER=boulder-v2 TOXENV=integration-certbot-oldest
sudo: required
services: docker
<<: *extended-test-suite
- python: "2.7"
env: ACME_SERVER=boulder-v1 TOXENV=integration-nginx-oldest
sudo: required
services: docker
<<: *extended-test-suite
- python: "2.7"
env: ACME_SERVER=boulder-v2 TOXENV=integration-nginx-oldest
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.4"
env: TOXENV=py34
<<: *extended-test-suite
- python: "3.5"
env: TOXENV=py35
<<: *extended-test-suite
- python: "3.6"
env: TOXENV=py36
<<: *extended-test-suite
- python: "3.7"
dist: xenial
env: TOXENV=py37
<<: *extended-test-suite
- python: "3.4"
env: ACME_SERVER=boulder-v1 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.4"
env: ACME_SERVER=boulder-v2 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.5"
env: ACME_SERVER=boulder-v1 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.5"
env: ACME_SERVER=boulder-v2 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.6"
env: ACME_SERVER=boulder-v1 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.6"
env: ACME_SERVER=boulder-v2 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.7"
dist: xenial
env: ACME_SERVER=boulder-v1 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.7"
dist: xenial
env: ACME_SERVER=boulder-v2 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- sudo: required
env: TOXENV=le_auto_jessie
services: docker
<<: *extended-test-suite
- sudo: required
env: TOXENV=le_auto_centos6
services: docker
<<: *extended-test-suite
- sudo: required
env: TOXENV=docker_dev
services: docker
addons:
apt:
packages: # don't install nginx and apache
- libaugeas0
<<: *extended-test-suite
- language: generic
env: TOXENV=py27
os: osx
# Using this osx_image is a workaround for
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
osx_image: xcode10.2
addons:
homebrew:
packages:
- augeas
- python2
<<: *extended-test-suite
- language: generic
env: TOXENV=py3
os: osx
# Using this osx_image is a workaround for
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
osx_image: xcode10.2
addons:
homebrew:
packages:
- augeas
- python3
<<: *extended-test-suite
# container-based infrastructure
sudo: false
@@ -259,6 +80,7 @@ addons:
apt:
packages: # Keep in sync with letsencrypt-auto-source/pieces/bootstrappers/deb_common.sh and Boulder.
- python-dev
- python-virtualenv
- gcc
- libaugeas0
- libssl-dev
@@ -268,25 +90,19 @@ addons:
- nginx-light
- openssl
# tools/pip_install.py is used to pin packages to a known working version
# except in tests where the environment variable CERTBOT_NO_PIN is set.
# virtualenv is listed here explicitly to make sure it is upgraded when
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
# version of virtualenv.
install: "tools/pip_install.py -U codecov tox virtualenv"
script: tox
install: "travis_retry $(command -v pip || command -v pip3) install tox coveralls"
script:
- travis_retry tox
- '[ -z "${BOULDER_INTEGRATION+x}" ] || (travis_retry tests/boulder-fetch.sh && tests/tox-boulder-integration.sh)'
after_success: '[ "$TOXENV" == "py27-cover" ] && codecov -F linux'
after_success: '[ "$TOXENV" == "cover" ] && coveralls'
notifications:
email: false
irc:
channels:
# This is set to a secure variable to prevent forks from sending
# notifications. This value was created by installing
# https://github.com/travis-ci/travis.rb and running
# `travis encrypt "chat.freenode.net#certbot-devel"`.
- secure: "EWW66E2+KVPZyIPR8ViENZwfcup4Gx3/dlimmAZE0WuLwxDCshBBOd3O8Rf6pBokEoZlXM5eDT6XdyJj8n0DLslgjO62pExdunXpbcMwdY7l1ELxX2/UbnDTE6UnPYa09qVBHNG7156Z6yE0x2lH4M9Ykvp0G0cubjPQHylAwo0="
- secure: "SGWZl3ownKx9xKVV2VnGt7DqkTmutJ89oJV9tjKhSs84kLijU6EYdPnllqISpfHMTxXflNZuxtGo0wTDYHXBuZL47w1O32W6nzuXdra5zC+i4sYQwYULUsyfOv9gJX8zWAULiK0Z3r0oho45U+FR5ZN6TPCidi8/eGU+EEPwaAw="
on_cancel: never
on_success: never
on_failure: always
use_notice: true
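For context, the TOXENV values set throughout this Travis configuration simply select tox environments; assuming tox is installed in a local development environment, roughly equivalent local runs would be (environment names taken from the config above):

    # tox reads TOXENV from the environment, like the Travis jobs do
    TOXENV=lint tox
    # or select an environment explicitly
    tox -e py27-cover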


@@ -5,7 +5,6 @@ Authors
* Aaron Zuehlke
* Ada Lovelace
* [Adam Woodbeck](https://github.com/awoodbeck)
* [Adrien Ferrand](https://github.com/adferrand)
* [Aidin Gharibnavaz](https://github.com/aidin36)
* [AJ ONeal](https://github.com/coolaj86)
* [Alcaro](https://github.com/Alcaro)
@@ -15,7 +14,6 @@ Authors
* [Alex Gaynor](https://github.com/alex)
* [Alex Halderman](https://github.com/jhalderm)
* [Alex Jordan](https://github.com/strugee)
* [Alex Zorin](https://github.com/alexzorin)
* [Amjad Mashaal](https://github.com/TheNavigat)
* [Andrew Murray](https://github.com/radarhere)
* [Anselm Levskaya](https://github.com/levskaya)
@@ -77,7 +75,6 @@ Authors
* [Fabian](https://github.com/faerbit)
* [Faidon Liambotis](https://github.com/paravoid)
* [Fan Jiang](https://github.com/tcz001)
* [Felix Lechner](https://github.com/lechner)
* [Felix Schwarz](https://github.com/FelixSchwarz)
* [Felix Yan](https://github.com/felixonmars)
* [Filip Ochnik](https://github.com/filipochnik)
@@ -162,7 +159,6 @@ Authors
* [Michael Schumacher](https://github.com/schumaml)
* [Michael Strache](https://github.com/Jarodiv)
* [Michael Sverdlin](https://github.com/sveder)
* [Michael Watters](https://github.com/blackknight36)
* [Michal Moravec](https://github.com/Majkl578)
* [Michal Papis](https://github.com/mpapis)
* [Minn Soe](https://github.com/MinnSoe)


@@ -1,621 +1,6 @@
# Certbot change log
Certbot adheres to [Semantic Versioning](https://semver.org/).
## 0.37.1 - 2019-08-08
### Fixed
* Stop disabling TLS session tickets in Apache as it caused TLS failures on
some systems.
More details about these changes can be found on our GitHub repo.
## 0.37.0 - 2019-08-07
### Added
* Turn off session tickets for apache plugin by default
* acme: Authz deactivation added to `acme` module.
### Changed
* Follow updated Mozilla recommendations for Nginx ssl_protocols, ssl_ciphers,
and ssl_prefer_server_ciphers
### Fixed
* Fix certbot-auto failures on RHEL 8.
More details about these changes can be found on our GitHub repo.
## 0.36.0 - 2019-07-11
### Added
* Turn off session tickets for nginx plugin by default
* Added missing error types from RFC8555 to acme
### Changed
* Support for Ubuntu 14.04 Trusty has been removed.
* Update the 'manage your account' help to be more generic.
* The error message when Certbot's Apache plugin is unable to modify your
Apache configuration has been improved.
* Certbot's config_changes subcommand has been deprecated and will be
removed in a future release.
* `certbot config_changes` no longer accepts a --num parameter.
* The functions `certbot.plugins.common.Installer.view_config_changes` and
`certbot.reverter.Reverter.view_config_changes` have been deprecated and will
be removed in a future release.
### Fixed
* Replace some unnecessary platform-specific line separation.
More details about these changes can be found on our GitHub repo.
## 0.35.1 - 2019-06-10
### Fixed
* Support for specifying an authoritative base domain in our dns-rfc2136 plugin
has been removed. This feature was added in our last release but had a bug
which caused the plugin to fail so the feature has been removed until it can
be added properly.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* certbot-dns-rfc2136
More details about these changes can be found on our GitHub repo.
## 0.35.0 - 2019-06-05
### Added
* dns_rfc2136 plugin now supports explicitly specifying an authoritative
base domain for cases when the automatic method does not work (e.g.
split-horizon DNS)
### Changed
*
### Fixed
* Renewal parameter `webroot_path` is always saved, avoiding some regressions
when `webroot` authenticator plugin is invoked with no challenge to perform.
* Certbot now accepts OCSP responses when an explicit authorized
responder, different from the issuer, is used to sign OCSP
responses.
* Scripts in Certbot hook directories are no longer executed when their
filenames end in a tilde.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* certbot
* certbot-dns-rfc2136
More details about these changes can be found on our GitHub repo.
## 0.34.2 - 2019-05-07
### Fixed
* certbot-auto no longer writes a check_permissions.py script at the root
of the filesystem.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
changes in this release were to certbot-auto.
More details about these changes can be found on our GitHub repo.
## 0.34.1 - 2019-05-06
### Fixed
* certbot-auto no longer prints a blank line when there are no permissions
problems.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
changes in this release were to certbot-auto.
More details about these changes can be found on our GitHub repo.
## 0.34.0 - 2019-05-01
### Changed
* Apache plugin now tries to restart httpd on Fedora using systemctl if a
configuration test error is detected. This has to be done due to the way
Fedora now generates the self signed certificate files upon first
restart.
* Updated Certbot and its plugins to improve the handling of file system permissions
on Windows as a step towards adding proper Windows support to Certbot.
* Updated urllib3 to 1.24.2 in certbot-auto.
* Removed the fallback introduced with 0.32.0 in `acme` to retry a challenge response
with a `keyAuthorization` if sending the response without this field caused a
`malformed` error to be received from the ACME server.
* Linode DNS plugin now supports api keys created from their new panel
at [cloud.linode.com](https://cloud.linode.com)
### Fixed
* Fixed Google DNS Challenge issues when private zones exist
* Added a warning noting that future versions of Certbot will automatically configure the
webserver so that all requests redirect to secure HTTPS access. You can control this
behavior and disable this warning with the --redirect and --no-redirect flags.
* certbot-auto now prints warnings when run as root with insecure file system
permissions. If you see these messages, you should fix the problem by
following the instructions at
https://community.letsencrypt.org/t/certbot-auto-deployment-best-practices/91979/,
however, these warnings can be disabled as necessary with the flag
--no-permissions-check.
* The `acme` module now uses a POST-as-GET request to retrieve the registration
from an ACME v2 server
* Convert the tsig algorithm specified in the certbot_dns_rfc2136 configuration file to
all uppercase letters before validating. This makes the value in the config case
insensitive.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* acme
* certbot
* certbot-apache
* certbot-dns-cloudflare
* certbot-dns-cloudxns
* certbot-dns-digitalocean
* certbot-dns-dnsimple
* certbot-dns-dnsmadeeasy
* certbot-dns-gehirn
* certbot-dns-google
* certbot-dns-linode
* certbot-dns-luadns
* certbot-dns-nsone
* certbot-dns-ovh
* certbot-dns-rfc2136
* certbot-dns-route53
* certbot-dns-sakuracloud
* certbot-nginx
More details about these changes can be found on our GitHub repo.
## 0.33.1 - 2019-04-04
### Fixed
* A bug causing certbot-auto to print warnings or crash on some RHEL based
systems has been resolved.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
changes in this release were to certbot-auto.
More details about these changes can be found on our GitHub repo.
## 0.33.0 - 2019-04-03
### Added
* Fedora 29+ is now supported by certbot-auto. Since Python 2.x is on a deprecation
path in Fedora, certbot-auto will install and use Python 3.x on Fedora 29+.
* CLI flag `--https-port` has been added for Nginx plugin exclusively, and replaces
`--tls-sni-01-port`. It defines the HTTPS port the Nginx plugin will use while
setting up a new SSL vhost. By default the HTTPS port is 443.
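As a rough illustration of the flag described above (the domain and port below are placeholders, not taken from this changelog):

    # ask the Nginx plugin to set up the new SSL vhost on a non-default port
    certbot --nginx --https-port 8443 -d example.com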
### Changed
* Support for TLS-SNI-01 has been removed from all official Certbot plugins.
* Attributes related to the TLS-SNI-01 challenge in `acme.challenges` and `acme.standalone`
modules are deprecated and will be removed soon.
* CLI flags `--tls-sni-01-port` and `--tls-sni-01-address` are now no-op, will
generate a deprecation warning if used, and will be removed soon.
* Options `tls-sni` and `tls-sni-01` in `--preferred-challenges` flag are now no-op,
will generate a deprecation warning if used, and will be removed soon.
* CLI flag `--standalone-supported-challenges` has been removed.
### Fixed
* Certbot uses the Python library cryptography for OCSP when cryptography>=2.5
is installed. We fixed a bug in Certbot causing it to interpret timestamps in
the OCSP response as being in the local timezone rather than UTC.
* Issue causing the default CentOS 6 TLS configuration to ignore some of the
HTTPS VirtualHosts created by Certbot. mod_ssl loading is now moved to main
http.conf for this environment where possible.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* acme
* certbot
* certbot-apache
* certbot-nginx
More details about these changes can be found on our GitHub repo.
## 0.32.0 - 2019-03-06
### Added
* If possible, Certbot uses built-in support for OCSP from recent cryptography
versions instead of the OpenSSL binary: as a consequence Certbot does not need
the OpenSSL binary to be installed anymore if cryptography>=2.5 is installed.
### Changed
* Certbot and its acme module now depend on josepy>=1.1.0 to avoid printing the
warnings described at https://github.com/certbot/josepy/issues/13.
* Apache plugin now respects CERTBOT_DOCS environment variable when adding
command line defaults.
* The running of manual plugin hooks is now always included in Certbot's log
output.
* Tests execution for certbot, certbot-apache and certbot-nginx packages now relies on pytest.
* An ACME CA server may return a "Retry-After" HTTP header on authorization polling, as
specified in the ACME protocol, to indicate when the next polling should occur. Certbot now
reads this header if set and respect its value.
* The `acme` module avoids sending the `keyAuthorization` field in the JWS
payload when responding to a challenge as the field is not included in the
current ACME protocol. To ease the migration path for ACME CA servers,
Certbot and its `acme` module will first try the request without the
`keyAuthorization` field but will temporarily retry the request with the
field included if a `malformed` error is received. This fallback will be
removed in version 0.34.0.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* acme
* certbot
* certbot-apache
* certbot-nginx
More details about these changes can be found on our GitHub repo.
## 0.31.0 - 2019-02-07
### Added
* Avoid reprocessing challenges that are already validated
when a certificate is issued.
* Support for initiating (but not solving end-to-end) TLS-ALPN-01 challenges
with the `acme` module.
### Changed
* Certbot's official Docker images are now based on Alpine Linux 3.9 rather
than 3.7. The new version comes with OpenSSL 1.1.1.
* Lexicon-based DNS plugins are now fully compatible with Lexicon 3.x (support
on 2.x branch is maintained).
* Apache plugin now attempts to configure all VirtualHosts matching requested
domain name instead of only a single one when answering the HTTP-01 challenge.
### Fixed
* Fixed accessing josepy contents through acme.jose when the full acme.jose
path is used.
* Clarify behavior for deleting certs as part of revocation.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* acme
* certbot
* certbot-apache
* certbot-dns-cloudxns
* certbot-dns-dnsimple
* certbot-dns-dnsmadeeasy
* certbot-dns-gehirn
* certbot-dns-linode
* certbot-dns-luadns
* certbot-dns-nsone
* certbot-dns-ovh
* certbot-dns-sakuracloud
More details about these changes can be found on our GitHub repo.
## 0.30.2 - 2019-01-25
### Fixed
* Update the version of setuptools pinned in certbot-auto to 40.6.3 to
solve installation problems on newer OSes.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, this
release only affects certbot-auto.
More details about these changes can be found on our GitHub repo.
## 0.30.1 - 2019-01-24
### Fixed
* Always download the pinned version of pip in pipstrap to address breakages
* Rename old,default.conf to old-and-default.conf to address commas in filenames
breaking recent versions of pip.
* Add VIRTUALENV_NO_DOWNLOAD=1 to all calls to virtualenv to address breakages
from venv downloading the latest pip
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* certbot-apache
More details about these changes can be found on our GitHub repo.
## 0.30.0 - 2019-01-02
### Added
* Added the `update_account` subcommand for account management commands.
### Changed
* Copied account management functionality from the `register` subcommand
to the `update_account` subcommand.
* Marked usage `register --update-registration` for deprecation and
removal in a future release.
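A minimal sketch of the new subcommand and the deprecated form it replaces, assuming you want to change the account contact email (the address is a placeholder):

    # update the contact email on the existing ACME account
    certbot update_account --email new-admin@example.com
    # deprecated equivalent
    certbot register --update-registration --email new-admin@example.com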
### Fixed
* Older modules in the josepy library can now be accessed through acme.jose
like it could in previous versions of acme. This is only done to preserve
backwards compatibility and support for doing this with new modules in josepy
will not be added. Users of the acme library should switch to using josepy
directly if they haven't done so already.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* acme
More details about these changes can be found on our GitHub repo.
## 0.29.1 - 2018-12-05
### Added
*
### Changed
*
### Fixed
* The default work and log directories have been changed back to
/var/lib/letsencrypt and /var/log/letsencrypt respectively.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* certbot
More details about these changes can be found on our GitHub repo.
## 0.29.0 - 2018-12-05
### Added
* Noninteractive renewals with `certbot renew` (those not started from a
terminal) now randomly sleep 1-480 seconds before beginning work in
order to spread out load spikes on the server side.
* Added External Account Binding support in cli and acme library.
Command line arguments --eab-kid and --eab-hmac-key added.
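A hedged example of the new External Account Binding flags; the directory URL, key ID, and HMAC key below are placeholders you would obtain from your CA:

    certbot register \
      --server https://acme.example.com/directory \
      --eab-kid your-key-id \
      --eab-hmac-key your-base64url-hmac-key \
      --email admin@example.com --agree-tos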
### Changed
* Private key permissioning changes: Renewal preserves existing group mode
& gid of previous private key material. Private keys for new
lineages (i.e. new certs, not renewed) default to 0o600.
### Fixed
* Update code and dependencies to clean up Resource and Deprecation Warnings.
* Only depend on imgconverter extension for Sphinx >= 1.6
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* acme
* certbot
* certbot-apache
* certbot-dns-cloudflare
* certbot-dns-digitalocean
* certbot-dns-google
* certbot-nginx
More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/62?closed=1
## 0.28.0 - 2018-11-7
### Added
* `revoke` accepts `--cert-name`, and doesn't accept both `--cert-name` and `--cert-path`.
* Use the ACMEv2 newNonce endpoint when a new nonce is needed, and newNonce is available in the directory.
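For example (the lineage name and path are placeholders), revoking by certificate name rather than by path now looks like:

    # revoke using the certificate lineage name managed by Certbot
    certbot revoke --cert-name example.com
    # previously you would have pointed at the certificate file instead
    certbot revoke --cert-path /etc/letsencrypt/live/example.com/cert.pem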
### Changed
* Removed documentation mentions of `#letsencrypt` IRC on Freenode.
* Write README to the base of (config-dir)/live directory
* `--manual` will explicitly warn users that earlier challenges should remain in place when setting up subsequent challenges.
* Warn when using deprecated acme.challenges.TLSSNI01
* Log warning about TLS-SNI deprecation in Certbot
* Stop preferring TLS-SNI in the Apache, Nginx, and standalone plugins
* OVH DNS plugin now relies on Lexicon>=2.7.14 to support HTTP proxies
* Default time the Linode plugin waits for DNS changes to propagate is now 1200 seconds.
### Fixed
* Match Nginx parser update in allowing variable names to start with `${`.
* Fix ranking of vhosts in Nginx so that all port-matching vhosts come first
* Correct OVH integration tests on machines without internet access.
* Stop caching the results of ipv6_info in http01.py
* Test fix for Route53 plugin to prevent boto3 making outgoing connections.
* The grammar used by Augeas parser in Apache plugin was updated to fix various parsing errors.
* The CloudXNS, DNSimple, DNS Made Easy, Gehirn, Linode, LuaDNS, NS1, OVH, and
Sakura Cloud DNS plugins are now compatible with Lexicon 3.0+.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* acme
* certbot
* certbot-apache
* certbot-dns-cloudxns
* certbot-dns-dnsimple
* certbot-dns-dnsmadeeasy
* certbot-dns-gehirn
* certbot-dns-linode
* certbot-dns-luadns
* certbot-dns-nsone
* certbot-dns-ovh
* certbot-dns-route53
* certbot-dns-sakuracloud
* certbot-nginx
More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/59?closed=1
## 0.27.1 - 2018-09-06
### Fixed
* Fixed parameter name in OpenSUSE overrides for default parameters in the
Apache plugin. Certbot on OpenSUSE works again.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* certbot-apache
More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/60?closed=1
## 0.27.0 - 2018-09-05
### Added
* The Apache plugin now accepts the parameter --apache-ctl which can be
used to configure the path to the Apache control script.
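A short sketch of the flag described above, assuming a non-standard apachectl location (the path and domain are placeholders):

    certbot --apache --apache-ctl /usr/local/apache2/bin/apachectl -d example.com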
### Changed
* When using `acme.client.ClientV2` (or
`acme.client.BackwardsCompatibleClientV2` with an ACME server that supports a
newer version of the ACME protocol), an `acme.errors.ConflictError` will be
raised if you try to create an ACME account with a key that has already been
used. Previously, a JSON parsing error was raised in this scenario when using
the library with Let's Encrypt's ACMEv2 endpoint.
### Fixed
* When Apache is not installed, Certbot's Apache plugin no longer prints
messages about being unable to find apachectl to the terminal when the plugin
is not selected.
* If you're using the Apache plugin with the --apache-vhost-root flag set to a
directory containing a disabled virtual host for the domain you're requesting
a certificate for, the virtual host will now be temporarily enabled if
necessary to pass the HTTP challenge.
* The documentation for the Certbot package can now be built using Sphinx 1.6+.
* You can now call `query_registration` without having to first call
`new_account` on `acme.client.ClientV2` objects.
* The requirement of `setuptools>=1.0` has been removed from `certbot-dns-ovh`.
* Names in certbot-dns-sakuracloud's tests have been updated to refer to Sakura
Cloud rather than NS1 whose plugin certbot-dns-sakuracloud was based on.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
package with changes other than its version number was:
* acme
* certbot
* certbot-apache
* certbot-dns-ovh
* certbot-dns-sakuracloud
More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/57?closed=1
## 0.26.1 - 2018-07-17
### Fixed
* Fix a bug that was triggered when users who had previously manually set `--server` to get ACMEv2 certs tried to renew ACMEv1 certs.
Despite us having broken lockstep, we are continuing to release new versions of all Certbot components during releases for the time being, however, the only package with changes other than its version number was:
* certbot
More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/58?closed=1
## 0.26.0 - 2018-07-11
### Added
* A new security enhancement which we're calling AutoHSTS has been added to
Certbot's Apache plugin. This enhancement configures your webserver to send a
HTTP Strict Transport Security header with a low max-age value that is slowly
increased over time. The max-age value is not increased to a large value
until you've successfully managed to renew your certificate. This enhancement
can be requested with the --auto-hsts flag.
* New official DNS plugins have been created for Gehirn Infrastructure Service,
Linode, OVH, and Sakura Cloud. These plugins can be found on our Docker Hub
page at https://hub.docker.com/u/certbot and on PyPI.
* The ability to reuse ACME accounts from Let's Encrypt's ACMEv1 endpoint on
Let's Encrypt's ACMEv2 endpoint has been added.
* Certbot and its components now support Python 3.7.
* Certbot's install subcommand now allows you to interactively choose which
certificate to install from the list of certificates managed by Certbot.
* Certbot now accepts the flag `--no-autorenew` which causes any obtained
certificates to not be automatically renewed when it approaches expiration.
* Support for parsing the TLS-ALPN-01 challenge has been added back to the acme
library.
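Two of the flags mentioned in this Added section, shown as hedged examples with placeholder domains:

    # request the AutoHSTS enhancement from the Apache plugin
    certbot --apache --auto-hsts -d example.com
    # obtain a certificate that Certbot will not renew automatically
    certbot certonly --standalone --no-autorenew -d example.com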
### Changed
* Certbot's default ACME server has been changed to Let's Encrypt's ACMEv2
endpoint. By default, this server will now be used for both new certificate
lineages and renewals.
* The Nginx plugin is no longer labeled as an "Alpha" version.
* The `prepare` method of Certbot's plugins is no longer called before running
"Updater" enhancements that are run on every invocation of `certbot renew`.
Despite us having broken lockstep, we are continuing to release new versions of
all Certbot components during releases for the time being, however, the only
packages with functional changes were:
* acme
* certbot
* certbot-apache
* certbot-dns-gehirn
* certbot-dns-linode
* certbot-dns-ovh
* certbot-dns-sakuracloud
* certbot-nginx
More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/55?closed=1
Certbot adheres to [Semantic Versioning](http://semver.org/).
## 0.25.1 - 2018-06-13

CHANGES.rst (new file, 8 lines)

@@ -0,0 +1,8 @@
ChangeLog
=========
To see the changes in a given release, view the issues closed in a given
release's GitHub milestone:
- `Past releases <https://github.com/certbot/certbot/milestones?state=closed>`_
- `Upcoming releases <https://github.com/certbot/certbot/milestones>`_


@@ -1 +0,0 @@
This project is governed by [EFF's Public Projects Code of Conduct](https://www.eff.org/pages/eppcode).


@@ -33,5 +33,3 @@ started. In particular, we recommend you read these sections
- [Finding issues to work on](https://certbot.eff.org/docs/contributing.html#find-issues-to-work-on)
- [Coding style](https://certbot.eff.org/docs/contributing.html#coding-style)
- [Submitting a pull request](https://certbot.eff.org/docs/contributing.html#submitting-a-pull-request)
- [EFF's Public Projects Code of Conduct](https://www.eff.org/pages/eppcode)

Dockerfile (new file, 27 lines)

@@ -0,0 +1,27 @@
FROM python:2-alpine3.7
ENTRYPOINT [ "certbot" ]
EXPOSE 80 443
VOLUME /etc/letsencrypt /var/lib/letsencrypt
WORKDIR /opt/certbot
COPY CHANGES.rst README.rst setup.py src/
COPY acme src/acme
COPY certbot src/certbot
RUN apk add --no-cache --virtual .certbot-deps \
libffi \
libssl1.0 \
openssl \
ca-certificates \
binutils
RUN apk add --no-cache --virtual .build-deps \
gcc \
linux-headers \
openssl-dev \
musl-dev \
libffi-dev \
&& pip install --no-cache-dir \
--editable /opt/certbot/src/acme \
--editable /opt/certbot/src \
&& apk del .build-deps


@@ -1,5 +1,5 @@
# This Dockerfile builds an image for development.
FROM debian:buster
FROM ubuntu:xenial
# Note: this only exposes the port to other docker containers.
EXPOSE 80 443
@@ -16,6 +16,6 @@ RUN apt-get update && \
/tmp/* \
/var/tmp/*
RUN VENV_NAME="../venv" python tools/venv.py
RUN VENV_NAME="../venv" tools/venv.sh
ENV PATH /opt/certbot/venv/bin:$PATH

Dockerfile-old (new file, 75 lines)

@@ -0,0 +1,75 @@
# https://github.com/letsencrypt/letsencrypt/pull/431#issuecomment-103659297
# it is more likely developers will already have ubuntu:trusty rather
# than e.g. debian:jessie and image size differences are negligible
FROM ubuntu:trusty
MAINTAINER Jakub Warmuz <jakub@warmuz.org>
MAINTAINER William Budington <bill@eff.org>
# Note: this only exposes the port to other docker containers. You
# still have to bind to 443@host at runtime, as per the ACME spec.
EXPOSE 443
# TODO: make sure --config-dir and --work-dir cannot be changed
# through the CLI (certbot-docker wrapper that uses standalone
# authenticator and text mode only?)
VOLUME /etc/letsencrypt /var/lib/letsencrypt
WORKDIR /opt/certbot
# no need to mkdir anything:
# https://docs.docker.com/reference/builder/#copy
# If <dest> doesn't exist, it is created along with all missing
# directories in its path.
ENV DEBIAN_FRONTEND=noninteractive
COPY letsencrypt-auto-source/letsencrypt-auto /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto
RUN /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto --os-packages-only && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* \
/tmp/* \
/var/tmp/*
# the above is not likely to change, so by putting it further up the
# Dockerfile we make sure we cache as much as possible
COPY setup.py README.rst CHANGES.rst MANIFEST.in letsencrypt-auto-source/pieces/pipstrap.py /opt/certbot/src/
# all above files are necessary for setup.py and venv setup, however,
# package source code directory has to be copied separately to a
# subdirectory...
# https://docs.docker.com/reference/builder/#copy: "If <src> is a
# directory, the entire contents of the directory are copied,
# including filesystem metadata. Note: The directory itself is not
# copied, just its contents." Order again matters, three files are far
# more likely to be cached than the whole project directory
COPY certbot /opt/certbot/src/certbot/
COPY acme /opt/certbot/src/acme/
COPY certbot-apache /opt/certbot/src/certbot-apache/
COPY certbot-nginx /opt/certbot/src/certbot-nginx/
RUN virtualenv --no-site-packages -p python2 /opt/certbot/venv
# PATH is set now so pipstrap upgrades the correct (v)env
ENV PATH /opt/certbot/venv/bin:$PATH
RUN /opt/certbot/venv/bin/python /opt/certbot/src/pipstrap.py && \
/opt/certbot/venv/bin/pip install \
-e /opt/certbot/src/acme \
-e /opt/certbot/src \
-e /opt/certbot/src/certbot-apache \
-e /opt/certbot/src/certbot-nginx
# install in editable mode (-e) to save space: it's not possible to
# "rm -rf /opt/certbot/src" (it's stays in the underlaying image);
# this might also help in debugging: you can "docker run --entrypoint
# bash" and investigate, apply patches, etc.
# set up certbot/letsencrypt wrapper to warn people about Dockerfile changes
COPY tools/docker-warning.sh /opt/certbot/bin/certbot
RUN ln -s /opt/certbot/bin/certbot /opt/certbot/bin/letsencrypt
ENV PATH /opt/certbot/bin:$PATH
ENTRYPOINT [ "certbot" ]


@@ -1,5 +1,5 @@
include README.rst
include CHANGELOG.md
include CHANGES.rst
include CONTRIBUTING.md
include LICENSE.txt
include linter_plugin.py


@@ -6,7 +6,7 @@ Anyone who has gone through the trouble of setting up a secure website knows wha
How you use Certbot depends on the configuration of your web server. The best way to get started is to use our `interactive guide <https://certbot.eff.org>`_. It generates instructions based on your configuration settings. In most cases, you'll need `root or administrator access <https://certbot.eff.org/faq/#does-certbot-require-root-administrator-privileges>`_ to your web server to run Certbot.
Certbot is meant to be run directly on your web server, not on your personal computer. If you're using a hosted service and don't have direct access to your web server, you might not be able to use Certbot. Check with your hosting provider for documentation about uploading certificates or using certificates issued by Let's Encrypt.
If you're using a hosted service and don't have direct access to your web server, you might not be able to use Certbot. Check with your hosting provider for documentation about uploading certificates or using certificates issued by Let's Encrypt.
Certbot is a fully-featured, extensible client for the Let's
Encrypt CA (or any other CA that speaks the `ACME
@@ -28,19 +28,45 @@ Contributing
If you'd like to contribute to this project please read `Developer Guide
<https://certbot.eff.org/docs/contributing.html>`_.
This project is governed by `EFF's Public Projects Code of Conduct <https://www.eff.org/pages/eppcode>`_.
.. _installation:
Installation
------------
The easiest way to install Certbot is by visiting `certbot.eff.org`_, where you can
find the correct installation instructions for many web server and OS combinations.
For more information, see `Get Certbot <https://certbot.eff.org/docs/install.html>`_.
.. _certbot.eff.org: https://certbot.eff.org/
How to run the client
---------------------
The easiest way to install and run Certbot is by visiting `certbot.eff.org`_,
where you can find the correct instructions for many web server and OS
combinations. For more information, see `Get Certbot
<https://certbot.eff.org/docs/install.html>`_.
In many cases, you can just run ``certbot-auto`` or ``certbot``, and the
client will guide you through the process of obtaining and installing certs
interactively.
For full command line help, you can type::
./certbot-auto --help all
You can also tell it exactly what you want it to do from the command line.
For instance, if you want to obtain a cert for ``example.com``,
``www.example.com``, and ``other.example.net``, using the Apache plugin to both
obtain and install the certs, you could do this::
./certbot-auto --apache -d example.com -d www.example.com -d other.example.net
(The first time you run the command, it will make an account, and ask for an
email and agreement to the Let's Encrypt Subscriber Agreement; you can
automate those with ``--email`` and ``--agree-tos``)
If you want to use a webserver that doesn't have full plugin support yet, you
can still use "standalone" or "webroot" plugins to obtain a certificate::
./certbot-auto certonly --standalone --email admin@example.com -d example.com -d www.example.com -d other.example.net
.. _certbot.eff.org: https://certbot.eff.org/
Understanding the client in more depth
--------------------------------------
@@ -65,6 +91,8 @@ Main Website: https://certbot.eff.org
Let's Encrypt Website: https://letsencrypt.org
IRC Channel: #letsencrypt on `Freenode`_
Community: https://community.letsencrypt.org
ACME spec: http://ietf-wg-acme.github.io/acme/
@@ -73,12 +101,14 @@ ACME working area in github: https://github.com/ietf-wg-acme/acme
|build-status| |coverage| |docs| |container|
.. |build-status| image:: https://travis-ci.com/certbot/certbot.svg?branch=master
:target: https://travis-ci.com/certbot/certbot
.. _Freenode: https://webchat.freenode.net?channels=%23letsencrypt
.. |build-status| image:: https://travis-ci.org/certbot/certbot.svg?branch=master
:target: https://travis-ci.org/certbot/certbot
:alt: Travis CI status
.. |coverage| image:: https://codecov.io/gh/certbot/certbot/branch/master/graph/badge.svg
:target: https://codecov.io/gh/certbot/certbot
.. |coverage| image:: https://coveralls.io/repos/certbot/certbot/badge.svg?branch=master
:target: https://coveralls.io/r/certbot/certbot
:alt: Coverage status
.. |docs| image:: https://readthedocs.org/projects/letsencrypt/badge/


@@ -1,50 +1,12 @@
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `draft-ietf-acme-01`_.
.. _`ACME protocol`: https://ietf-wg-acme.github.io/acme
.. _`draft-ietf-acme-01`:
https://github.com/ietf-wg-acme/acme/tree/draft-ietf-acme-acme-01
"""
import sys
import warnings
# This code exists to keep backwards compatibility with people using acme.jose
# before it became the standalone josepy package.
#
# It is based on
# https://github.com/requests/requests/blob/1278ecdf71a312dc2268f3bfc0aabfab3c006dcf/requests/packages.py
import josepy as jose
for mod in list(sys.modules):
# This traversal is apparently necessary such that the identities are
# preserved (acme.jose.* is josepy.*)
if mod == 'josepy' or mod.startswith('josepy.'):
sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]
# This class takes a similar approach to the cryptography project to deprecate attributes
# in public modules. See the _ModuleWithDeprecation class here:
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
class _TLSSNI01DeprecationModule(object):
"""
Internal class delegating to a module, and displaying warnings when
attributes related to TLS-SNI-01 are accessed.
"""
def __init__(self, module):
self.__dict__['_module'] = module
def __getattr__(self, attr):
if 'TLSSNI01' in attr:
warnings.warn('{0} attribute is deprecated, and will be removed soon.'.format(attr),
DeprecationWarning, stacklevel=2)
return getattr(self._module, attr)
def __setattr__(self, attr, value): # pragma: no cover
setattr(self._module, attr, value)
def __delattr__(self, attr): # pragma: no cover
delattr(self._module, attr)
def __dir__(self): # pragma: no cover
return ['_module'] + dir(self._module)


@@ -4,7 +4,6 @@ import functools
import hashlib
import logging
import socket
import sys
from cryptography.hazmat.primitives import hashes # type: ignore
import josepy as jose
@@ -15,13 +14,15 @@ import six
from acme import errors
from acme import crypto_util
from acme import fields
from acme import _TLSSNI01DeprecationModule
logger = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods
class Challenge(jose.TypedJSONObjectWithFields):
# _fields_to_partial_json
# _fields_to_partial_json | pylint: disable=abstract-method
"""ACME challenge."""
TYPES = {} # type: dict
@@ -35,7 +36,7 @@ class Challenge(jose.TypedJSONObjectWithFields):
class ChallengeResponse(jose.TypedJSONObjectWithFields):
# _fields_to_partial_json
# _fields_to_partial_json | pylint: disable=abstract-method
"""ACME challenge response."""
TYPES = {} # type: dict
resource_type = 'challenge'
@@ -94,7 +95,6 @@ class _TokenChallenge(Challenge):
"""
# TODO: check that path combined with uri does not go above
# URI_ROOT_PATH!
# pylint: disable=unsupported-membership-test
return b'..' not in self.token and b'/' not in self.token
@@ -139,14 +139,10 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
return True
def to_partial_json(self):
jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
jobj.pop('keyAuthorization', None)
return jobj
@six.add_metaclass(abc.ABCMeta)
class KeyAuthorizationChallenge(_TokenChallenge):
# pylint: disable=abstract-class-little-used,too-many-ancestors
"""Challenge based on Key Authorization.
:param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
@@ -178,7 +174,7 @@ class KeyAuthorizationChallenge(_TokenChallenge):
:rtype: KeyAuthorizationChallengeResponse
"""
return self.response_cls( # pylint: disable=not-callable
return self.response_cls(
key_authorization=self.key_authorization(account_key))
@abc.abstractmethod
@@ -215,7 +211,7 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
"""ACME dns-01 challenge response."""
typ = "dns-01"
def simple_verify(self, chall, domain, account_public_key): # pylint: disable=unused-argument
def simple_verify(self, chall, domain, account_public_key):
"""Simple verify.
This method no longer checks DNS records and is a simple wrapper
@@ -231,6 +227,7 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
:rtype: bool
"""
# pylint: disable=unused-argument
verified = self.verify(chall, account_public_key)
if not verified:
logger.debug("Verification of key authorization in response failed")
@@ -435,6 +432,7 @@ class TLSSNI01Response(KeyAuthorizationChallengeResponse):
kwargs.setdefault("port", self.PORT)
kwargs["name"] = self.z_domain
# TODO: try different methods?
# pylint: disable=protected-access
return crypto_util.probe_sni(**kwargs)
def verify_cert(self, cert):
@@ -509,17 +507,6 @@ class TLSSNI01(KeyAuthorizationChallenge):
return self.response(account_key).gen_cert(key=kwargs.get('cert_key'))
@ChallengeResponse.register
class TLSALPN01Response(KeyAuthorizationChallengeResponse):
"""ACME TLS-ALPN-01 challenge response.
This class only allows initiating a TLS-ALPN-01 challenge returned from the
CA. Full support for responding to TLS-ALPN-01 challenges by generating and
serving the expected response certificate is not currently provided.
"""
typ = "tls-alpn-01"
@Challenge.register # pylint: disable=too-many-ancestors
class TLSALPN01(KeyAuthorizationChallenge):
"""ACME tls-alpn-01 challenge.
@@ -529,14 +516,13 @@ class TLSALPN01(KeyAuthorizationChallenge):
"""
typ = "tls-alpn-01"
response_cls = TLSALPN01Response
def validation(self, account_key, **kwargs):
"""Generate validation for the challenge."""
raise NotImplementedError()
@Challenge.register
@Challenge.register # pylint: disable=too-many-ancestors
class DNS(_TokenChallenge):
"""ACME "dns" challenge."""
typ = "dns"
@@ -617,7 +603,3 @@ class DNSResponse(ChallengeResponse):
"""
return chall.check_validation(self.validation, account_public_key)
# Patching ourselves to warn about TLS-SNI challenge deprecation and removal.
sys.modules[__name__] = _TLSSNI01DeprecationModule(sys.modules[__name__])


@@ -6,7 +6,7 @@ import mock
import OpenSSL
import requests
from six.moves.urllib import parse as urllib_parse # pylint: disable=relative-import
from six.moves.urllib import parse as urllib_parse # pylint: disable=import-error
from acme import errors
from acme import test_util
@@ -93,8 +93,7 @@ class DNS01ResponseTest(unittest.TestCase):
self.response = self.chall.response(KEY)
def test_to_partial_json(self):
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.msg.to_partial_json())
self.assertEqual(self.jmsg, self.msg.to_partial_json())
def test_from_json(self):
from acme.challenges import DNS01Response
@@ -165,8 +164,7 @@ class HTTP01ResponseTest(unittest.TestCase):
self.response = self.chall.response(KEY)
def test_to_partial_json(self):
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.msg.to_partial_json())
self.assertEqual(self.jmsg, self.msg.to_partial_json())
def test_from_json(self):
from acme.challenges import HTTP01Response
@@ -286,8 +284,7 @@ class TLSSNI01ResponseTest(unittest.TestCase):
self.assertEqual(self.z_domain, self.response.z_domain)
def test_to_partial_json(self):
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.response.to_partial_json())
self.assertEqual(self.jmsg, self.response.to_partial_json())
def test_from_json(self):
from acme.challenges import TLSSNI01Response
@@ -363,13 +360,13 @@ class TLSSNI01ResponseTest(unittest.TestCase):
class TLSSNI01Test(unittest.TestCase):
def setUp(self):
from acme.challenges import TLSSNI01
self.msg = TLSSNI01(
token=jose.b64decode('a82d5ff8ef740d12881f6d3c2277ab2e'))
self.jmsg = {
'type': 'tls-sni-01',
'token': 'a82d5ff8ef740d12881f6d3c2277ab2e',
}
from acme.challenges import TLSSNI01
self.msg = TLSSNI01(
token=jose.b64decode('a82d5ff8ef740d12881f6d3c2277ab2e'))
def test_to_partial_json(self):
self.assertEqual(self.jmsg, self.msg.to_partial_json())
@@ -395,42 +392,6 @@ class TLSSNI01Test(unittest.TestCase):
KEY, cert_key=mock.sentinel.cert_key))
mock_gen_cert.assert_called_once_with(key=mock.sentinel.cert_key)
def test_deprecation_message(self):
with mock.patch('acme.warnings.warn') as mock_warn:
from acme.challenges import TLSSNI01
assert TLSSNI01
self.assertEqual(mock_warn.call_count, 1)
self.assertTrue('deprecated' in mock_warn.call_args[0][0])
class TLSALPN01ResponseTest(unittest.TestCase):
# pylint: disable=too-many-instance-attributes
def setUp(self):
from acme.challenges import TLSALPN01Response
self.msg = TLSALPN01Response(key_authorization=u'foo')
self.jmsg = {
'resource': 'challenge',
'type': 'tls-alpn-01',
'keyAuthorization': u'foo',
}
from acme.challenges import TLSALPN01
self.chall = TLSALPN01(token=(b'x' * 16))
self.response = self.chall.response(KEY)
def test_to_partial_json(self):
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.msg.to_partial_json())
def test_from_json(self):
from acme.challenges import TLSALPN01Response
self.assertEqual(self.msg, TLSALPN01Response.from_json(self.jmsg))
def test_from_json_hashable(self):
from acme.challenges import TLSALPN01Response
hash(TLSALPN01Response.from_json(self.jmsg))
class TLSALPN01Test(unittest.TestCase):


@@ -6,16 +6,16 @@ from email.utils import parsedate_tz
import heapq
import logging
import time
import re
import sys
import six
from six.moves import http_client # pylint: disable=import-error
import josepy as jose
import OpenSSL
import re
from requests_toolbelt.adapters.source import SourceAddressAdapter
import requests
from requests.adapters import HTTPAdapter
from requests_toolbelt.adapters.source import SourceAddressAdapter
import sys
from acme import crypto_util
from acme import errors
@@ -33,7 +33,6 @@ logger = logging.getLogger(__name__)
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
if sys.version_info < (2, 7, 9): # pragma: no cover
try:
# pylint: disable=no-member
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3() # type: ignore
except AttributeError:
import urllib3.contrib.pyopenssl # pylint: disable=import-error
@@ -51,6 +50,7 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
:ivar .ClientNetwork net: Client network.
:ivar int acme_version: ACME protocol version. 1 or 2.
"""
def __init__(self, directory, net, acme_version):
"""Initialize.
@@ -90,8 +90,6 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
"""
kwargs.setdefault('acme_version', self.acme_version)
if hasattr(self.directory, 'newNonce'):
kwargs.setdefault('new_nonce_url', getattr(self.directory, 'newNonce'))
return self.net.post(*args, **kwargs)
def update_registration(self, regr, update=None):
@@ -123,20 +121,14 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
"""
return self.update_registration(regr, update={'status': 'deactivated'})
def deactivate_authorization(self, authzr):
# type: (messages.AuthorizationResource) -> messages.AuthorizationResource
"""Deactivate authorization.
def query_registration(self, regr):
"""Query server about registration.
:param messages.AuthorizationResource authzr: The Authorization resource
to be deactivated.
:returns: The Authorization resource that was deactivated.
:rtype: `.AuthorizationResource`
:param messages.RegistrationResource: Existing Registration
Resource.
"""
body = messages.UpdateAuthorization(status='deactivated')
response = self._post(authzr.uri, body)
return self._authzr_from_response(response)
return self._send_recv_regr(regr, messages.UpdateRegistration())
def _authzr_from_response(self, response, identifier=None, uri=None):
authzr = messages.AuthorizationResource(
@@ -206,6 +198,22 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
return datetime.datetime.now() + datetime.timedelta(seconds=seconds)
def poll(self, authzr):
"""Poll Authorization Resource for status.
:param authzr: Authorization Resource
:type authzr: `.AuthorizationResource`
:returns: Updated Authorization Resource and HTTP response.
:rtype: (`.AuthorizationResource`, `requests.Response`)
"""
response = self.net.get(authzr.uri)
updated_authzr = self._authzr_from_response(
response, authzr.body.identifier, authzr.uri)
return updated_authzr, response
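For readers skimming the diff, the poll() method above is the piece a caller loops on until the authorization leaves the pending state. A minimal usage sketch, assuming an existing client and a pending authzr as in the examples further down:

    import time
    from acme import messages

    for _ in range(10):
        authzr, resp = client.poll(authzr)  # updated resource plus the raw HTTP response
        if authzr.body.status != messages.STATUS_PENDING:
            break
        time.sleep(1)  # a real client should honor the server's Retry-After hint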
def _revoke(self, cert, rsn, url):
"""Revoke certificate.
@@ -227,7 +235,6 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
raise errors.ClientError(
'Successful revocation must return HTTP OK status')
class Client(ClientBase):
"""ACME client for a v1 API.
@@ -282,15 +289,6 @@ class Client(ClientBase):
# pylint: disable=no-member
return self._regr_from_response(response)
def query_registration(self, regr):
"""Query server about registration.
:param messages.RegistrationResource: Existing Registration
Resource.
"""
return self._send_recv_regr(regr, messages.UpdateRegistration())
def agree_to_tos(self, regr):
"""Agree to the terms-of-service.
@@ -389,22 +387,6 @@ class Client(ClientBase):
body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, response.content)))
def poll(self, authzr):
"""Poll Authorization Resource for status.
:param authzr: Authorization Resource
:type authzr: `.AuthorizationResource`
:returns: Updated Authorization Resource and HTTP response.
:rtype: (`.AuthorizationResource`, `requests.Response`)
"""
response = self.net.get(authzr.uri)
updated_authzr = self._authzr_from_response(
response, authzr.body.identifier, authzr.uri)
return updated_authzr, response
def poll_and_request_issuance(
self, csr, authzrs, mintime=5, max_attempts=10):
"""Poll and request issuance.
@@ -596,60 +578,16 @@ class ClientV2(ClientBase):
:param .NewRegistration new_account:
:raises .ConflictError: in case the account already exists
:returns: Registration Resource.
:rtype: `.RegistrationResource`
"""
response = self._post(self.directory['newAccount'], new_account)
# if account already exists
if response.status_code == 200 and 'Location' in response.headers:
raise errors.ConflictError(response.headers.get('Location'))
# "Instance of 'Field' has no key/contact member" bug:
# pylint: disable=no-member
regr = self._regr_from_response(response)
self.net.account = regr
return regr
def query_registration(self, regr):
"""Query server about registration.
:param messages.RegistrationResource: Existing Registration
Resource.
"""
self.net.account = regr # See certbot/certbot#6258
# ACME v2 requires the use of a POST-as-GET request (a POST with an empty JWS) here.
# This is done by passing None instead of an empty UpdateRegistration to _post().
response = self._post(regr.uri, None)
self.net.account = self._regr_from_response(response, uri=regr.uri,
terms_of_service=regr.terms_of_service)
return self.net.account
def update_registration(self, regr, update=None):
"""Update registration.
:param messages.RegistrationResource regr: Registration Resource.
:param messages.Registration update: Updated body of the
resource. If not provided, body will be taken from `regr`.
:returns: Updated Registration Resource.
:rtype: `.RegistrationResource`
"""
# https://github.com/certbot/certbot/issues/6155
new_regr = self._get_v2_account(regr)
return super(ClientV2, self).update_registration(new_regr, update)
def _get_v2_account(self, regr):
self.net.account = None
only_existing_reg = regr.body.update(only_return_existing=True)
response = self._post(self.directory['newAccount'], only_existing_reg)
updated_uri = response.headers['Location']
new_regr = regr.update(uri=updated_uri)
self.net.account = new_regr
return new_regr
def new_order(self, csr_pem):
"""Request a new Order object from the server.
@@ -670,30 +608,14 @@ class ClientV2(ClientBase):
response = self._post(self.directory['newOrder'], order)
body = messages.Order.from_json(response.json())
authorizations = []
for url in body.authorizations: # pylint: disable=not-an-iterable
authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
for url in body.authorizations:
authorizations.append(self._authzr_from_response(self.net.get(url), uri=url))
return messages.OrderResource(
body=body,
uri=response.headers.get('Location'),
authorizations=authorizations,
csr_pem=csr_pem)
def poll(self, authzr):
"""Poll Authorization Resource for status.
:param authzr: Authorization Resource
:type authzr: `.AuthorizationResource`
:returns: Updated Authorization Resource and HTTP response.
:rtype: (`.AuthorizationResource`, `requests.Response`)
"""
response = self._post_as_get(authzr.uri)
updated_authzr = self._authzr_from_response(
response, authzr.body.identifier, authzr.uri)
return updated_authzr, response
def poll_and_finalize(self, orderr, deadline=None):
"""Poll authorizations and finalize the order.
@@ -717,7 +639,7 @@ class ClientV2(ClientBase):
responses = []
for url in orderr.body.authorizations:
while datetime.datetime.now() < deadline:
authzr = self._authzr_from_response(self._post_as_get(url), uri=url)
authzr = self._authzr_from_response(self.net.get(url), uri=url)
if authzr.body.status != messages.STATUS_PENDING:
responses.append(authzr)
break
@@ -732,7 +654,7 @@ class ClientV2(ClientBase):
for chall in authzr.body.challenges:
if chall.error != None:
failed.append(authzr)
if failed:
if len(failed) > 0:
raise errors.ValidationError(failed)
return orderr.update(authorizations=responses)
@@ -752,12 +674,13 @@ class ClientV2(ClientBase):
self._post(orderr.body.finalize, wrapped_csr)
while datetime.datetime.now() < deadline:
time.sleep(1)
response = self._post_as_get(orderr.uri)
response = self.net.get(orderr.uri)
body = messages.Order.from_json(response.json())
if body.error is not None:
raise errors.IssuanceError(body.error)
if body.certificate is not None:
certificate_response = self._post_as_get(body.certificate).text
certificate_response = self.net.get(body.certificate,
content_type=DER_CONTENT_TYPE).text
return orderr.update(body=body, fullchain_pem=certificate_response)
raise errors.TimeoutError()
@@ -774,36 +697,6 @@ class ClientV2(ClientBase):
"""
return self._revoke(cert, rsn, self.directory['revokeCert'])
def external_account_required(self):
"""Checks if ACME server requires External Account Binding authentication."""
return hasattr(self.directory, 'meta') and self.directory.meta.external_account_required
def _post_as_get(self, *args, **kwargs):
"""
Send GET request using the POST-as-GET protocol if needed.
For ACME v2, the request is first issued using POST-as-GET. If the ACME CA server does
not support this yet and returns an error, the request is retried using GET.
For ACME v1, only a GET request is tried, as POST-as-GET is not supported.
:param args:
:param kwargs:
:return:
"""
if self.acme_version >= 2:
# We add an empty payload for POST-as-GET requests
new_args = args[:1] + (None,) + args[1:]
try:
return self._post(*new_args, **kwargs)
except messages.Error as error:
if error.code == 'malformed':
logger.debug('Error during a POST-as-GET request, '
'your ACME CA server may not support it:\n%s', error)
logger.debug('Retrying request with GET.')
else: # pragma: no cover
raise
# If POST-as-GET is not supported yet, we use a GET instead.
return self.net.get(*args, **kwargs)
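A hedged sketch of how the fallback above reads at a call site; the names mirror the surrounding code and nothing here is new API:

    def fetch_authorization(client, authzr):
        # On ACME v2 this POSTs an empty JWS to authzr.uri; on ACME v1, or if the CA
        # rejects POST-as-GET as 'malformed', it falls back to a plain GET.
        response = client._post_as_get(authzr.uri)
        return client._authzr_from_response(response, authzr.body.identifier, authzr.uri)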
class BackwardsCompatibleClientV2(object):
"""ACME client wrapper that tends towards V2-style calls, but
@@ -833,7 +726,12 @@ class BackwardsCompatibleClientV2(object):
self.client = ClientV2(directory, net=net)
def __getattr__(self, name):
if name in vars(self.client):
return getattr(self.client, name)
elif name in dir(ClientBase):
return getattr(self.client, name)
else:
raise AttributeError()
def new_account_and_tos(self, regr, check_tos_cb=None):
"""Combined register and agree_tos for V1, new_account for V2
@@ -880,6 +778,7 @@ class BackwardsCompatibleClientV2(object):
for domain in dnsNames:
authorizations.append(self.client.request_domain_challenges(domain))
return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
else:
return self.client.new_order(csr_pem)
def finalize_order(self, orderr, deadline):
@@ -917,6 +816,7 @@ class BackwardsCompatibleClientV2(object):
chain = crypto_util.dump_pyopenssl_chain(chain).decode()
return orderr.update(fullchain_pem=(cert + chain))
else:
return self.client.finalize_order(orderr, deadline)
def revoke(self, cert, rsn):
@@ -935,16 +835,9 @@ class BackwardsCompatibleClientV2(object):
def _acme_version_from_directory(self, directory):
if hasattr(directory, 'newNonce'):
return 2
else:
return 1
def external_account_required(self):
"""Checks if the server requires an external account for ACMEv2 servers.
Always return False for ACMEv1 servers, as it doesn't use External Account Binding."""
if self.acme_version == 1:
return False
return self.client.external_account_required()
class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
"""Wrapper around requests that signs POSTs for authentication.
@@ -1008,7 +901,7 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
:rtype: `josepy.JWS`
"""
jobj = obj.json_dumps(indent=2).encode() if obj else b''
jobj = obj.json_dumps(indent=2).encode()
logger.debug('JWS payload:\n%s', jobj)
kwargs = {
"alg": self.alg,
@@ -1017,10 +910,10 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
if acme_version == 2:
kwargs["url"] = url
# newAccount and revokeCert work without the kid
# newAccount must not have kid
if self.account is not None:
kwargs["kid"] = self.account["uri"]
kwargs["key"] = self.key
# pylint: disable=star-args
return jws.JWS.sign(jobj, **kwargs).json_dumps(indent=2)
@classmethod
@@ -1172,15 +1065,10 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
else:
raise errors.MissingNonce(response)
def _get_nonce(self, url, new_nonce_url):
def _get_nonce(self, url):
if not self._nonces:
logger.debug('Requesting fresh nonce')
if new_nonce_url is None:
response = self.head(url)
else:
# request a new nonce from the acme newNonce endpoint
response = self._check_response(self.head(new_nonce_url), content_type=None)
self._add_nonce(response)
self._add_nonce(self.head(url))
return self._nonces.pop()
def post(self, *args, **kwargs):
@@ -1201,10 +1089,8 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
def _post_once(self, url, obj, content_type=JOSE_CONTENT_TYPE,
acme_version=1, **kwargs):
new_nonce_url = kwargs.pop('new_nonce_url', None)
data = self._wrap_in_jws(obj, self._get_nonce(url, new_nonce_url), url, acme_version)
data = self._wrap_in_jws(obj, self._get_nonce(url), url, acme_version)
kwargs.setdefault('headers', {'Content-Type': content_type})
response = self._send_request('POST', url, data=data, **kwargs)
response = self._check_response(response, content_type=content_type)
self._add_nonce(response)
return response
return self._check_response(response, content_type=content_type)

View File

@@ -1,5 +1,4 @@
"""Tests for acme.client."""
# pylint: disable=too-many-lines
import copy
import datetime
import json
@@ -64,7 +63,7 @@ class ClientTestBase(unittest.TestCase):
reg = messages.Registration(
contact=self.contact, key=KEY.public_key())
the_arg = dict(reg) # type: Dict
self.new_reg = messages.NewRegistration(**the_arg)
self.new_reg = messages.NewRegistration(**the_arg) # pylint: disable=star-args
self.regr = messages.RegistrationResource(
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
@@ -135,18 +134,12 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
client = self._init()
self.assertEqual(client.acme_version, 2)
def test_query_registration_client_v2(self):
self.response.json.return_value = DIRECTORY_V2.to_json()
client = self._init()
self.response.json.return_value = self.regr.body.to_json()
self.assertEqual(self.regr, client.query_registration(self.regr))
def test_forwarding(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
client = self._init()
self.assertEqual(client.directory, client.client.directory)
self.assertEqual(client.key, KEY)
self.assertEqual(client.deactivate_registration, client.client.deactivate_registration)
self.assertEqual(client.update_registration, client.client.update_registration)
self.assertRaises(AttributeError, client.__getattr__, 'nonexistent')
self.assertRaises(AttributeError, client.__getattr__, 'new_account_and_tos')
self.assertRaises(AttributeError, client.__getattr__, 'new_account')
@@ -277,44 +270,6 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
client.revoke(messages_test.CERT, self.rsn)
mock_client().revoke.assert_called_once_with(messages_test.CERT, self.rsn)
def test_update_registration(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
with mock.patch('acme.client.Client') as mock_client:
client = self._init()
client.update_registration(mock.sentinel.regr, None)
mock_client().update_registration.assert_called_once_with(mock.sentinel.regr, None)
# newNonce present means it will pick acme_version 2
def test_external_account_required_true(self):
self.response.json.return_value = messages.Directory({
'newNonce': 'http://letsencrypt-test.com/acme/new-nonce',
'meta': messages.Directory.Meta(external_account_required=True),
}).to_json()
client = self._init()
self.assertTrue(client.external_account_required())
# newNonce present means it will pick acme_version 2
def test_external_account_required_false(self):
self.response.json.return_value = messages.Directory({
'newNonce': 'http://letsencrypt-test.com/acme/new-nonce',
'meta': messages.Directory.Meta(external_account_required=False),
}).to_json()
client = self._init()
self.assertFalse(client.external_account_required())
def test_external_account_required_false_v1(self):
self.response.json.return_value = messages.Directory({
'meta': messages.Directory.Meta(external_account_required=False),
}).to_json()
client = self._init()
self.assertFalse(client.external_account_required())
class ClientTest(ClientTestBase):
"""Tests for acme.client.Client."""
@@ -358,6 +313,7 @@ class ClientTest(ClientTestBase):
def test_register(self):
# "Instance of 'Field' has no to_json/update member" bug:
# pylint: disable=no-member
self.response.status_code = http_client.CREATED
self.response.json.return_value = self.regr.body.to_json()
self.response.headers['Location'] = self.regr.uri
@@ -370,6 +326,7 @@ class ClientTest(ClientTestBase):
def test_update_registration(self):
# "Instance of 'Field' has no to_json/update member" bug:
# pylint: disable=no-member
self.response.headers['Location'] = self.regr.uri
self.response.json.return_value = self.regr.body.to_json()
self.assertEqual(self.regr, self.client.update_registration(self.regr))
@@ -637,14 +594,6 @@ class ClientTest(ClientTestBase):
errors.PollError, self.client.poll_and_request_issuance,
csr, authzrs, mintime=mintime, max_attempts=2)
def test_deactivate_authorization(self):
authzb = self.authzr.body.update(status=messages.STATUS_DEACTIVATED)
self.response.json.return_value = authzb.to_json()
authzr = self.client.deactivate_authorization(self.authzr)
self.assertEqual(authzb, authzr.body)
self.assertEqual(self.client.net.post.call_count, 1)
self.assertTrue(self.authzr.uri in self.net.post.call_args_list[0][0])
def test_check_cert(self):
self.response.headers['Location'] = self.certr.uri
self.response.content = CERT_DER
@@ -703,7 +652,7 @@ class ClientTest(ClientTestBase):
def test_revocation_payload(self):
obj = messages.Revocation(certificate=self.certr.body, reason=self.rsn)
self.assertTrue('reason' in obj.to_partial_json().keys())
self.assertEqual(self.rsn, obj.to_partial_json()['reason'])
self.assertEquals(self.rsn, obj.to_partial_json()['reason'])
def test_revoke_bad_status_raises_error(self):
self.response.status_code = http_client.METHOD_NOT_ALLOWED
@@ -713,7 +662,6 @@ class ClientTest(ClientTestBase):
self.certr,
self.rsn)
class ClientV2Test(ClientTestBase):
"""Tests for acme.client.ClientV2."""
@@ -751,11 +699,6 @@ class ClientV2Test(ClientTestBase):
self.assertEqual(self.regr, self.client.new_account(self.new_reg))
def test_new_account_conflict(self):
self.response.status_code = http_client.OK
self.response.headers['Location'] = self.regr.uri
self.assertRaises(errors.ConflictError, self.client.new_account, self.new_reg)
def test_new_order(self):
order_response = copy.deepcopy(self.response)
order_response.status_code = http_client.CREATED
@@ -769,9 +712,8 @@ class ClientV2Test(ClientTestBase):
authz_response2 = self.response
authz_response2.json.return_value = self.authz2.to_json()
authz_response2.headers['Location'] = self.authzr2.uri
self.net.get.side_effect = (authz_response, authz_response2)
with mock.patch('acme.client.ClientV2._post_as_get') as mock_post_as_get:
mock_post_as_get.side_effect = (authz_response, authz_response2)
self.assertEqual(self.client.new_order(CSR_SAN_PEM), self.orderr)
@mock.patch('acme.client.datetime')
@@ -845,61 +787,7 @@ class ClientV2Test(ClientTestBase):
def test_revoke(self):
self.client.revoke(messages_test.CERT, self.rsn)
self.net.post.assert_called_once_with(
self.directory["revokeCert"], mock.ANY, acme_version=2,
new_nonce_url=DIRECTORY_V2['newNonce'])
def test_update_registration(self):
# "Instance of 'Field' has no to_json/update member" bug:
self.response.headers['Location'] = self.regr.uri
self.response.json.return_value = self.regr.body.to_json()
self.assertEqual(self.regr, self.client.update_registration(self.regr))
self.assertNotEqual(self.client.net.account, None)
self.assertEqual(self.client.net.post.call_count, 2)
self.assertTrue(DIRECTORY_V2.newAccount in self.net.post.call_args_list[0][0])
self.response.json.return_value = self.regr.body.update(
contact=()).to_json()
def test_external_account_required_true(self):
self.client.directory = messages.Directory({
'meta': messages.Directory.Meta(external_account_required=True)
})
self.assertTrue(self.client.external_account_required())
def test_external_account_required_false(self):
self.client.directory = messages.Directory({
'meta': messages.Directory.Meta(external_account_required=False)
})
self.assertFalse(self.client.external_account_required())
def test_external_account_required_default(self):
self.assertFalse(self.client.external_account_required())
def test_post_as_get(self):
with mock.patch('acme.client.ClientV2._authzr_from_response') as mock_client:
mock_client.return_value = self.authzr2
self.client.poll(self.authzr2) # pylint: disable=protected-access
self.client.net.post.assert_called_once_with(
self.authzr2.uri, None, acme_version=2,
new_nonce_url='https://www.letsencrypt-demo.org/acme/new-nonce')
self.client.net.get.assert_not_called()
class FakeError(messages.Error): # pylint: disable=too-many-ancestors
"""Fake error to reproduce a malformed request ACME error"""
def __init__(self): # pylint: disable=super-init-not-called
pass
@property
def code(self):
return 'malformed'
self.client.net.post.side_effect = FakeError()
self.client.poll(self.authzr2) # pylint: disable=protected-access
self.client.net.get.assert_called_once_with(self.authzr2.uri)
self.directory["revokeCert"], mock.ANY, acme_version=2)
class MockJSONDeSerializable(jose.JSONDeSerializable):
@@ -911,7 +799,7 @@ class MockJSONDeSerializable(jose.JSONDeSerializable):
return {'foo': self.value}
@classmethod
def from_json(cls, jobj):
def from_json(cls, value):
pass # pragma: no cover
@@ -956,6 +844,7 @@ class ClientNetworkTest(unittest.TestCase):
self.assertEqual(jws.signature.combined.kid, u'acct-uri')
self.assertEqual(jws.signature.combined.url, u'url')
def test_check_response_not_ok_jobj_no_error(self):
self.response.ok = False
self.response.json.return_value = {}
@@ -1118,8 +1007,8 @@ class ClientNetworkTest(unittest.TestCase):
# Requests Library Exceptions
except requests.exceptions.ConnectionError as z: #pragma: no cover
self.assertTrue("'Connection aborted.'" in str(z) or "[WinError 10061]" in str(z))
self.assertEqual("('Connection aborted.', "
"error(111, 'Connection refused'))", str(z))
class ClientNetworkWithMockedResponseTest(unittest.TestCase):
"""Tests for acme.client.ClientNetwork which mock out response."""
@@ -1132,10 +1021,7 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
self.response = mock.MagicMock(ok=True, status_code=http_client.OK)
self.response.headers = {}
self.response.links = {}
self.response.checked = False
self.acmev1_nonce_response = mock.MagicMock(ok=False,
status_code=http_client.METHOD_NOT_ALLOWED)
self.acmev1_nonce_response.headers = {}
self.checked_response = mock.MagicMock()
self.obj = mock.MagicMock()
self.wrapped_obj = mock.MagicMock()
self.content_type = mock.sentinel.content_type
@@ -1147,21 +1033,13 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
def send_request(*args, **kwargs):
# pylint: disable=unused-argument,missing-docstring
self.assertFalse("new_nonce_url" in kwargs)
method = args[0]
uri = args[1]
if method == 'HEAD' and uri != "new_nonce_uri":
response = self.acmev1_nonce_response
else:
response = self.response
if self.available_nonces:
response.headers = {
self.response.headers = {
self.net.REPLAY_NONCE_HEADER:
self.available_nonces.pop().decode()}
else:
response.headers = {}
return response
self.response.headers = {}
return self.response
# pylint: disable=protected-access
self.net._send_request = self.send_request = mock.MagicMock(
@@ -1173,39 +1051,28 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
# pylint: disable=missing-docstring
self.assertEqual(self.response, response)
self.assertEqual(self.content_type, content_type)
self.assertTrue(self.response.ok)
self.response.checked = True
return self.response
return self.checked_response
def test_head(self):
self.assertEqual(self.acmev1_nonce_response, self.net.head(
self.assertEqual(self.response, self.net.head(
'http://example.com/', 'foo', bar='baz'))
self.send_request.assert_called_once_with(
'HEAD', 'http://example.com/', 'foo', bar='baz')
def test_head_v2(self):
self.assertEqual(self.response, self.net.head(
'new_nonce_uri', 'foo', bar='baz'))
self.send_request.assert_called_once_with(
'HEAD', 'new_nonce_uri', 'foo', bar='baz')
def test_get(self):
self.assertEqual(self.response, self.net.get(
self.assertEqual(self.checked_response, self.net.get(
'http://example.com/', content_type=self.content_type, bar='baz'))
self.assertTrue(self.response.checked)
self.send_request.assert_called_once_with(
'GET', 'http://example.com/', bar='baz')
def test_post_no_content_type(self):
self.content_type = self.net.JOSE_CONTENT_TYPE
self.assertEqual(self.response, self.net.post('uri', self.obj))
self.assertTrue(self.response.checked)
self.assertEqual(self.checked_response, self.net.post('uri', self.obj))
def test_post(self):
# pylint: disable=protected-access
self.assertEqual(self.response, self.net.post(
self.assertEqual(self.checked_response, self.net.post(
'uri', self.obj, content_type=self.content_type))
self.assertTrue(self.response.checked)
self.net._wrap_in_jws.assert_called_once_with(
self.obj, jose.b64decode(self.all_nonces.pop()), "uri", 1)
@@ -1237,7 +1104,7 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
def test_post_not_retried(self):
check_response = mock.MagicMock()
check_response.side_effect = [messages.Error.with_code('malformed'),
self.response]
self.checked_response]
# pylint: disable=protected-access
self.net._check_response = check_response
@@ -1245,12 +1112,13 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
self.obj, content_type=self.content_type)
def test_post_successful_retry(self):
post_once = mock.MagicMock()
post_once.side_effect = [messages.Error.with_code('badNonce'),
self.response]
check_response = mock.MagicMock()
check_response.side_effect = [messages.Error.with_code('badNonce'),
self.checked_response]
# pylint: disable=protected-access
self.assertEqual(self.response, self.net.post(
self.net._check_response = check_response
self.assertEqual(self.checked_response, self.net.post(
'uri', self.obj, content_type=self.content_type))
def test_head_get_post_error_passthrough(self):
@@ -1261,26 +1129,6 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
self.assertRaises(requests.exceptions.RequestException,
self.net.post, 'uri', obj=self.obj)
def test_post_bad_nonce_head(self):
# pylint: disable=protected-access
# regression test for https://github.com/certbot/certbot/issues/6092
bad_response = mock.MagicMock(ok=False, status_code=http_client.SERVICE_UNAVAILABLE)
self.net._send_request = mock.MagicMock()
self.net._send_request.return_value = bad_response
self.content_type = None
check_response = mock.MagicMock()
self.net._check_response = check_response
self.assertRaises(errors.ClientError, self.net.post, 'uri',
self.obj, content_type=self.content_type, acme_version=2,
new_nonce_url='new_nonce_uri')
self.assertEqual(check_response.call_count, 1)
def test_new_nonce_uri_removed(self):
self.content_type = None
self.net.post('uri', self.obj, content_type=None,
acme_version=2, new_nonce_url='new_nonce_uri')
class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
"""Tests that if ClientNetwork has a source IP set manually, the underlying library has
used the provided source address."""

View File

@@ -18,14 +18,17 @@ from acme.magic_typing import Callable, Union, Tuple, Optional
logger = logging.getLogger(__name__)
# Default SSL method selected here is the most compatible, while secure
# SSL method: TLSv1_METHOD is only compatible with
# TLSSNI01 certificate serving and probing is not affected by SSL
# vulnerabilities: prober needs to check certificate for expected
# contents anyway. Working SNI is the only thing that's necessary for
# the challenge and thus scoping down SSL/TLS method (version) would
# cause interoperability issues: TLSv1_METHOD is only compatible with
# TLSv1_METHOD, while SSLv23_METHOD is compatible with all other
# methods, including TLSv2_METHOD (read more at
# https://www.openssl.org/docs/ssl/SSLv23_method.html). _serve_sni
# should be changed to use "set_options" to disable SSLv2 and SSLv3,
# in case it's used for things other than probing/serving!
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
_DEFAULT_TLSSNI01_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
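The comment above notes that _serve_sni should use set_options to disable SSLv2/SSLv3 if the socket were ever used for more than probing and challenge serving. A minimal pyOpenSSL sketch of that hardening, shown here only as an illustration:

    from OpenSSL import SSL

    context = SSL.Context(SSL.SSLv23_METHOD)  # negotiate the highest mutually supported version
    context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)  # but refuse the broken legacy protocols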
class SSLSocket(object): # pylint: disable=too-few-public-methods
@@ -37,7 +40,7 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
:ivar method: See `OpenSSL.SSL.Context` for allowed values.
"""
def __init__(self, sock, certs, method=_DEFAULT_SSL_METHOD):
def __init__(self, sock, certs, method=_DEFAULT_TLSSNI01_SSL_METHOD):
self.sock = sock
self.certs = certs
self.method = method
@@ -109,7 +112,7 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
def probe_sni(name, host, port=443, timeout=300,
method=_DEFAULT_SSL_METHOD, source_address=('', 0)):
method=_DEFAULT_TLSSNI01_SSL_METHOD, source_address=('', 0)):
"""Probe SNI server for SSL certificate.
:param bytes name: Byte string to send as the server name in the
@@ -133,15 +136,22 @@ def probe_sni(name, host, port=443, timeout=300,
socket_kwargs = {'source_address': source_address}
host_protocol_agnostic = host
if host == '::' or host == '0':
# https://github.com/python/typeshed/pull/2136
# while PR is not merged, we need to ignore
host_protocol_agnostic = None
try:
# pylint: disable=star-args
logger.debug(
"Attempting to connect to %s:%d%s.", host, port,
"Attempting to connect to %s:%d%s.", host_protocol_agnostic, port,
" from {0}:{1}".format(
source_address[0],
source_address[1]
) if socket_kwargs else ""
)
socket_tuple = (host, port) # type: Tuple[str, int]
socket_tuple = (host_protocol_agnostic, port) # type: Tuple[Optional[str], int]
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore
except socket.error as error:
raise errors.Error(error)
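probe_sni() is what the standalone tests further down use to verify that the challenge certificate is actually being served. A hedged usage sketch; the host and port are placeholders:

    from acme import crypto_util

    # Connect to 127.0.0.1:5001, send b'example.com' as the SNI name, and get back
    # the presented certificate as an OpenSSL X509 object.
    cert = crypto_util.probe_sni(b'example.com', '127.0.0.1', 5001)
    print(cert.get_subject().CN)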
@@ -194,6 +204,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
if common_name is None:
return sans
else:
return [common_name] + [d for d in sans if d != common_name]
def _pyopenssl_cert_or_req_san(cert_or_req):

View File

@@ -209,8 +209,8 @@ class MakeCSRTest(unittest.TestCase):
# have a get_extensions() method, so we skip this test if the method
# isn't available.
if hasattr(csr, 'get_extensions'):
self.assertEqual(len(csr.get_extensions()), 1)
self.assertEqual(csr.get_extensions()[0].get_data(),
self.assertEquals(len(csr.get_extensions()), 1)
self.assertEquals(csr.get_extensions()[0].get_data(),
OpenSSL.crypto.X509Extension(
b'subjectAltName',
critical=False,
@@ -227,7 +227,7 @@ class MakeCSRTest(unittest.TestCase):
# have a get_extensions() method, so we skip this test if the method
# isn't available.
if hasattr(csr, 'get_extensions'):
self.assertEqual(len(csr.get_extensions()), 2)
self.assertEquals(len(csr.get_extensions()), 2)
# NOTE: Ideally we would filter by the TLS Feature OID, but
# OpenSSL.crypto.X509Extension doesn't give us the extension's raw OID,
# and the shortname field is just "UNDEF"

View File

@@ -110,8 +110,6 @@ class ConflictError(ClientError):
In the version of ACME implemented by Boulder, this is used to find an
account if you only have the private key, but don't know the account URL.
Also used in V2 of the ACME client for the same purpose.
"""
def __init__(self, location):
self.location = location
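As the client hunk earlier in this diff shows, ClientV2.new_account() raises ConflictError when the server answers 200 with a Location header, so a caller can recover the URL of the already-registered account. A short sketch of handling it, assuming client_acme and new_registration from the examples:

    from acme import errors

    try:
        regr = client_acme.new_account(new_registration)
    except errors.ConflictError as err:
        # The account already exists; err.location holds its URL on the ACME server.
        print('Account already registered at %s' % err.location)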

View File

@@ -1,53 +0,0 @@
"""Tests for acme.jose shim."""
import importlib
import unittest
class JoseTest(unittest.TestCase):
"""Tests for acme.jose shim."""
def _test_it(self, submodule, attribute):
if submodule:
acme_jose_path = 'acme.jose.' + submodule
josepy_path = 'josepy.' + submodule
else:
acme_jose_path = 'acme.jose'
josepy_path = 'josepy'
acme_jose_mod = importlib.import_module(acme_jose_path)
josepy_mod = importlib.import_module(josepy_path)
self.assertIs(acme_jose_mod, josepy_mod)
self.assertIs(getattr(acme_jose_mod, attribute), getattr(josepy_mod, attribute))
# We use the imports below with eval, but pylint doesn't
# understand that.
# pylint: disable=eval-used,unused-variable
import acme
import josepy
acme_jose_mod = eval(acme_jose_path)
josepy_mod = eval(josepy_path)
self.assertIs(acme_jose_mod, josepy_mod)
self.assertIs(getattr(acme_jose_mod, attribute), getattr(josepy_mod, attribute))
def test_top_level(self):
self._test_it('', 'RS512')
def test_submodules(self):
# This test ensures that the modules in josepy that were
# available at the time it was moved into its own package are
# available under acme.jose. Backwards compatibility with new
# modules or testing code is not maintained.
mods_and_attrs = [('b64', 'b64decode',),
('errors', 'Error',),
('interfaces', 'JSONDeSerializable',),
('json_util', 'Field',),
('jwa', 'HS256',),
('jwk', 'JWK',),
('jws', 'JWS',),
('util', 'ImmutableMap',),]
for mod, attr in mods_and_attrs:
self._test_it(mod, attr)
if __name__ == '__main__':
unittest.main() # pragma: no cover

View File

@@ -1,10 +1,6 @@
"""ACME protocol messages."""
import json
import collections
import six
try:
from collections.abc import Hashable # pylint: disable=no-name-in-module
except ImportError: # pragma: no cover
from collections import Hashable
import josepy as jose
@@ -12,42 +8,25 @@ from acme import challenges
from acme import errors
from acme import fields
from acme import util
from acme import jws
OLD_ERROR_PREFIX = "urn:acme:error:"
ERROR_PREFIX = "urn:ietf:params:acme:error:"
ERROR_CODES = {
'accountDoesNotExist': 'The request specified an account that does not exist',
'alreadyRevoked': 'The request specified a certificate to be revoked that has' \
' already been revoked',
'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
'badNonce': 'The client sent an unacceptable anti-replay nonce',
'badPublicKey': 'The JWS was signed by a public key the server does not support',
'badRevocationReason': 'The revocation reason provided is not allowed by the server',
'badSignatureAlgorithm': 'The JWS was signed with an algorithm the server does not support',
'caa': 'Certification Authority Authorization (CAA) records forbid the CA from issuing' \
' a certificate',
'compound': 'Specific error conditions are indicated in the "subproblems" array',
'connection': ('The server could not connect to the client to verify the'
' domain'),
'dns': 'There was a problem with a DNS query during identifier validation',
'dnssec': 'The server could not validate a DNSSEC signed domain',
'incorrectResponse': 'Response received didn\'t match the challenge\'s requirements',
# deprecate invalidEmail
'invalidEmail': 'The provided email for a registration was invalid',
'invalidContact': 'The provided contact URI was invalid',
'malformed': 'The request message was malformed',
'rejectedIdentifier': 'The server will not issue certificates for the identifier',
'orderNotReady': 'The request attempted to finalize an order that is not ready to be finalized',
'rateLimited': 'There were too many requests of a given type',
'serverInternal': 'The server experienced an internal error',
'tls': 'The server experienced a TLS error during domain verification',
'unauthorized': 'The client lacks sufficient authorization',
'unsupportedContact': 'A contact URL for an account used an unsupported protocol scheme',
'unknownHost': 'The server could not resolve a domain name',
'unsupportedIdentifier': 'An identifier is of an unsupported type',
'externalAccountRequired': 'The server requires external account binding',
}
ERROR_TYPE_DESCRIPTIONS = dict(
@@ -61,6 +40,7 @@ def is_acme_error(err):
"""Check if argument is an ACME error."""
if isinstance(err, Error) and (err.typ is not None):
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
else:
return False
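The tests later in this diff construct errors with messages.Error.with_code(), which pairs one of the ERROR_CODES keys above with the urn:ietf:params:acme:error: prefix; the code property shown in the next hunk strips it back off. A small sketch of how these helpers fit together:

    from acme import messages

    err = messages.Error.with_code('badNonce')
    assert messages.is_acme_error(err)  # typ carries the ACME error prefix
    assert err.code == 'badNonce'       # the prefix is stripped back off by Error.code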
@@ -116,7 +96,6 @@ class Error(jose.JSONObjectWithFields, errors.Error):
code = str(self.typ).split(':')[-1]
if code in ERROR_CODES:
return code
return None
def __str__(self):
return b' :: '.join(
@@ -125,25 +104,24 @@ class Error(jose.JSONObjectWithFields, errors.Error):
if part is not None).decode()
class _Constant(jose.JSONDeSerializable, Hashable): # type: ignore
class _Constant(jose.JSONDeSerializable, collections.Hashable): # type: ignore
"""ACME constant."""
__slots__ = ('name',)
POSSIBLE_NAMES = NotImplemented
def __init__(self, name):
super(_Constant, self).__init__()
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
self.POSSIBLE_NAMES[name] = self
self.name = name
def to_partial_json(self):
return self.name
@classmethod
def from_json(cls, jobj):
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
def from_json(cls, value):
if value not in cls.POSSIBLE_NAMES:
raise jose.DeserializationError(
'{0} not recognized'.format(cls.__name__))
return cls.POSSIBLE_NAMES[jobj] # pylint: disable=unsubscriptable-object
return cls.POSSIBLE_NAMES[value]
def __repr__(self):
return '{0}({1})'.format(self.__class__.__name__, self.name)
@@ -168,7 +146,6 @@ STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')
STATUS_READY = Status('ready')
STATUS_DEACTIVATED = Status('deactivated')
class IdentifierType(_Constant):
@@ -199,10 +176,10 @@ class Directory(jose.JSONDeSerializable):
_terms_of_service_v2 = jose.Field('termsOfService', omitempty=True)
website = jose.Field('website', omitempty=True)
caa_identities = jose.Field('caaIdentities', omitempty=True)
external_account_required = jose.Field('externalAccountRequired', omitempty=True)
def __init__(self, **kwargs):
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
# pylint: disable=star-args
super(Directory.Meta, self).__init__(**kwargs)
@property
@@ -281,24 +258,6 @@ class ResourceBody(jose.JSONObjectWithFields):
"""ACME Resource Body."""
class ExternalAccountBinding(object):
"""ACME External Account Binding"""
@classmethod
def from_data(cls, account_public_key, kid, hmac_key, directory):
"""Create External Account Binding Resource from contact details, kid and hmac."""
key_json = json.dumps(account_public_key.to_partial_json()).encode()
decoded_hmac_key = jose.b64.b64decode(hmac_key)
url = directory["newAccount"]
eab = jws.JWS.sign(key_json, jose.jwk.JWKOct(key=decoded_hmac_key),
jose.jwa.HS256, None,
url, kid)
return eab.to_partial_json()
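The removed test further down (test_new_registration_from_data_with_eab) shows the intended flow: build the external account binding JWS first, then attach it to the new registration. A condensed sketch; the key, kid, hmac key and directory values are placeholders:

    import josepy as jose
    from acme import messages

    # `acct_key` is assumed to be an existing RSA private key object.
    pub_jwk = jose.JWKRSA(key=acct_key.public_key())
    eab = messages.ExternalAccountBinding.from_data(
        account_public_key=pub_jwk,
        kid='kid-issued-by-the-ca',      # both values are handed out by the CA
        hmac_key='hmac-key-from-the-ca',
        directory=directory)             # parsed Directory; only 'newAccount' is needed
    regr = messages.NewRegistration.from_data(
        email='admin@example.com', external_account_binding=eab)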
class Registration(ResourceBody):
"""Registration Resource Body.
@@ -315,14 +274,12 @@ class Registration(ResourceBody):
agreement = jose.Field('agreement', omitempty=True)
status = jose.Field('status', omitempty=True)
terms_of_service_agreed = jose.Field('termsOfServiceAgreed', omitempty=True)
only_return_existing = jose.Field('onlyReturnExisting', omitempty=True)
external_account_binding = jose.Field('externalAccountBinding', omitempty=True)
phone_prefix = 'tel:'
email_prefix = 'mailto:'
@classmethod
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
def from_data(cls, phone=None, email=None, **kwargs):
"""Create registration resource from contact details."""
details = list(kwargs.pop('contact', ()))
if phone is not None:
@@ -330,15 +287,11 @@ class Registration(ResourceBody):
if email is not None:
details.extend([cls.email_prefix + mail for mail in email.split(',')])
kwargs['contact'] = tuple(details)
if external_account_binding:
kwargs['external_account_binding'] = external_account_binding
return cls(**kwargs)
def _filter_contact(self, prefix):
return tuple(
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
detail[len(prefix):] for detail in self.contact
if detail.startswith(prefix))
@property
@@ -410,6 +363,7 @@ class ChallengeBody(ResourceBody):
def __init__(self, **kwargs):
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
# pylint: disable=star-args
super(ChallengeBody, self).__init__(**kwargs)
def encode(self, name):
@@ -472,7 +426,7 @@ class Authorization(ResourceBody):
:ivar datetime.datetime expires:
"""
identifier = jose.Field('identifier', decoder=Identifier.from_json, omitempty=True)
identifier = jose.Field('identifier', decoder=Identifier.from_json)
challenges = jose.Field('challenges', omitempty=True)
combinations = jose.Field('combinations', omitempty=True)
@@ -492,7 +446,7 @@ class Authorization(ResourceBody):
def resolved_combinations(self):
"""Combinations with challenges instead of indices."""
return tuple(tuple(self.challenges[idx] for idx in combo)
for combo in self.combinations) # pylint: disable=not-an-iterable
for combo in self.combinations)
@Directory.register
@@ -502,12 +456,6 @@ class NewAuthorization(Authorization):
resource = fields.Resource(resource_type)
class UpdateAuthorization(Authorization):
"""Update authorization."""
resource_type = 'authz'
resource = fields.Resource(resource_type)
class AuthorizationResource(ResourceWithURI):
"""Authorization Resource.
@@ -574,7 +522,7 @@ class Order(ResourceBody):
"""
identifiers = jose.Field('identifiers', omitempty=True)
status = jose.Field('status', decoder=Status.from_json,
omitempty=True)
omitempty=True, default=STATUS_PENDING)
authorizations = jose.Field('authorizations', omitempty=True)
certificate = jose.Field('certificate', omitempty=True)
finalize = jose.Field('finalize', omitempty=True)
@@ -604,3 +552,4 @@ class OrderResource(ResourceWithURI):
class NewOrder(Order):
"""New order."""
resource_type = 'new-order'
resource = fields.Resource(resource_type)

View File

@@ -174,24 +174,6 @@ class DirectoryTest(unittest.TestCase):
self.assertTrue(result)
class ExternalAccountBindingTest(unittest.TestCase):
def setUp(self):
from acme.messages import Directory
self.key = jose.jwk.JWKRSA(key=KEY.public_key())
self.kid = "kid-for-testing"
self.hmac_key = "hmac-key-for-testing"
self.dir = Directory({
'newAccount': 'http://url/acme/new-account',
})
def test_from_data(self):
from acme.messages import ExternalAccountBinding
eab = ExternalAccountBinding.from_data(self.key, self.kid, self.hmac_key, self.dir)
self.assertEqual(len(eab), 3)
self.assertEqual(sorted(eab.keys()), sorted(['protected', 'payload', 'signature']))
class RegistrationTest(unittest.TestCase):
"""Tests for acme.messages.Registration."""
@@ -223,22 +205,6 @@ class RegistrationTest(unittest.TestCase):
'mailto:admin@foo.com',
))
def test_new_registration_from_data_with_eab(self):
from acme.messages import NewRegistration, ExternalAccountBinding, Directory
key = jose.jwk.JWKRSA(key=KEY.public_key())
kid = "kid-for-testing"
hmac_key = "hmac-key-for-testing"
directory = Directory({
'newAccount': 'http://url/acme/new-account',
})
eab = ExternalAccountBinding.from_data(key, kid, hmac_key, directory)
reg = NewRegistration.from_data(email='admin@foo.com', external_account_binding=eab)
self.assertEqual(reg.contact, (
'mailto:admin@foo.com',
))
self.assertEqual(sorted(reg.external_account_binding.keys()),
sorted(['protected', 'payload', 'signature']))
def test_phones(self):
self.assertEqual(('1234',), self.reg.phones)
@@ -458,19 +424,6 @@ class OrderResourceTest(unittest.TestCase):
'authorizations': None,
})
class NewOrderTest(unittest.TestCase):
"""Tests for acme.messages.NewOrder."""
def setUp(self):
from acme.messages import NewOrder
self.reg = NewOrder(
identifiers=mock.sentinel.identifiers)
def test_to_partial_json(self):
self.assertEqual(self.reg.to_json(), {
'identifiers': mock.sentinel.identifiers,
})
if __name__ == '__main__':
unittest.main() # pragma: no cover

View File

@@ -17,7 +17,6 @@ import OpenSSL
from acme import challenges
from acme import crypto_util
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
from acme import _TLSSNI01DeprecationModule
logger = logging.getLogger(__name__)
@@ -38,7 +37,7 @@ class TLSServer(socketserver.TCPServer):
self.certs = kwargs.pop("certs", {})
self.method = kwargs.pop(
# pylint: disable=protected-access
"method", crypto_util._DEFAULT_SSL_METHOD)
"method", crypto_util._DEFAULT_TLSSNI01_SSL_METHOD)
self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
socketserver.TCPServer.__init__(self, *args, **kwargs)
@@ -83,7 +82,7 @@ class BaseDualNetworkedServers(object):
kwargs["ipv6"] = ip_version
new_address = (server_address[0],) + (port,) + server_address[2:]
new_args = (new_address,) + remaining_args
server = ServerClass(*new_args, **kwargs)
server = ServerClass(*new_args, **kwargs) # pylint: disable=star-args
logger.debug(
"Successfully bound to %s:%s using %s", new_address[0],
new_address[1], "IPv6" if ip_version else "IPv4")
@@ -91,8 +90,8 @@ class BaseDualNetworkedServers(object):
if self.servers:
# Already bound using IPv6.
logger.debug(
"Certbot wasn't able to bind to %s:%s using %s, this "
"is often expected due to the dual stack nature of "
"Certbot wasn't able to bind to %s:%s using %s, this " +
"is often expected due to the dual stack nature of " +
"IPv6 socket implementations.",
new_address[0], new_address[1],
"IPv6" if ip_version else "IPv4")
@@ -105,7 +104,7 @@ class BaseDualNetworkedServers(object):
# If two servers are set up and port 0 was passed in, ensure we always
# bind to the same port for both servers.
port = server.socket.getsockname()[1]
if not self.servers:
if len(self.servers) == 0:
raise socket.error("Could not bind to IPv4 or IPv6.")
def serve_forever(self):
@@ -297,9 +296,5 @@ def simple_tls_sni_01_server(cli_args, forever=True):
server.handle_request()
# Patching ourselves to warn about TLS-SNI challenge deprecation and removal.
sys.modules[__name__] = _TLSSNI01DeprecationModule(sys.modules[__name__])
if __name__ == "__main__":
sys.exit(simple_tls_sni_01_server(sys.argv)) # pragma: no cover
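The dual-networked servers defined in this module are exercised by the tests and the HTTP-01 example elsewhere in this diff; the pattern is to bind IPv6 and IPv4 to one shared port, serve, then shut both sockets down. A condensed sketch based on that test and example code:

    from acme.standalone import HTTP01DualNetworkedServers

    servers = HTTP01DualNetworkedServers(('', 0), resources=set())  # port 0: one free port for both sockets
    port = servers.getsocknames()[0][1]
    servers.serve_forever()
    # ... answer the HTTP-01 challenge while the servers are running ...
    servers.shutdown_and_server_close()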

View File

@@ -1,15 +1,13 @@
"""Tests for acme.standalone."""
import multiprocessing
import os
import shutil
import socket
import threading
import tempfile
import unittest
import time
from contextlib import closing
from six.moves import http_client # pylint: disable=import-error
from six.moves import queue # pylint: disable=import-error
from six.moves import socketserver # type: ignore # pylint: disable=import-error
import josepy as jose
@@ -18,7 +16,6 @@ import requests
from acme import challenges
from acme import crypto_util
from acme import errors
from acme import test_util
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
@@ -31,14 +28,14 @@ class TLSServerTest(unittest.TestCase):
from acme.standalone import TLSServer
server = TLSServer(
('', 0), socketserver.BaseRequestHandler, bind_and_activate=True)
server.server_close()
server.server_close() # pylint: disable=no-member
def test_ipv6(self):
if socket.has_ipv6:
from acme.standalone import TLSServer
server = TLSServer(
('', 0), socketserver.BaseRequestHandler, bind_and_activate=True, ipv6=True)
server.server_close()
server.server_close() # pylint: disable=no-member
class TLSSNI01ServerTest(unittest.TestCase):
@@ -51,12 +48,13 @@ class TLSSNI01ServerTest(unittest.TestCase):
test_util.load_cert('rsa2048_cert.pem'),
)}
from acme.standalone import TLSSNI01Server
self.server = TLSSNI01Server(('localhost', 0), certs=self.certs)
self.server = TLSSNI01Server(("", 0), certs=self.certs)
# pylint: disable=no-member
self.thread = threading.Thread(target=self.server.serve_forever)
self.thread.start()
def tearDown(self):
self.server.shutdown()
self.server.shutdown() # pylint: disable=no-member
self.thread.join()
def test_it(self):
@@ -79,12 +77,13 @@ class HTTP01ServerTest(unittest.TestCase):
from acme.standalone import HTTP01Server
self.server = HTTP01Server(('', 0), resources=self.resources)
# pylint: disable=no-member
self.port = self.server.socket.getsockname()[1]
self.thread = threading.Thread(target=self.server.serve_forever)
self.thread.start()
def tearDown(self):
self.server.shutdown()
self.server.shutdown() # pylint: disable=no-member
self.thread.join()
def test_index(self):
@@ -134,10 +133,8 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
self.address_family = socket.AF_INET
socketserver.TCPServer.__init__(self, *args, **kwargs)
if ipv6:
# NB: On Windows, socket.IPPROTO_IPV6 constant may be missing.
# We use the corresponding value (41) instead.
level = getattr(socket, "IPPROTO_IPV6", 41)
self.socket.setsockopt(level, socket.IPV6_V6ONLY, 1)
# pylint: disable=no-member
self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
try:
self.server_bind()
self.server_activate()
@@ -151,14 +148,14 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
from acme.standalone import BaseDualNetworkedServers
self.assertRaises(socket.error, BaseDualNetworkedServers,
BaseDualNetworkedServersTest.SingleProtocolServer,
('', 0),
("", 0),
socketserver.BaseRequestHandler)
def test_ports_equal(self):
from acme.standalone import BaseDualNetworkedServers
servers = BaseDualNetworkedServers(
BaseDualNetworkedServersTest.SingleProtocolServer,
('', 0),
("", 0),
socketserver.BaseRequestHandler)
socknames = servers.getsocknames()
prev_port = None
@@ -180,7 +177,7 @@ class TLSSNI01DualNetworkedServersTest(unittest.TestCase):
test_util.load_cert('rsa2048_cert.pem'),
)}
from acme.standalone import TLSSNI01DualNetworkedServers
self.servers = TLSSNI01DualNetworkedServers(('localhost', 0), certs=self.certs)
self.servers = TLSSNI01DualNetworkedServers(("", 0), certs=self.certs)
self.servers.serve_forever()
def tearDown(self):
@@ -209,6 +206,7 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
from acme.standalone import HTTP01DualNetworkedServers
self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources)
# pylint: disable=no-member
self.port = self.servers.getsocknames()[0][1]
self.servers.serve_forever()
@@ -261,45 +259,35 @@ class TestSimpleTLSSNI01Server(unittest.TestCase):
shutil.copy(test_util.vector_path('rsa2048_key.pem'),
os.path.join(localhost_dir, 'key.pem'))
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
sock.bind(('', 0))
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.port = sock.getsockname()[1]
from acme.standalone import simple_tls_sni_01_server
self.process = multiprocessing.Process(target=simple_tls_sni_01_server,
args=(['path', '-p', str(self.port)],))
self.thread = threading.Thread(
target=simple_tls_sni_01_server, kwargs={
'cli_args': ('filename',),
'forever': False,
},
)
self.old_cwd = os.getcwd()
os.chdir(self.test_cwd)
def tearDown(self):
os.chdir(self.old_cwd)
if self.process.is_alive():
self.process.terminate()
self.process.join(timeout=5)
# Check that we didn't timeout waiting for the process to
# terminate.
self.assertNotEqual(self.process.exitcode, None)
self.thread.join()
shutil.rmtree(self.test_cwd)
@mock.patch('acme.standalone.TLSSNI01Server.handle_request')
def test_mock(self, handle):
from acme.standalone import simple_tls_sni_01_server
simple_tls_sni_01_server(cli_args=['path', '-p', str(self.port)], forever=False)
self.assertEqual(handle.call_count, 1)
@mock.patch('acme.standalone.logger')
def test_it(self, mock_logger):
# Use a Queue because mock objects aren't thread safe.
q = queue.Queue() # type: queue.Queue[int]
# Add port number to the queue.
mock_logger.info.side_effect = lambda *args: q.put(args[-1])
self.thread.start()
def test_live(self):
self.process.start()
cert = None
for _ in range(50):
time.sleep(0.1)
try:
cert = crypto_util.probe_sni(b'localhost', b'127.0.0.1', self.port)
break
except errors.Error: # pragma: no cover
pass
# After the timeout, an exception is raised if the queue is empty.
port = q.get(timeout=5)
cert = crypto_util.probe_sni(b'localhost', b'0.0.0.0', port)
self.assertEqual(jose.ComparableX509(cert),
test_util.load_comparable_cert('rsa2048_cert.pem'))
test_util.load_comparable_cert(
'rsa2048_cert.pem'))
if __name__ == "__main__":

View File

@@ -4,8 +4,8 @@
"""
import os
import unittest
import pkg_resources
import unittest
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
@@ -92,4 +92,5 @@ def skip_unless(condition, reason): # pragma: no cover
return unittest.skipUnless(condition, reason)
elif condition:
return lambda cls: cls
else:
return lambda cls: None

View File

@@ -16,6 +16,13 @@ Contents:
.. automodule:: acme
:members:
Example client:
.. include:: ../examples/example_client.py
:code: python
Indices and tables
==================

View File

@@ -0,0 +1,47 @@
"""Example script showing how to use acme client API."""
import logging
import os
import pkg_resources
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
import josepy as jose
import OpenSSL
from acme import client
from acme import messages
logging.basicConfig(level=logging.DEBUG)
DIRECTORY_URL = 'https://acme-staging.api.letsencrypt.org/directory'
BITS = 2048 # minimum for Boulder
DOMAIN = 'example1.com' # example.com is ignored by Boulder
# generate_private_key requires cryptography>=0.5
key = jose.JWKRSA(key=rsa.generate_private_key(
public_exponent=65537,
key_size=BITS,
backend=default_backend()))
acme = client.Client(DIRECTORY_URL, key)
regr = acme.register()
logging.info('Auto-accepting TOS: %s', regr.terms_of_service)
acme.agree_to_tos(regr)
logging.debug(regr)
authzr = acme.request_challenges(
identifier=messages.Identifier(typ=messages.IDENTIFIER_FQDN, value=DOMAIN))
logging.debug(authzr)
authzr, authzr_response = acme.poll(authzr)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_ASN1, pkg_resources.resource_string(
'acme', os.path.join('testdata', 'csr.der')))
try:
acme.request_issuance(jose.util.ComparableX509(csr), (authzr,))
except messages.Error as error:
print ("This script is doomed to fail as no authorization "
"challenges are ever solved. Error from server: {0}".format(error))

View File

@@ -1,240 +0,0 @@
"""Example ACME-V2 API for HTTP-01 challenge.
Brief:
This is a complete usage example of the python-acme API.
Limitations of this example:
- Works for only one Domain name
- Performs only HTTP-01 challenge
- Uses ACME-v2
Workflow:
(Account creation)
- Create account key
- Register account and accept TOS
(Certificate actions)
- Select HTTP-01 within offered challenges by the CA server
- Set up http challenge resource
- Set up standalone web server
- Create domain private key and CSR
- Issue certificate
- Renew certificate
- Revoke certificate
(Account update actions)
- Change contact information
- Deactivate Account
"""
from contextlib import contextmanager
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
import OpenSSL
from acme import challenges
from acme import client
from acme import crypto_util
from acme import errors
from acme import messages
from acme import standalone
import josepy as jose
# Constants:
# This is the staging point for ACME-V2 within Let's Encrypt.
DIRECTORY_URL = 'https://acme-staging-v02.api.letsencrypt.org/directory'
USER_AGENT = 'python-acme-example'
# Account key size
ACC_KEY_BITS = 2048
# Certificate private key size
CERT_PKEY_BITS = 2048
# Domain name for the certificate.
DOMAIN = 'client.example.com'
# If you are running Boulder locally, it is possible to configure any port
# number to execute the challenge, but real CA servers will always use port
# 80, as described in the ACME specification.
PORT = 80
# Useful methods and classes:
def new_csr_comp(domain_name, pkey_pem=None):
"""Create certificate signing request."""
if pkey_pem is None:
# Create private key.
pkey = OpenSSL.crypto.PKey()
pkey.generate_key(OpenSSL.crypto.TYPE_RSA, CERT_PKEY_BITS)
pkey_pem = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
pkey)
csr_pem = crypto_util.make_csr(pkey_pem, [domain_name])
return pkey_pem, csr_pem
def select_http01_chall(orderr):
"""Extract authorization resource from within order resource."""
# Authorization Resource: authz.
# This object holds the challenges offered by the server and their status.
authz_list = orderr.authorizations
for authz in authz_list:
# Choosing challenge.
# authz.body.challenges is a set of ChallengeBody objects.
for i in authz.body.challenges:
# Find the supported challenge.
if isinstance(i.chall, challenges.HTTP01):
return i
raise Exception('HTTP-01 challenge was not offered by the CA server.')
@contextmanager
def challenge_server(http_01_resources):
"""Manage standalone server set up and shutdown."""
# Setting up a fake server that binds at PORT and any address.
address = ('', PORT)
try:
servers = standalone.HTTP01DualNetworkedServers(address,
http_01_resources)
# Start client standalone web server.
servers.serve_forever()
yield servers
finally:
# Shutdown client web server and unbind from PORT
servers.shutdown_and_server_close()
def perform_http01(client_acme, challb, orderr):
"""Set up standalone webserver and perform HTTP-01 challenge."""
response, validation = challb.response_and_validation(client_acme.net.key)
resource = standalone.HTTP01RequestHandler.HTTP01Resource(
chall=challb.chall, response=response, validation=validation)
with challenge_server({resource}):
# Let the CA server know that we are ready for the challenge.
client_acme.answer_challenge(challb, response)
# Wait for challenge status and then issue a certificate.
# It is possible to set a deadline time.
finalized_orderr = client_acme.poll_and_finalize(orderr)
return finalized_orderr.fullchain_pem
# Main examples:
def example_http():
"""This example executes the whole process of fulfilling a HTTP-01
challenge for one specific domain.
The workflow consists of:
(Account creation)
- Create account key
- Register account and accept TOS
(Certificate actions)
- Select HTTP-01 within offered challenges by the CA server
- Set up http challenge resource
- Set up standalone web server
- Create domain private key and CSR
- Issue certificate
- Renew certificate
- Revoke certificate
(Account update actions)
- Change contact information
- Deactivate Account
"""
# Create account key
acc_key = jose.JWKRSA(
key=rsa.generate_private_key(public_exponent=65537,
key_size=ACC_KEY_BITS,
backend=default_backend()))
# Register account and accept TOS
net = client.ClientNetwork(acc_key, user_agent=USER_AGENT)
directory = messages.Directory.from_json(net.get(DIRECTORY_URL).json())
client_acme = client.ClientV2(directory, net=net)
# Terms of Service URL is in client_acme.directory.meta.terms_of_service
# Registration Resource: regr
# Creates account with contact information.
email = ('fake@example.com')
regr = client_acme.new_account(
messages.NewRegistration.from_data(
email=email, terms_of_service_agreed=True))
# Create domain private key and CSR
pkey_pem, csr_pem = new_csr_comp(DOMAIN)
# Issue certificate
orderr = client_acme.new_order(csr_pem)
# Select HTTP-01 within offered challenges by the CA server
challb = select_http01_chall(orderr)
# The certificate is ready to be used in the variable "fullchain_pem".
fullchain_pem = perform_http01(client_acme, challb, orderr)
# Renew certificate
_, csr_pem = new_csr_comp(DOMAIN, pkey_pem)
orderr = client_acme.new_order(csr_pem)
challb = select_http01_chall(orderr)
# Performing challenge
fullchain_pem = perform_http01(client_acme, challb, orderr)
# Revoke certificate
fullchain_com = jose.ComparableX509(
OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, fullchain_pem))
try:
client_acme.revoke(fullchain_com, 0) # revocation reason = 0
except errors.ConflictError:
# Certificate already revoked.
pass
# Query registration status.
client_acme.net.account = regr
try:
regr = client_acme.query_registration(regr)
except errors.Error as err:
if err.typ == messages.OLD_ERROR_PREFIX + 'unauthorized' \
or err.typ == messages.ERROR_PREFIX + 'unauthorized':
# Status is deactivated.
pass
raise
# Change contact information
email = 'newfake@example.com'
regr = client_acme.update_registration(
regr.update(
body=regr.body.update(
contact=('mailto:' + email,)
)
)
)
# Deactivate account/registration
regr = client_acme.deactivate_registration(regr)
if __name__ == "__main__":
example_http()
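As a complement to the standalone-server flow above, here is a hedged sketch of a webroot-style variant of perform_http01(): instead of spinning up HTTP01DualNetworkedServers, it writes the validation file under the document root of a web server that is already answering on port 80. The webroot argument and the helper name are hypothetical; the acme calls are the same ones used in the example.

import os

def perform_http01_webroot(client_acme, challb, orderr, webroot):
    """Hypothetical variant of perform_http01() using an existing web server.

    Assumes a server is already serving files from `webroot` on port 80.
    """
    response, validation = challb.response_and_validation(client_acme.net.key)
    # challb.chall.path is "/.well-known/acme-challenge/<token>".
    chall_file = os.path.join(webroot, challb.chall.path.lstrip('/'))
    os.makedirs(os.path.dirname(chall_file), exist_ok=True)  # Python 3 only
    with open(chall_file, 'w') as f:
        f.write(validation)
    # Let the CA server know that we are ready for the challenge.
    client_acme.answer_challenge(challb, response)
    return client_acme.poll_and_finalize(orderr).fullchain_pem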

View File

@@ -3,23 +3,21 @@ from setuptools import find_packages
from setuptools.command.test import test as TestCommand
import sys
version = '0.37.1'
version = '0.26.0.dev0'
# Please update tox.ini when modifying dependency version requirements
install_requires = [
# load_pem_private/public_key (>=0.6)
# rsa_recover_prime_factors (>=0.8)
'cryptography>=1.2.3',
'cryptography>=0.8',
# formerly known as acme.jose:
# 1.1.0+ is required to avoid the warnings described at
# https://github.com/certbot/josepy/issues/13.
'josepy>=1.1.0',
'josepy>=1.0.0',
# Connection.set_tlsext_host_name (>=0.13)
'mock',
'PyOpenSSL>=0.13.1',
'PyOpenSSL>=0.13',
'pyrfc3339',
'pytz',
'requests[security]>=2.6.0', # security extras added in 2.4.1
'requests[security]>=2.4.1', # security extras added in 2.4.1
'requests-toolbelt>=0.3.0',
'setuptools',
'six>=1.9.0', # needed for python_2_unicode_compatible
@@ -36,7 +34,6 @@ docs_extras = [
'sphinx_rtd_theme',
]
class PyTest(TestCommand):
user_options = []
@@ -51,7 +48,6 @@ class PyTest(TestCommand):
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
name='acme',
version=version,
@@ -62,7 +58,7 @@ setup(
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
@@ -72,7 +68,6 @@ setup(
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],
@@ -84,7 +79,7 @@ setup(
'dev': dev_extras,
'docs': docs_extras,
},
test_suite='acme',
tests_require=["pytest"],
test_suite='acme',
cmdclass={"test": PyTest},
)

View File

@@ -1,47 +0,0 @@
image: Visual Studio 2015
environment:
matrix:
- TOXENV: py35
- TOXENV: py37-cover
- TOXENV: integration-certbot
branches:
only:
# apache-parser-v2 is a temporary branch for doing work related to
# rewriting the parser in the Apache plugin.
- apache-parser-v2
- master
- /^\d+\.\d+\.x$/ # Version branches like X.X.X
- /^test-.*$/
init:
# Since master can only receive commits from PRs that have already been tested, the following
# condition skips all jobs except the coverage one for commits pushed to master.
- ps: |
if (-Not $Env:APPVEYOR_PULL_REQUEST_NUMBER -And $Env:APPVEYOR_REPO_BRANCH -Eq 'master' `
-And -Not ($Env:TOXENV -Like '*-cover'))
{ $Env:APPVEYOR_SKIP_FINALIZE_ON_EXIT = 'true'; Exit-AppVeyorBuild }
install:
# Use Python 3.7 by default
- SET PATH=C:\\Python37;C:\\Python37\\Scripts;%PATH%
# Using 4 processes is proven to be the most efficient integration tests config for AppVeyor
- IF %TOXENV%==integration-certbot SET PYTEST_ADDOPTS=--numprocesses=4
# Check env
- python --version
# Upgrade pip to avoid warnings
- python -m pip install --upgrade pip
# Ready to install tox and coverage
# tools/pip_install.py is used to pin packages to a known working version.
- python tools\\pip_install.py tox codecov
build: off
test_script:
- set TOX_TESTENV_PASSENV=APPVEYOR
# Test env is set by TOXENV env variable
- tox
on_success:
- if exist .coverage codecov -F windows

View File

@@ -1,9 +1,8 @@
""" Utility functions for certbot-apache plugin """
import binascii
import os
from certbot import util
from certbot.compat import os
def get_mod_deps(mod_name):
"""Get known module dependencies.

View File

@@ -0,0 +1,207 @@
"""Class of Augeas Configurators."""
import logging
from certbot import errors
from certbot.plugins import common
from certbot_apache import constants
logger = logging.getLogger(__name__)
class AugeasConfigurator(common.Installer):
"""Base Augeas Configurator class.
:ivar config: Configuration.
:type config: :class:`~certbot.interfaces.IConfig`
:ivar aug: Augeas object
:type aug: :class:`augeas.Augeas`
:ivar str save_notes: Human-readable configuration change notes
:ivar reverter: saves and reverts checkpoints
:type reverter: :class:`certbot.reverter.Reverter`
"""
def __init__(self, *args, **kwargs):
super(AugeasConfigurator, self).__init__(*args, **kwargs)
# Placeholder for augeas
self.aug = None
self.save_notes = ""
def init_augeas(self):
""" Initialize the actual Augeas instance """
import augeas
self.aug = augeas.Augeas(
# specify a directory to load our preferred lens from
loadpath=constants.AUGEAS_LENS_DIR,
# Do not save backup (we do it ourselves), do not load
# anything by default
flags=(augeas.Augeas.NONE |
augeas.Augeas.NO_MODL_AUTOLOAD |
augeas.Augeas.ENABLE_SPAN))
# See if any temporary changes need to be recovered
# This needs to occur before VirtualHost objects are setup...
# because this will change the underlying configuration and potential
# vhosts
self.recovery_routine()
def check_parsing_errors(self, lens):
"""Verify Augeas can parse all of the lens files.
:param str lens: lens to check for errors
:raises .errors.PluginError: If there has been an error in parsing with
the specified lens.
"""
error_files = self.aug.match("/augeas//error")
for path in error_files:
# Check to see if it was an error resulting from the use of
# the httpd lens
lens_path = self.aug.get(path + "/lens")
# As aug.get may return null
if lens_path and lens in lens_path:
msg = (
"There has been an error in parsing the file {0} on line {1}: "
"{2}".format(
# Strip off /augeas/files and /error
path[13:len(path) - 6],
self.aug.get(path + "/line"),
self.aug.get(path + "/message")))
raise errors.PluginError(msg)
def ensure_augeas_state(self):
"""Makes sure that all Augeas dom changes are written to files to avoid
loss of configuration directives when doing additional augeas parsing,
causing a possible augeas.load() resulting dom reset
"""
if self.unsaved_files():
self.save_notes += "(autosave)"
self.save()
def unsaved_files(self):
"""Lists files that have modified Augeas DOM but the changes have not
been written to the filesystem yet, used by `self.save()` and
ApacheConfigurator to check the file state.
:raises .errors.PluginError: If there was an error in Augeas, in
an attempt to save the configuration, or an error creating a
checkpoint
:returns: `set` of unsaved files
"""
save_state = self.aug.get("/augeas/save")
self.aug.set("/augeas/save", "noop")
# Existing Errors
ex_errs = self.aug.match("/augeas//error")
try:
# This is a noop save
self.aug.save()
except (RuntimeError, IOError):
self._log_save_errors(ex_errs)
# Erase Save Notes
self.save_notes = ""
raise errors.PluginError(
"Error saving files, check logs for more info.")
# Return the original save method
self.aug.set("/augeas/save", save_state)
# Retrieve list of modified files
# Note: noop saves can cause a file to be listed twice; a set is used
# to remove the duplicates. This is a known augeas 0.10 error.
save_paths = self.aug.match("/augeas/events/saved")
save_files = set()
if save_paths:
for path in save_paths:
save_files.add(self.aug.get(path)[6:])
return save_files
def save(self, title=None, temporary=False):
"""Saves all changes to the configuration files.
This function first checks for save errors; if none are found, all
configuration changes made are saved according to the function
parameters. If an exception is raised, a new checkpoint was not
created.
:param str title: The title of the save. If a title is given, the
configuration will be saved as a new checkpoint and put in a
timestamped directory.
:param bool temporary: Indicates whether the changes made will
be quickly reversed in the future (ie. challenges)
"""
save_files = self.unsaved_files()
if save_files:
self.add_to_checkpoint(save_files,
self.save_notes, temporary=temporary)
self.save_notes = ""
self.aug.save()
# Force reload if files were modified
# This is needed to recalculate augeas directive span
if save_files:
for sf in save_files:
self.aug.remove("/files/"+sf)
self.aug.load()
if title and not temporary:
self.finalize_checkpoint(title)
def _log_save_errors(self, ex_errs):
"""Log errors due to bad Augeas save.
:param list ex_errs: Existing errors before save
"""
# Check for the root of save problems
new_errs = self.aug.match("/augeas//error")
# logger.error("During Save - %s", mod_conf)
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
", ".join(err[13:len(err) - 6] for err in new_errs
# Only new errors caused by recent save
if err not in ex_errs), self.save_notes)
# Wrapper functions for Reverter class
def recovery_routine(self):
"""Revert all previously modified files.
Reverts all modified files that have not been saved as a checkpoint
:raises .errors.PluginError: If unable to recover the configuration
"""
super(AugeasConfigurator, self).recovery_routine()
# Need to reload configuration after these changes take effect
self.aug.load()
def revert_challenge_config(self):
"""Used to cleanup challenge configurations.
:raises .errors.PluginError: If unable to revert the challenge config.
"""
self.revert_temporary_config()
self.aug.load()
def rollback_checkpoints(self, rollback=1):
"""Rollback saved checkpoints.
:param int rollback: Number of checkpoints to revert
:raises .errors.PluginError: If there is a problem with the input or
the function is unable to correctly revert the configuration
"""
super(AugeasConfigurator, self).rollback_checkpoints(rollback)
self.aug.load()
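The noop-save trick used by unsaved_files() above is easier to follow in isolation. A minimal sketch with the python-augeas bindings, outside of Certbot; it assumes a standard /etc/hosts that the stock Hosts lens can parse:

import augeas

aug = augeas.Augeas()  # default flags, stock lenses autoloaded

# Make an in-memory change that has not been written to disk yet.
aug.set("/files/etc/hosts/1/alias[last()+1]", "example.local")

# Switch Augeas into "noop" save mode, save, and inspect which files
# *would* have been written; /augeas/events/saved holds one node per file.
previous_mode = aug.get("/augeas/save")
aug.set("/augeas/save", "noop")
aug.save()
pending = [aug.get(p) for p in aug.match("/augeas/events/saved")]
aug.set("/augeas/save", previous_mode)

print(pending)  # e.g. ['/files/etc/hosts']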

View File

@@ -44,134 +44,67 @@ autoload xfm
*****************************************************************)
let dels (s:string) = del s s
(* The continuation sequence that indicates that we should consider the
* next line part of the current line *)
let cont = /\\\\\r?\n/
(* Whitespace within a line: space, tab, and the continuation sequence *)
let ws = /[ \t]/ | cont
(* Any possible character - '.' does not match \n *)
let any = /(.|\n)/
(* Any character preceded by a backslash *)
let esc_any = /\\\\(.|\n)/
(* Newline sequence - both for Unix and DOS newlines *)
let nl = /\r?\n/
(* Whitespace at the end of a line *)
let eol = del (ws* . nl) "\n"
(* deal with continuation lines *)
let sep_spc = del ws+ " "
let sep_osp = del ws* ""
let sep_eq = del (ws* . "=" . ws*) "="
let sep_spc = del /([ \t]+|[ \t]*\\\\\r?\n[ \t]*)+/ " "
let sep_osp = del /([ \t]*|[ \t]*\\\\\r?\n[ \t]*)*/ ""
let sep_eq = del /[ \t]*=[ \t]*/ "="
let nmtoken = /[a-zA-Z:_][a-zA-Z0-9:_.-]*/
let word = /[a-z][a-z0-9._-]*/i
(* A complete line that is either just whitespace or a comment that only
* contains whitespace *)
let empty = [ del (ws* . /#?/ . ws* . nl) "\n" ]
let eol = Util.doseol
let empty = Util.empty_dos
let indent = Util.indent
(* A comment that is not just whitespace. We define it in terms of the
* things that are not allowed as part of such a comment:
* 1) Starts with whitespace
* 2) Ends with whitespace, a backslash or \r
* 3) Unescaped newlines
*)
let comment =
let comment_start = del (ws* . "#" . ws* ) "# " in
let unesc_eol = /[^\]?/ . nl in
let w = /[^\t\n\r \\]/ in
let r = /[\r\\]/ in
let s = /[\t\r ]/ in
(*
* we'd like to write
* let b = /\\\\/ in
* let t = /[\t\n\r ]/ in
* let x = b . (t? . (s|w)* ) in
* but the definition of b depends on commit 244c0edd in 1.9.0 and
* would make the lens unusable with versions before 1.9.0. So we write
* x out which works in older versions, too
*)
let x = /\\\\[\t\n\r ]?[^\n\\]*/ in
let line = ((r . s* . w|w|r) . (s|w)* . x*|(r.s* )?).w.(s*.w)* in
[ label "#comment" . comment_start . store line . eol ]
let comment_val_re = /([^ \t\r\n](.|\\\\\r?\n)*[^ \\\t\r\n]|[^ \t\r\n])/
let comment = [ label "#comment" . del /[ \t]*#[ \t]*/ "# "
. store comment_val_re . eol ]
(* borrowed from shellvars.aug *)
let char_arg_dir = /([^\\ '"{\t\r\n]|[^ '"{\t\r\n]+[^\\ \t\r\n])|\\\\"|\\\\'|\\\\ /
let char_arg_sec = /([^\\ '"\t\r\n>]|[^ '"\t\r\n>]+[^\\ \t\r\n>])|\\\\"|\\\\'|\\\\ /
let char_arg_wl = /([^\\ '"},\t\r\n]|[^ '"},\t\r\n]+[^\\ '"},\t\r\n])/
let cdot = /\\\\./
let cl = /\\\\\n/
let dquot =
let no_dquot = /[^"\\\r\n]/
in /"/ . (no_dquot|esc_any)* . /"/
in /"/ . (no_dquot|cdot|cl)* . /"/
let dquot_msg =
let no_dquot = /([^ \t"\\\r\n]|[^"\\\r\n]+[^ \t"\\\r\n])/
in /"/ . (no_dquot|esc_any)* . no_dquot
in /"/ . (no_dquot|cdot|cl)*
let squot =
let no_squot = /[^'\\\r\n]/
in /'/ . (no_squot|esc_any)* . /'/
in /'/ . (no_squot|cdot|cl)* . /'/
let comp = /[<>=]?=/
(******************************************************************
* Attributes
*****************************************************************)
(* The arguments for a directive come in two flavors: quoted with single or
* double quotes, or bare. Bare arguments may not start with a single or
* double quote; since we also treat "word lists" special, i.e. lists
* enclosed in curly braces, bare arguments may not start with those,
* either.
*
* Bare arguments may not contain unescaped spaces, but we allow escaping
* with '\\'. Quoted arguments can contain anything, though the quote must
* be escaped with '\\'.
*)
let bare = /([^{"' \t\n\r]|\\\\.)([^ \t\n\r]|\\\\.)*[^ \t\n\r\\]|[^{"' \t\n\r\\]/
let arg_quoted = [ label "arg" . store (dquot|squot) ]
let arg_bare = [ label "arg" . store bare ]
let arg_dir = [ label "arg" . store (char_arg_dir+|dquot|squot) ]
(* message argument starts with " but ends at EOL *)
let arg_dir_msg = [ label "arg" . store dquot_msg ]
let arg_sec = [ label "arg" . store (char_arg_sec+|comp|dquot|squot) ]
let arg_wl = [ label "arg" . store (char_arg_wl+|dquot|squot) ]
(* comma-separated wordlist as permitted in the SSLRequire directive *)
let arg_wordlist =
let wl_start = dels "{" in
let wl_end = dels "}" in
let wl_start = Util.del_str "{" in
let wl_end = Util.del_str "}" in
let wl_sep = del /[ \t]*,[ \t]*/ ", "
in [ label "wordlist" . wl_start . arg_wl . (wl_sep . arg_wl)* . wl_end ]
let argv (l:lens) = l . (sep_spc . l)*
(* the arguments of a directive. We use this once we have parsed the name
* of the directive, and the space right after it. When dir_args is used,
* we also know that we have at least one argument. We need to be careful
* with the spacing between arguments: quoted arguments and word lists do
* not need to have space between them, but bare arguments do.
*
* Apache apparently is also happy if the last argument starts with a double
* quote, but has no corresponding closing double quote, which is what
* arg_dir_msg handles
*)
let dir_args =
let arg_nospc = arg_quoted|arg_wordlist in
(arg_bare . sep_spc | arg_nospc . sep_osp)* . (arg_bare|arg_nospc|arg_dir_msg)
let directive =
[ indent . label "directive" . store word . (sep_spc . dir_args)? . eol ]
let arg_sec = [ label "arg" . store (char_arg_sec+|comp|dquot|squot) ]
(* arg_dir_msg may be the last or only argument *)
let dir_args = (argv (arg_dir|arg_wordlist) . (sep_spc . arg_dir_msg)?) | arg_dir_msg
in [ indent . label "directive" . store word . (sep_spc . dir_args)? . eol ]
let section (body:lens) =
(* opt_eol includes empty lines *)
let opt_eol = del /([ \t]*#?[ \t]*\r?\n)*/ "\n" in
let opt_eol = del /([ \t]*#?\r?\n)*/ "\n" in
let inner = (sep_spc . argv arg_sec)? . sep_osp .
dels ">" . opt_eol . ((body|comment) . (body|empty|comment)*)? .
indent . dels "</" in
@@ -200,7 +133,6 @@ let filter = (incl "/etc/apache2/apache2.conf") .
(incl "/etc/httpd/conf.d/*.conf") .
(incl "/etc/httpd/httpd.conf") .
(incl "/etc/httpd/conf/httpd.conf") .
(incl "/etc/httpd/conf.modules.d/*.conf") .
Util.stdexcl
let xfm = transform lns filter
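To see what this lens produces in practice, here is a hedged sketch that loads it explicitly against a single configuration file with python-augeas, roughly the way the configurator points Augeas at a lens directory with NO_MODL_AUTOLOAD. Both paths below are assumptions; adjust them to your system:

import augeas

LENS_DIR = "/usr/share/augeas/lenses/dist"   # assumed location of httpd.aug
CONF_FILE = "/etc/apache2/apache2.conf"      # assumed Apache config file

aug = augeas.Augeas(
    loadpath=LENS_DIR,
    flags=augeas.Augeas.NO_MODL_AUTOLOAD)    # load nothing by default

# Register the Httpd lens for this one file, then parse it.
aug.set("/augeas/load/Httpd/lens", "Httpd.lns")
aug.set("/augeas/load/Httpd/incl", CONF_FILE)
aug.load()

# Each directive becomes a "directive" node whose value is the directive
# name, with its arguments stored in "arg" child nodes.
for path in aug.match("/files%s//directive" % CONF_FILE):
    name = aug.get(path)
    args = [aug.get(a) for a in aug.match(path + "/arg")]
    print(name, args)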

View File

@@ -1,40 +1,39 @@
"""Apache Configurator."""
"""Apache Configuration based off of Augeas Configurator."""
# pylint: disable=too-many-lines
import copy
import fnmatch
import logging
import os
import pkg_resources
import re
import six
import socket
import time
from collections import defaultdict
import pkg_resources
import six
import zope.component
import zope.interface
from acme import challenges
from acme.magic_typing import DefaultDict, Dict, List, Set, Union # pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import Any, DefaultDict, Dict, List, Set, Union # pylint: disable=unused-import, no-name-in-module
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge # pylint: disable=unused-import
from certbot.compat import filesystem
from certbot.compat import os
from certbot.plugins import common
from certbot.plugins.util import path_surgery
from certbot.plugins.enhancements import AutoHSTSEnhancement
from certbot_apache import apache_util
from certbot_apache import augeas_configurator
from certbot_apache import constants
from certbot_apache import display_ops
from certbot_apache import http_01
from certbot_apache import obj
from certbot_apache import parser
from certbot_apache import tls_sni_01
from collections import defaultdict
logger = logging.getLogger(__name__)
@@ -70,10 +69,13 @@ logger = logging.getLogger(__name__)
@zope.interface.implementer(interfaces.IAuthenticator, interfaces.IInstaller)
@zope.interface.provider(interfaces.IPluginFactory)
class ApacheConfigurator(common.Installer):
class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
# pylint: disable=too-many-instance-attributes,too-many-public-methods
"""Apache configurator.
State of Configurator: This code has been tested and built for Ubuntu
14.04 Apache 2.4 and it works for Ubuntu 12.04 Apache 2.2
:ivar config: Configuration.
:type config: :class:`~certbot.interfaces.IConfig`
@@ -88,92 +90,55 @@ class ApacheConfigurator(common.Installer):
"""
description = "Apache Web Server plugin"
if os.environ.get("CERTBOT_DOCS") == "1":
description += ( # pragma: no cover
" (Please note that the default values of the Apache plugin options"
" change depending on the operating system Certbot is run on.)"
)
description = "Apache Web Server plugin - Beta"
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/sites-available",
vhost_files="*",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
apache_cmd="apache2ctl",
restart_cmd=['apache2ctl', 'graceful'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_mods=False,
handle_sites=False,
challenge_location="/etc/apache2",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", "options-ssl-apache.conf")
)
def option(self, key):
"""Get a value from options"""
return self.options.get(key)
def _prepare_options(self):
"""
Set the values possibly changed by command line parameters to
OS_DEFAULTS constant dictionary
"""
opts = ["enmod", "dismod", "le_vhost_ext", "server_root", "vhost_root",
"logs_root", "challenge_location", "handle_modules", "handle_sites",
"ctl"]
for o in opts:
# Config options use dashes instead of underscores
if self.conf(o.replace("_", "-")) is not None:
self.options[o] = self.conf(o.replace("_", "-"))
else:
self.options[o] = self.OS_DEFAULTS[o]
# Special cases
self.options["version_cmd"][0] = self.option("ctl")
self.options["restart_cmd"][0] = self.option("ctl")
self.options["conftest_cmd"][0] = self.option("ctl")
def constant(self, key):
"""Get constant for OS_DEFAULTS"""
return self.OS_DEFAULTS.get(key)
@classmethod
def add_parser_arguments(cls, add):
# When adding, modifying or deleting command line arguments, be sure to
# include the changes in the list used in method _prepare_options() to
# ensure consistent behavior.
# Respect CERTBOT_DOCS environment variable and use default values from
# base class regardless of the underlying distribution (overrides).
if os.environ.get("CERTBOT_DOCS") == "1":
DEFAULTS = ApacheConfigurator.OS_DEFAULTS
else:
# cls.OS_DEFAULTS can be distribution specific, see override classes
DEFAULTS = cls.OS_DEFAULTS
add("enmod", default=DEFAULTS["enmod"],
help="Path to the Apache 'a2enmod' binary")
add("dismod", default=DEFAULTS["dismod"],
help="Path to the Apache 'a2dismod' binary")
add("le-vhost-ext", default=DEFAULTS["le_vhost_ext"],
help="SSL vhost configuration extension")
add("server-root", default=DEFAULTS["server_root"],
help="Apache server root directory")
add("enmod", default=cls.OS_DEFAULTS["enmod"],
help="Path to the Apache 'a2enmod' binary.")
add("dismod", default=cls.OS_DEFAULTS["dismod"],
help="Path to the Apache 'a2dismod' binary.")
add("le-vhost-ext", default=cls.OS_DEFAULTS["le_vhost_ext"],
help="SSL vhost configuration extension.")
add("server-root", default=cls.OS_DEFAULTS["server_root"],
help="Apache server root directory.")
add("vhost-root", default=None,
help="Apache server VirtualHost configuration root")
add("logs-root", default=DEFAULTS["logs_root"],
add("logs-root", default=cls.OS_DEFAULTS["logs_root"],
help="Apache server logs directory")
add("challenge-location",
default=DEFAULTS["challenge_location"],
help="Directory path for challenge configuration")
add("handle-modules", default=DEFAULTS["handle_modules"],
help="Let installer handle enabling required modules for you " +
default=cls.OS_DEFAULTS["challenge_location"],
help="Directory path for challenge configuration.")
add("handle-modules", default=cls.OS_DEFAULTS["handle_mods"],
help="Let installer handle enabling required modules for you. " +
"(Only Ubuntu/Debian currently)")
add("handle-sites", default=DEFAULTS["handle_sites"],
help="Let installer handle enabling sites for you " +
add("handle-sites", default=cls.OS_DEFAULTS["handle_sites"],
help="Let installer handle enabling sites for you. " +
"(Only Ubuntu/Debian currently)")
add("ctl", default=DEFAULTS["ctl"],
help="Full path to Apache control script")
util.add_deprecated_argument(add, argument_name="ctl", nargs=1)
util.add_deprecated_argument(
add, argument_name="init-script", nargs=1)
@@ -198,15 +163,12 @@ class ApacheConfigurator(common.Installer):
self._enhanced_vhosts = defaultdict(set) # type: DefaultDict[str, Set[obj.VirtualHost]]
# Temporary state for AutoHSTS enhancement
self._autohsts = {} # type: Dict[str, Dict[str, Union[int, float]]]
# Reverter save notes
self.save_notes = ""
# These will be set in the prepare function
self._prepared = False
self.parser = None
self.version = version
self.vhosts = None
self.options = copy.deepcopy(self.OS_DEFAULTS)
self.vhostroot = None
self._enhance_func = {"redirect": self._enable_redirect,
"ensure-http-header": self._set_http_header,
"staple-ocsp": self._enable_ocsp_stapling}
@@ -214,13 +176,15 @@ class ApacheConfigurator(common.Installer):
@property
def mod_ssl_conf(self):
"""Full absolute path to SSL configuration file."""
return os.path.join(self.config.config_dir, constants.MOD_SSL_CONF_DEST)
return os.path.join(self.config.config_dir,
constants.MOD_SSL_CONF_DEST)
@property
def updated_mod_ssl_conf_digest(self):
"""Full absolute path to digest of updated SSL configuration file."""
return os.path.join(self.config.config_dir, constants.UPDATED_MOD_SSL_CONF_DIGEST)
def prepare(self):
"""Prepare the authenticator/installer.
@@ -230,10 +194,18 @@ class ApacheConfigurator(common.Installer):
:raises .errors.PluginError: If there is any other error
"""
self._prepare_options()
# Perform the actual Augeas initialization to be able to react
try:
self.init_augeas()
except ImportError:
raise errors.NoInstallationError("Problem in Augeas installation")
# Verify Apache is installed
self._verify_exe_availability(self.option("ctl"))
restart_cmd = self.constant("restart_cmd")[0]
if not util.exe_exists(restart_cmd):
if not path_surgery(restart_cmd):
raise errors.NoInstallationError(
'Cannot find Apache control command {0}'.format(restart_cmd))
# Make sure configuration is valid
self.config_test()
@@ -245,16 +217,24 @@ class ApacheConfigurator(common.Installer):
'.'.join(str(i) for i in self.version))
if self.version < (2, 2):
raise errors.NotSupportedError(
"Apache Version {0} not supported.".format(str(self.version)))
"Apache Version %s not supported.", str(self.version))
if not self._check_aug_version():
raise errors.NotSupportedError(
"Apache plugin support requires libaugeas0 and augeas-lenses "
"version 1.2.0 or higher, please make sure you have you have "
"those installed.")
# Parse vhost-root if defined on cli
if not self.conf("vhost-root"):
self.vhostroot = self.constant("vhost_root")
else:
self.vhostroot = os.path.abspath(self.conf("vhost-root"))
# Recover from previous crash before Augeas initialization to have the
# correct parse tree from the get go.
self.recovery_routine()
# Perform the actual Augeas initialization to be able to react
self.parser = self.get_parser()
# Check for errors in parsing files with Augeas
self.parser.check_parsing_errors("httpd.aug")
self.check_parsing_errors("httpd.aug")
# Get all of the available vhosts
self.vhosts = self.get_virtual_hosts()
@@ -264,88 +244,31 @@ class ApacheConfigurator(common.Installer):
# Prevent two Apache plugins from modifying a config at once
try:
util.lock_dir_until_exit(self.option("server_root"))
util.lock_dir_until_exit(self.conf("server-root"))
except (OSError, errors.LockError):
logger.debug("Encountered error:", exc_info=True)
raise errors.PluginError(
"Unable to create a lock file in {0}. Are you running"
" Certbot with sufficient privileges to modify your"
" Apache configuration?".format(self.option("server_root")))
self._prepared = True
"Unable to lock %s", self.conf("server-root"))
def save(self, title=None, temporary=False):
"""Saves all changes to the configuration files.
def _check_aug_version(self):
""" Checks that we have recent enough version of libaugeas.
If augeas version is recent enough, it will support case insensitive
regexp matching"""
This function first checks for save errors, if none are found,
all configuration changes made will be saved. According to the
function parameters. If an exception is raised, a new checkpoint
was not created.
:param str title: The title of the save. If a title is given, the
configuration will be saved as a new checkpoint and put in a
timestamped directory.
:param bool temporary: Indicates whether the changes made will
be quickly reversed in the future (ie. challenges)
"""
save_files = self.parser.unsaved_files()
if save_files:
self.add_to_checkpoint(save_files,
self.save_notes, temporary=temporary)
# Handle the parser specific tasks
self.parser.save(save_files)
if title and not temporary:
self.finalize_checkpoint(title)
def recovery_routine(self):
"""Revert all previously modified files.
Reverts all modified files that have not been saved as a checkpoint
:raises .errors.PluginError: If unable to recover the configuration
"""
super(ApacheConfigurator, self).recovery_routine()
# Reload configuration after these changes take effect if needed
# ie. ApacheParser has been initialized.
if self.parser:
# TODO: wrap into non-implementation specific parser interface
self.parser.aug.load()
def revert_challenge_config(self):
"""Used to cleanup challenge configurations.
:raises .errors.PluginError: If unable to revert the challenge config.
"""
self.revert_temporary_config()
self.parser.aug.load()
def rollback_checkpoints(self, rollback=1):
"""Rollback saved checkpoints.
:param int rollback: Number of checkpoints to revert
:raises .errors.PluginError: If there is a problem with the input or
the function is unable to correctly revert the configuration
"""
super(ApacheConfigurator, self).rollback_checkpoints(rollback)
self.parser.aug.load()
def _verify_exe_availability(self, exe):
"""Checks availability of Apache executable"""
if not util.exe_exists(exe):
if not path_surgery(exe):
raise errors.NoInstallationError(
'Cannot find Apache executable {0}'.format(exe))
self.aug.set("/test/path/testing/arg", "aRgUMeNT")
try:
matches = self.aug.match(
"/test//*[self::arg=~regexp('argument', 'i')]")
except RuntimeError:
self.aug.remove("/test/path")
return False
self.aug.remove("/test/path")
return matches
def get_parser(self):
"""Initializes the ApacheParser"""
# If user provided vhost_root value in command line, use it
return parser.ApacheParser(
self.option("server_root"), self.conf("vhost-root"),
self.aug, self.conf("server-root"), self.conf("vhost-root"),
self.version, configurator=self)
def _wildcard_domain(self, domain):
@@ -432,7 +355,7 @@ class ApacheConfigurator(common.Installer):
"""
if len(name.split(".")) == len(domain.split(".")):
return fnmatch.fnmatch(name, domain)
return None
def _choose_vhosts_wildcard(self, domain, create_ssl=True):
"""Prompts user to choose vhosts to install a wildcard certificate for"""
@@ -478,6 +401,7 @@ class ApacheConfigurator(common.Installer):
self._wildcard_vhosts[domain] = return_vhosts
return return_vhosts
def _deploy_cert(self, vhost, cert_path, key_path, chain_path, fullchain_path):
"""
Helper function for deploy_cert() that handles the actual deployment
@@ -485,6 +409,8 @@ class ApacheConfigurator(common.Installer):
domain originally passed for deploy_cert(). This is especially true
with wildcard certificates
"""
# This is done first so that ssl module is enabled and cert_path,
# cert_key... can all be parsed appropriately
self.prepare_server_https("443")
@@ -524,8 +450,8 @@ class ApacheConfigurator(common.Installer):
# install SSLCertificateFile, SSLCertificateKeyFile,
# and SSLCertificateChainFile directives
set_cert_path = cert_path
self.parser.aug.set(path["cert_path"][-1], cert_path)
self.parser.aug.set(path["cert_key"][-1], key_path)
self.aug.set(path["cert_path"][-1], cert_path)
self.aug.set(path["cert_key"][-1], key_path)
if chain_path is not None:
self.parser.add_dir(vhost.path,
"SSLCertificateChainFile", chain_path)
@@ -537,8 +463,8 @@ class ApacheConfigurator(common.Installer):
raise errors.PluginError("Please provide the --fullchain-path "
"option pointing to your full chain file")
set_cert_path = fullchain_path
self.parser.aug.set(path["cert_path"][-1], fullchain_path)
self.parser.aug.set(path["cert_key"][-1], key_path)
self.aug.set(path["cert_path"][-1], fullchain_path)
self.aug.set(path["cert_key"][-1], key_path)
# Enable the new vhost if needed
if not vhost.enabled:
@@ -624,9 +550,8 @@ class ApacheConfigurator(common.Installer):
self.assoc[target_name] = vhost
return vhost
def domain_in_names(self, names, target_name):
"""Checks if target domain is covered by one or more of the provided
names. The target name is matched by wildcard as well as exact match.
def included_in_wildcard(self, names, target_name):
"""Is target_name covered by a wildcard?
:param names: server aliases
:type names: `collections.Iterable` of `str`
@@ -697,7 +622,7 @@ class ApacheConfigurator(common.Installer):
names = vhost.get_names()
if target_name in names:
points = 3
elif self.domain_in_names(names, target_name):
elif self.included_in_wildcard(names, target_name):
points = 2
elif any(addr.get_addr() == target_name for addr in vhost.addrs):
points = 1
@@ -756,7 +681,7 @@ class ApacheConfigurator(common.Installer):
if name:
all_names.add(name)
if vhost_macro:
if len(vhost_macro) > 0:
zope.component.getUtility(interfaces.IDisplay).notification(
"Apache mod_macro seems to be in use in file(s):\n{0}"
"\n\nUnfortunately mod_macro is not yet supported".format(
@@ -838,7 +763,7 @@ class ApacheConfigurator(common.Installer):
"""
addrs = set()
try:
args = self.parser.aug.match(path + "/arg")
args = self.aug.match(path + "/arg")
except RuntimeError:
logger.warning("Encountered a problem while parsing file: %s, skipping", path)
return None
@@ -856,7 +781,7 @@ class ApacheConfigurator(common.Installer):
is_ssl = True
filename = apache_util.get_file_path(
self.parser.aug.get("/augeas/files%s/path" % apache_util.get_file_path(path)))
self.aug.get("/augeas/files%s/path" % apache_util.get_file_path(path)))
if filename is None:
return None
@@ -886,7 +811,7 @@ class ApacheConfigurator(common.Installer):
# Make a list of parser paths because the parser_paths
# dictionary may be modified during the loop.
for vhost_path in list(self.parser.parser_paths):
paths = self.parser.aug.match(
paths = self.aug.match(
("/files%s//*[label()=~regexp('%s')]" %
(vhost_path, parser.case_i("VirtualHost"))))
paths = [path for path in paths if
@@ -896,7 +821,7 @@ class ApacheConfigurator(common.Installer):
if not new_vhost:
continue
internal_path = apache_util.get_internal_aug_path(new_vhost.path)
realpath = filesystem.realpath(new_vhost.filep)
realpath = os.path.realpath(new_vhost.filep)
if realpath not in file_paths:
file_paths[realpath] = new_vhost.filep
internal_paths[realpath].add(internal_path)
@@ -1102,7 +1027,6 @@ class ApacheConfigurator(common.Installer):
# Ugly but takes care of protocol def, eg: 1.1.1.1:443 https
if listen.split(":")[-1].split(" ")[0] == port:
return True
return None
def prepare_https_modules(self, temp):
"""Helper method for prepare_server_https, taking care of enabling
@@ -1111,19 +1035,36 @@ class ApacheConfigurator(common.Installer):
:param boolean temp: If the change is temporary
"""
if self.option("handle_modules"):
if self.conf("handle-modules"):
if self.version >= (2, 4) and ("socache_shmcb_module" not in
self.parser.modules):
self.enable_mod("socache_shmcb", temp=temp)
if "ssl_module" not in self.parser.modules:
self.enable_mod("ssl", temp=temp)
def make_addrs_sni_ready(self, addrs):
"""Checks to see if the server is ready for SNI challenges.
:param addrs: Addresses to check SNI compatibility
:type addrs: :class:`~certbot_apache.obj.Addr`
"""
# Version 2.4 and later are automatically SNI ready.
if self.version >= (2, 4):
return
for addr in addrs:
if not self.is_name_vhost(addr):
logger.debug("Setting VirtualHost at %s to be a name "
"based virtual host", addr)
self.add_name_vhost(addr)
def make_vhost_ssl(self, nonssl_vhost): # pylint: disable=too-many-locals
"""Makes an ssl_vhost version of a nonssl_vhost.
Duplicates vhost and adds default ssl options
New vhost will reside as (nonssl_vhost.path) +
``self.option("le_vhost_ext")``
``self.constant("le_vhost_ext")``
.. note:: This function saves the configuration
@@ -1140,16 +1081,16 @@ class ApacheConfigurator(common.Installer):
avail_fp = nonssl_vhost.filep
ssl_fp = self._get_ssl_vhost_path(avail_fp)
orig_matches = self.parser.aug.match("/files%s//* [label()=~regexp('%s')]" %
orig_matches = self.aug.match("/files%s//* [label()=~regexp('%s')]" %
(self._escape(ssl_fp),
parser.case_i("VirtualHost")))
self._copy_create_ssl_vhost_skeleton(nonssl_vhost, ssl_fp)
# Reload augeas to take into account the new vhost
self.parser.aug.load()
self.aug.load()
# Get Vhost augeas path for new vhost
new_matches = self.parser.aug.match("/files%s//* [label()=~regexp('%s')]" %
new_matches = self.aug.match("/files%s//* [label()=~regexp('%s')]" %
(self._escape(ssl_fp),
parser.case_i("VirtualHost")))
@@ -1160,7 +1101,7 @@ class ApacheConfigurator(common.Installer):
# Make Augeas aware of the new vhost
self.parser.parse_file(ssl_fp)
# Try to search again
new_matches = self.parser.aug.match(
new_matches = self.aug.match(
"/files%s//* [label()=~regexp('%s')]" %
(self._escape(ssl_fp),
parser.case_i("VirtualHost")))
@@ -1222,15 +1163,18 @@ class ApacheConfigurator(common.Installer):
"""
if self.conf("vhost-root") and os.path.exists(self.conf("vhost-root")):
fp = os.path.join(filesystem.realpath(self.option("vhost_root")),
# Defined by user on CLI
fp = os.path.join(os.path.realpath(self.vhostroot),
os.path.basename(non_ssl_vh_fp))
else:
# Use non-ssl filepath
fp = filesystem.realpath(non_ssl_vh_fp)
fp = os.path.realpath(non_ssl_vh_fp)
if fp.endswith(".conf"):
return fp[:-(len(".conf"))] + self.option("le_vhost_ext")
return fp + self.option("le_vhost_ext")
return fp[:-(len(".conf"))] + self.conf("le_vhost_ext")
else:
return fp + self.conf("le_vhost_ext")
def _sift_rewrite_rule(self, line):
"""Decides whether a line should be copied to a SSL vhost.
@@ -1310,8 +1254,8 @@ class ApacheConfigurator(common.Installer):
"vhost for your HTTPS site located at {1} because they have "
"the potential to create redirection loops.".format(
vhost.filep, ssl_fp), reporter.MEDIUM_PRIORITY)
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(ssl_fp)), "0")
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(vhost.filep)), "0")
self.aug.set("/augeas/files%s/mtime" % (self._escape(ssl_fp)), "0")
self.aug.set("/augeas/files%s/mtime" % (self._escape(vhost.filep)), "0")
def _sift_rewrite_rules(self, contents):
""" Helper function for _copy_create_ssl_vhost_skeleton to prepare the
@@ -1386,7 +1330,7 @@ class ApacheConfigurator(common.Installer):
"""
try:
span_val = self.parser.aug.span(vhost.path)
span_val = self.aug.span(vhost.path)
except ValueError:
logger.critical("Error while reading the VirtualHost %s from "
"file %s", vhost.name, vhost.filep, exc_info=True)
@@ -1421,13 +1365,13 @@ class ApacheConfigurator(common.Installer):
def _update_ssl_vhosts_addrs(self, vh_path):
ssl_addrs = set()
ssl_addr_p = self.parser.aug.match(vh_path + "/arg")
ssl_addr_p = self.aug.match(vh_path + "/arg")
for addr in ssl_addr_p:
old_addr = obj.Addr.fromstring(
str(self.parser.get_arg(addr)))
ssl_addr = old_addr.get_addr_obj("443")
self.parser.aug.set(addr, str(ssl_addr))
self.aug.set(addr, str(ssl_addr))
ssl_addrs.add(ssl_addr)
return ssl_addrs
@@ -1446,14 +1390,15 @@ class ApacheConfigurator(common.Installer):
vh_path, False)) > 1:
directive_path = self.parser.find_dir(directive, None,
vh_path, False)
self.parser.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
self.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
def _remove_directives(self, vh_path, directives):
for directive in directives:
while self.parser.find_dir(directive, None, vh_path, False):
while len(self.parser.find_dir(directive, None,
vh_path, False)) > 0:
directive_path = self.parser.find_dir(directive, None,
vh_path, False)
self.parser.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
self.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
def _add_dummy_ssl_directives(self, vh_path):
self.parser.add_dir(vh_path, "SSLCertificateFile",
@@ -1492,8 +1437,8 @@ class ApacheConfigurator(common.Installer):
"""
matches = self.parser.find_dir(
"ServerAlias", start=vh_path, exclude=False)
aliases = (self.parser.aug.get(match) for match in matches)
return self.domain_in_names(aliases, target_name)
aliases = (self.aug.get(match) for match in matches)
return self.included_in_wildcard(aliases, target_name)
def _add_name_vhost_if_necessary(self, vhost):
"""Add NameVirtualHost Directives if necessary for new vhost.
@@ -1675,7 +1620,7 @@ class ApacheConfigurator(common.Installer):
if header_path:
pat = '(?:[ "]|^)(strict-transport-security)(?:[ "]|$)'
for match in header_path:
if re.search(pat, self.parser.aug.get(match).lower()):
if re.search(pat, self.aug.get(match).lower()):
hsts_dirpath = match
if not hsts_dirpath:
err_msg = ("Certbot was unable to find the existing HSTS header "
@@ -1689,7 +1634,7 @@ class ApacheConfigurator(common.Installer):
# Our match statement was for string strict-transport-security, but
# we need to update the value instead. The next index is for the value
hsts_dirpath = hsts_dirpath.replace("arg[3]", "arg[4]")
self.parser.aug.set(hsts_dirpath, hsts_maxage)
self.aug.set(hsts_dirpath, hsts_maxage)
note_msg = ("Increasing HSTS max-age value to {0} for VirtualHost "
"in {1}\n".format(nextstep_value, vhost.filep))
logger.debug(note_msg)
@@ -1771,7 +1716,7 @@ class ApacheConfigurator(common.Installer):
# We'll simply delete the directive, so that we'll have a
# consistent OCSP cache path.
if stapling_cache_aug_path:
self.parser.aug.remove(
self.aug.remove(
re.sub(r"/\w*$", "", stapling_cache_aug_path[0]))
self.parser.add_dir_to_ifmodssl(ssl_vhost_aug_path,
@@ -1848,7 +1793,7 @@ class ApacheConfigurator(common.Installer):
# "Existing Header directive for virtualhost"
pat = '(?:[ "]|^)(%s)(?:[ "]|$)' % (header_substring.lower())
for match in header_path:
if re.search(pat, self.parser.aug.get(match).lower()):
if re.search(pat, self.aug.get(match).lower()):
raise errors.PluginEnhancementAlreadyPresent(
"Existing %s header" % (header_substring))
@@ -1941,6 +1886,7 @@ class ApacheConfigurator(common.Installer):
self.parser.add_dir(vhost.path, "RewriteRule",
constants.REWRITE_HTTPS_ARGS)
def _verify_no_certbot_redirect(self, vhost):
"""Checks to see if a redirect was already installed by certbot.
@@ -1975,11 +1921,11 @@ class ApacheConfigurator(common.Installer):
constants.REWRITE_HTTPS_ARGS_WITH_END]
for dir_path, args_paths in rewrite_args_dict.items():
arg_vals = [self.parser.aug.get(x) for x in args_paths]
arg_vals = [self.aug.get(x) for x in args_paths]
# Search for past redirection rule, delete it, set the new one
if arg_vals in constants.OLD_REWRITE_HTTPS_ARGS:
self.parser.aug.remove(dir_path)
self.aug.remove(dir_path)
self._set_https_redirection_rewrite_rule(vhost)
self.save()
raise errors.PluginEnhancementAlreadyPresent(
@@ -2035,7 +1981,7 @@ class ApacheConfigurator(common.Installer):
redirect_filepath = self._write_out_redirect(ssl_vhost, text)
self.parser.aug.load()
self.aug.load()
# Make a new vhost data structure and add it to the lists
new_vhost = self._create_vhost(parser.get_aug_path(self._escape(redirect_filepath)))
self.vhosts.append(new_vhost)
@@ -2077,7 +2023,7 @@ class ApacheConfigurator(common.Installer):
addr in self._get_proposed_addrs(ssl_vhost)),
servername, serveralias,
" ".join(rewrite_rule_args),
self.option("logs_root")))
self.conf("logs-root")))
def _write_out_redirect(self, ssl_vhost, text):
# This is the default name
@@ -2089,7 +2035,7 @@ class ApacheConfigurator(common.Installer):
if len(ssl_vhost.name) < (255 - (len(redirect_filename) + 1)):
redirect_filename = "le-redirect-%s.conf" % ssl_vhost.name
redirect_filepath = os.path.join(self.option("vhost_root"),
redirect_filepath = os.path.join(self.vhostroot,
redirect_filename)
# Register the new file that will be created
@@ -2208,19 +2154,20 @@ class ApacheConfigurator(common.Installer):
:raises .errors.MisconfigurationError: If reload fails
"""
error = ""
try:
util.run_script(self.option("restart_cmd"))
util.run_script(self.constant("restart_cmd"))
except errors.SubprocessError as err:
logger.info("Unable to restart apache using %s",
self.option("restart_cmd"))
alt_restart = self.option("restart_cmd_alt")
self.constant("restart_cmd"))
alt_restart = self.constant("restart_cmd_alt")
if alt_restart:
logger.debug("Trying alternative restart command: %s",
alt_restart)
# There is an alternative restart command available
# This usually is "restart" verb while original is "graceful"
try:
util.run_script(self.option(
util.run_script(self.constant(
"restart_cmd_alt"))
return
except errors.SubprocessError as secerr:
@@ -2236,7 +2183,7 @@ class ApacheConfigurator(common.Installer):
"""
try:
util.run_script(self.option("conftest_cmd"))
util.run_script(self.constant("conftest_cmd"))
except errors.SubprocessError as err:
raise errors.MisconfigurationError(str(err))
@@ -2252,11 +2199,11 @@ class ApacheConfigurator(common.Installer):
"""
try:
stdout, _ = util.run_script(self.option("version_cmd"))
stdout, _ = util.run_script(self.constant("version_cmd"))
except errors.SubprocessError:
raise errors.PluginError(
"Unable to run %s -v" %
self.option("version_cmd"))
self.constant("version_cmd"))
regex = re.compile(r"Apache/([0-9\.]*)", re.IGNORECASE)
matches = regex.findall(stdout)
@@ -2281,7 +2228,7 @@ class ApacheConfigurator(common.Installer):
###########################################################################
def get_chall_pref(self, unused_domain): # pylint: disable=no-self-use
"""Return list of challenge preferences."""
return [challenges.HTTP01]
return [challenges.TLSSNI01, challenges.HTTP01]
def perform(self, achalls):
"""Perform the configuration related challenge.
@@ -2294,15 +2241,20 @@ class ApacheConfigurator(common.Installer):
self._chall_out.update(achalls)
responses = [None] * len(achalls)
http_doer = http_01.ApacheHttp01(self)
sni_doer = tls_sni_01.ApacheTlsSni01(self)
for i, achall in enumerate(achalls):
# Currently also have chall_doer hold associated index of the
# challenge. This helps to put all of the responses back together
# when they are all complete.
if isinstance(achall.chall, challenges.HTTP01):
http_doer.add_chall(achall, i)
else: # tls-sni-01
sni_doer.add_chall(achall, i)
http_response = http_doer.perform()
if http_response:
sni_response = sni_doer.perform()
if http_response or sni_response:
# Must reload in order to activate the challenges.
# Handled here because we may be able to load up other challenge
# types
@@ -2313,6 +2265,7 @@ class ApacheConfigurator(common.Installer):
time.sleep(3)
self._update_responses(responses, http_response, http_doer)
self._update_responses(responses, sni_response, sni_doer)
return responses
@@ -2340,7 +2293,7 @@ class ApacheConfigurator(common.Installer):
# certbot for unprivileged users via setuid), this function will need
# to be modified.
return common.install_version_controlled_file(options_ssl, options_ssl_digest,
self.option("MOD_SSL_CONF_SRC"), constants.ALL_SSL_OPTIONS_HASHES)
self.constant("MOD_SSL_CONF_SRC"), constants.ALL_SSL_OPTIONS_HASHES)
def enable_autohsts(self, _unused_lineage, domains):
"""
@@ -2441,9 +2394,6 @@ class ApacheConfigurator(common.Installer):
continue
nextstep = config["laststep"] + 1
if nextstep < len(constants.AUTOHSTS_STEPS):
# If installer hasn't been prepared yet, do it now
if not self._prepared:
self.prepare()
# Have not reached the max value yet
try:
vhost = self.find_vhost_by_id(id_str)
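The wildcard handling earlier in this file (a label-count check followed by fnmatch) is compact enough to miss on first read. A standalone sketch of the same idea, not taken verbatim from the plugin:

import fnmatch

def name_matches(name, domain):
    # "*.example.com" should match "www.example.com" but not "example.com"
    # or "a.b.example.com"; fnmatch's "*" also matches dots, so require
    # the same number of labels first.
    if len(name.split(".")) != len(domain.split(".")):
        return False
    return fnmatch.fnmatch(name, domain)

print(name_matches("www.example.com", "*.example.com"))   # True
print(name_matches("example.com", "*.example.com"))       # False
print(name_matches("a.b.example.com", "*.example.com"))   # False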

View File

@@ -9,7 +9,6 @@ MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
UPDATED_MOD_SSL_CONF_DIGEST = ".updated-options-ssl-apache-conf-digest.txt"
"""Name of the hash of the updated or informed mod_ssl_conf as saved in `IConfig.config_dir`."""
# NEVER REMOVE A SINGLE HASH FROM THIS LIST UNLESS YOU KNOW EXACTLY WHAT YOU ARE DOING!
ALL_SSL_OPTIONS_HASHES = [
'2086bca02db48daf93468332543c60ac6acdb6f0b58c7bfdf578a5d47092f82a',
'4844d36c9a0f587172d9fa10f4f1c9518e3bcfa1947379f155e16a70a728c21a',
@@ -19,10 +18,6 @@ ALL_SSL_OPTIONS_HASHES = [
'cfdd7c18d2025836ea3307399f509cfb1ebf2612c87dd600a65da2a8e2f2797b',
'80720bd171ccdc2e6b917ded340defae66919e4624962396b992b7218a561791',
'c0c022ea6b8a51ecc8f1003d0a04af6c3f2bc1c3ce506b3c2dfc1f11ef931082',
'717b0a89f5e4c39b09a42813ac6e747cfbdeb93439499e73f4f70a1fe1473f20',
'0fcdc81280cd179a07ec4d29d3595068b9326b455c488de4b09f585d5dafc137',
'86cc09ad5415cd6d5f09a947fe2501a9344328b1e8a8b458107ea903e80baa6c',
'06675349e457eae856120cdebb564efe546f0b87399f2264baeb41e442c724c7',
]
"""SHA256 hashes of the contents of previous versions of all versions of MOD_SSL_CONF_SRC"""

View File

@@ -1,12 +1,14 @@
"""Contains UI methods for Apache operations."""
import logging
import os
import zope.component
import certbot.display.util as display_util
from certbot import errors
from certbot import interfaces
from certbot.compat import os
import certbot.display.util as display_util
logger = logging.getLogger(__name__)
@@ -24,7 +26,7 @@ def select_vhost_multiple(vhosts):
return list()
tags_list = [vhost.display_repr()+"\n" for vhost in vhosts]
# Remove the extra newline from the last entry
if tags_list:
if len(tags_list):
tags_list[-1] = tags_list[-1][:-1]
code, names = zope.component.getUtility(interfaces.IDisplay).checklist(
"Which VirtualHosts would you like to install the wildcard certificate for?",
@@ -60,6 +62,7 @@ def select_vhost(domain, vhosts):
code, tag = _vhost_menu(domain, vhosts)
if code == display_util.OK:
return vhosts[tag]
else:
return None
def _vhost_menu(domain, vhosts):
@@ -90,7 +93,7 @@ def _vhost_menu(domain, vhosts):
for vhost in vhosts:
if len(vhost.get_names()) == 1:
disp_name = next(iter(vhost.get_names()))
elif not vhost.get_names():
elif len(vhost.get_names()) == 0:
disp_name = ""
else:
disp_name = "Multiple Names"
@@ -110,7 +113,8 @@ def _vhost_menu(domain, vhosts):
code, tag = zope.component.getUtility(interfaces.IDisplay).menu(
"We were unable to find a vhost with a ServerName "
"or Address of {0}.{1}Which virtual host would you "
"like to choose?".format(domain, os.linesep),
"like to choose?\n(note: conf files with multiple "
"vhosts are not yet supported)".format(domain, os.linesep),
choices, force_interactive=True)
except errors.MissingCommandlineFlag:
msg = (

View File

@@ -1,13 +1,8 @@
""" Entry point for Apache Plugin """
# Pylint does not like disutils.version when running inside a venv.
# See: https://github.com/PyCQA/pylint/issues/73
from distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error
from certbot import util
from certbot_apache import configurator
from certbot_apache import override_arch
from certbot_apache import override_fedora
from certbot_apache import override_darwin
from certbot_apache import override_debian
from certbot_apache import override_centos
@@ -21,8 +16,7 @@ OVERRIDE_CLASSES = {
"ubuntu": override_debian.DebianConfigurator,
"centos": override_centos.CentOSConfigurator,
"centos linux": override_centos.CentOSConfigurator,
"fedora_old": override_centos.CentOSConfigurator,
"fedora": override_fedora.FedoraConfigurator,
"fedora": override_centos.CentOSConfigurator,
"ol": override_centos.CentOSConfigurator,
"red hat enterprise linux server": override_centos.CentOSConfigurator,
"rhel": override_centos.CentOSConfigurator,
@@ -33,19 +27,12 @@ OVERRIDE_CLASSES = {
"suse": override_suse.OpenSUSEConfigurator,
}
def get_configurator():
""" Get correct configurator class based on the OS fingerprint """
os_name, os_version = util.get_os_info()
os_name = os_name.lower()
os_info = util.get_os_info()
override_class = None
# Special case for older Fedora versions
if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):
os_name = 'fedora_old'
try:
override_class = OVERRIDE_CLASSES[os_name]
override_class = OVERRIDE_CLASSES[os_info[0].lower()]
except KeyError:
# OS not found in the list
os_like = util.get_systemd_os_like()
@@ -58,5 +45,4 @@ def get_configurator():
override_class = configurator.ApacheConfigurator
return override_class
ENTRYPOINT = get_configurator()
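The Fedora special-casing above hinges on distutils' LooseVersion ordering plus a dictionary lookup keyed on the lowercased OS name. A minimal sketch of that logic, with made-up string values standing in for the configurator classes and plain arguments replacing util.get_os_info():

from distutils.version import LooseVersion

OVERRIDES = {
    "debian": "DebianConfigurator",
    "fedora_old": "CentOSConfigurator",
    "fedora": "FedoraConfigurator",
}

def pick_override(os_name, os_version):
    os_name = os_name.lower()
    # In this scheme, Fedora releases older than 29 keep the CentOS-style
    # configurator.
    if os_name == "fedora" and LooseVersion(os_version) < LooseVersion("29"):
        os_name = "fedora_old"
    return OVERRIDES.get(os_name, "ApacheConfigurator")

print(pick_override("Fedora", "28"))      # CentOSConfigurator
print(pick_override("Fedora", "30"))      # FedoraConfigurator
print(pick_override("Arch", "rolling"))   # ApacheConfigurator (fallback)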

View File

@@ -1,19 +1,14 @@
"""A class that performs HTTP-01 challenges for Apache"""
import logging
import os
from acme.magic_typing import List, Set # pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
from certbot import errors
from certbot.compat import os
from certbot.compat import filesystem
from certbot.plugins import common
from certbot_apache.obj import VirtualHost # pylint: disable=unused-import
from certbot_apache.parser import get_aug_path
logger = logging.getLogger(__name__)
class ApacheHttp01(common.TLSSNI01):
"""Class that performs HTTP-01 challenges within the Apache configurator."""
@@ -93,26 +88,14 @@ class ApacheHttp01(common.TLSSNI01):
self.configurator.enable_mod(mod, temp=True)
def _mod_config(self):
selected_vhosts = [] # type: List[VirtualHost]
http_port = str(self.configurator.config.http01_port)
for chall in self.achalls:
# Search for matching VirtualHosts
for vh in self._matching_vhosts(chall.domain):
selected_vhosts.append(vh)
# Ensure that we have one or more VirtualHosts that we can continue
# with. (one that listens to port configured with --http-01-port)
found = False
for vhost in selected_vhosts:
if any(a.is_wildcard() or a.get_port() == http_port for a in vhost.addrs):
found = True
if not found:
vh = self.configurator.find_best_http_vhost(
chall.domain, filter_defaults=False,
port=str(self.configurator.config.http01_port))
if vh:
self._set_up_include_directives(vh)
else:
for vh in self._relevant_vhosts():
selected_vhosts.append(vh)
# Add the challenge configuration
for vh in selected_vhosts:
self._set_up_include_directives(vh)
self.configurator.reverter.register_file_creation(
@@ -137,20 +120,6 @@ class ApacheHttp01(common.TLSSNI01):
with open(self.challenge_conf_post, "w") as new_conf:
new_conf.write(config_text_post)
def _matching_vhosts(self, domain):
"""Return all VirtualHost objects that have the requested domain name or
a wildcard name that would match the domain in ServerName or ServerAlias
directive.
"""
matching_vhosts = []
for vhost in self.configurator.vhosts:
if self.configurator.domain_in_names(vhost.get_names(), domain):
# domain_in_names also matches the exact names, so no need
# to check "domain in vhost.get_names()" explicitly here
matching_vhosts.append(vhost)
return matching_vhosts
def _relevant_vhosts(self):
http01_port = str(self.configurator.config.http01_port)
relevant_vhosts = []
@@ -169,7 +138,8 @@ class ApacheHttp01(common.TLSSNI01):
def _set_up_challenges(self):
if not os.path.isdir(self.challenge_dir):
filesystem.makedirs(self.challenge_dir, 0o755)
os.makedirs(self.challenge_dir)
os.chmod(self.challenge_dir, 0o755)
responses = []
for achall in self.achalls:
@@ -185,7 +155,7 @@ class ApacheHttp01(common.TLSSNI01):
self.configurator.reverter.register_file_creation(True, name)
with open(name, 'wb') as f:
f.write(validation.encode())
filesystem.chmod(name, 0o644)
os.chmod(name, 0o644)
return response
@@ -202,9 +172,4 @@ class ApacheHttp01(common.TLSSNI01):
self.configurator.parser.add_dir(
vhost.path, "Include", self.challenge_conf_post)
if not vhost.enabled:
self.configurator.parser.add_dir(
get_aug_path(self.configurator.parser.loc["default"]),
"Include", vhost.filep)
self.moded_vhosts.add(vhost)

View File

@@ -26,7 +26,7 @@ class Addr(common.Addr):
def __repr__(self):
return "certbot_apache.obj.Addr(" + repr(self.tup) + ")"
def __hash__(self): # pylint: disable=useless-super-delegation
def __hash__(self):
# Python 3 requires explicit overridden for __hash__ if __eq__ or
# __cmp__ is overridden. See https://bugs.python.org/issue2235
return super(Addr, self).__hash__()
@@ -47,6 +47,7 @@ class Addr(common.Addr):
return 0
elif self.get_addr() == "*":
return 1
else:
return 2
def conflicts(self, addr):
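The __hash__ comment above reflects a general Python 3 rule: a class that defines __eq__ gets __hash__ set to None unless it defines one explicitly. A minimal illustration, independent of the Addr class:

class WithoutHash(object):
    def __eq__(self, other):
        return isinstance(other, WithoutHash)

class WithHash(object):
    def __eq__(self, other):
        return isinstance(other, WithHash)

    def __hash__(self):
        # Explicit override restores hashability; all instances compare
        # equal here, so they must also share a hash value.
        return hash(WithHash)

print(WithoutHash.__hash__)      # None under Python 3
try:
    {WithoutHash()}
except TypeError as err:
    print(err)                   # unhashable type: 'WithoutHash'
print(len({WithHash(), WithHash()}))  # 1 -- hashable and deduplicated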

View File

@@ -16,14 +16,14 @@ class ArchConfigurator(configurator.ApacheConfigurator):
vhost_root="/etc/httpd/conf",
vhost_files="*.conf",
logs_root="/var/log/httpd",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
apache_cmd="apachectl",
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_mods=False,
handle_sites=False,
challenge_location="/etc/httpd/conf",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(

View File

@@ -1,24 +1,14 @@
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
import logging
import pkg_resources
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.errors import MisconfigurationError
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
from certbot_apache import apache_util
from certbot_apache import configurator
from certbot_apache import parser
logger = logging.getLogger(__name__)
@zope.interface.provider(interfaces.IPluginFactory)
class CentOSConfigurator(configurator.ApacheConfigurator):
"""CentOS specific ApacheConfigurator override class"""
@@ -28,144 +18,27 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
vhost_root="/etc/httpd/conf.d",
vhost_files="*.conf",
logs_root="/var/log/httpd",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
apache_cmd="apachectl",
restart_cmd=['apachectl', 'graceful'],
restart_cmd_alt=['apachectl', 'restart'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_mods=False,
handle_sites=False,
challenge_location="/etc/httpd/conf.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", "centos-options-ssl-apache.conf")
)
def config_test(self):
"""
Override config_test to mitigate configtest error in vanilla installation
of mod_ssl in Fedora. The error is caused by non-existent self-signed
certificates referenced by the configuration, which would be autogenerated
during the first (re)start of httpd.
"""
os_info = util.get_os_info()
fedora = os_info[0].lower() == "fedora"
try:
super(CentOSConfigurator, self).config_test()
except errors.MisconfigurationError:
if fedora:
self._try_restart_fedora()
else:
raise
def _try_restart_fedora(self):
"""
Tries to restart httpd using systemctl to generate the self-signed keypair.
"""
try:
util.run_script(['systemctl', 'restart', 'httpd'])
except errors.SubprocessError as err:
raise errors.MisconfigurationError(str(err))
# Finish with actual config check to see if systemctl restart helped
super(CentOSConfigurator, self).config_test()
def _prepare_options(self):
"""
Override the options dictionary initialization in order to support
alternative restart cmd used in CentOS.
"""
super(CentOSConfigurator, self)._prepare_options()
self.options["restart_cmd_alt"][0] = self.option("ctl")
def get_parser(self):
"""Initializes the ApacheParser"""
return CentOSParser(
self.option("server_root"), self.option("vhost_root"),
self.aug, self.conf("server-root"), self.conf("vhost-root"),
self.version, configurator=self)
def _deploy_cert(self, *args, **kwargs): # pylint: disable=arguments-differ
"""
Override _deploy_cert in order to ensure that the Apache configuration
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
that was created by Certbot
"""
super(CentOSConfigurator, self)._deploy_cert(*args, **kwargs)
if self.version < (2, 4, 0):
self._deploy_loadmodule_ssl_if_needed()
def _deploy_loadmodule_ssl_if_needed(self):
"""
Add "LoadModule ssl_module <pre-existing path>" to main httpd.conf if
it doesn't exist there already.
"""
loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)
correct_ifmods = [] # type: List[str]
loadmod_args = [] # type: List[str]
loadmod_paths = [] # type: List[str]
for m in loadmods:
noarg_path = m.rpartition("/")[0]
path_args = self.parser.get_all_args(noarg_path)
if loadmod_args:
if loadmod_args != path_args:
msg = ("Certbot encountered multiple LoadModule directives "
"for LoadModule ssl_module with differing library paths. "
"Please remove or comment out the one(s) that are not in "
"use, and run Certbot again.")
raise MisconfigurationError(msg)
else:
loadmod_args = path_args
if self.parser.not_modssl_ifmodule(noarg_path): # pylint: disable=no-member
if self.parser.loc["default"] in noarg_path:
# LoadModule already in the main configuration file
if ("ifmodule/" in noarg_path.lower() or
"ifmodule[1]" in noarg_path.lower()):
# It's the first or only IfModule in the file
return
# Populate the list of known !mod_ssl.c IfModules
nodir_path = noarg_path.rpartition("/directive")[0]
correct_ifmods.append(nodir_path)
else:
loadmod_paths.append(noarg_path)
if not loadmod_args:
# Do not try to enable mod_ssl
return
# Force creation as the directive wasn't found from the beginning of
# httpd.conf
rootconf_ifmod = self.parser.create_ifmod(
parser.get_aug_path(self.parser.loc["default"]),
"!mod_ssl.c", beginning=True)
# parser.get_ifmod returns a path postfixed with "/", remove that
self.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", loadmod_args)
correct_ifmods.append(rootconf_ifmod[:-1])
self.save_notes += "Added LoadModule ssl_module to main configuration.\n"
# Wrap LoadModule mod_ssl inside of <IfModule !mod_ssl.c> if it's not
# configured like this already.
for loadmod_path in loadmod_paths:
nodir_path = loadmod_path.split("/directive")[0]
# Remove the old LoadModule directive
self.parser.aug.remove(loadmod_path)
# Create a new IfModule !mod_ssl.c if not already found on path
ssl_ifmod = self.parser.get_ifmod(nodir_path, "!mod_ssl.c",
beginning=True)[:-1]
if ssl_ifmod not in correct_ifmods:
self.parser.add_dir(ssl_ifmod, "LoadModule", loadmod_args)
correct_ifmods.append(ssl_ifmod)
self.save_notes += ("Wrapped pre-existing LoadModule ssl_module "
"inside of <IfModule !mod_ssl> block.\n")
class CentOSParser(parser.ApacheParser):
"""CentOS specific ApacheParser override class"""
@@ -183,35 +56,5 @@ class CentOSParser(parser.ApacheParser):
def parse_sysconfig_var(self):
""" Parses Apache CLI options from CentOS configuration file """
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
for k in defines:
for k in defines.keys():
self.variables[k] = defines[k]
def not_modssl_ifmodule(self, path):
"""Checks if the provided Augeas path has argument !mod_ssl"""
if "ifmodule" not in path.lower():
return False
# Trim the path to the last ifmodule
workpath = path.lower()
while workpath:
# Get path to the last IfModule (ignore the tail)
parts = workpath.rpartition("ifmodule")
if not parts[0]:
# IfModule not found
break
ifmod_path = parts[0] + parts[1]
# Check if ifmodule had an index
if parts[2].startswith("["):
# Append the index from tail
ifmod_path += parts[2].partition("/")[0]
# Get the original path trimmed to correct length
# This is required to preserve cases
ifmod_real_path = path[0:len(ifmod_path)]
if "!mod_ssl.c" in self.get_all_args(ifmod_real_path):
return True
# Set the workpath to the heading part
workpath = parts[0]
return False
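not_modssl_ifmodule walks the Augeas path from right to left with rpartition("ifmodule"), reattaching any [n] index and re-slicing the original string to preserve case, so that each enclosing IfModule block can be checked for a !mod_ssl.c argument. The standalone sketch below reproduces only the path-trimming walk on a made-up path; the actual argument lookup (get_all_args) is intentionally left out.

```python
# Sketch of the right-to-left IfModule walk used above. The sample path is
# made up and the !mod_ssl.c argument lookup is omitted.
def ifmodule_prefixes(path):
    """Yield successively shorter prefixes of path, each ending at an IfModule."""
    workpath = path.lower()
    while workpath:
        head, sep, tail = workpath.rpartition("ifmodule")
        if not head:                 # no enclosing IfModule left
            break
        prefix = head + sep
        if tail.startswith("["):     # keep the index, e.g. "[2]"
            prefix += tail.partition("/")[0]
        # Re-slice the untouched path so the original casing is preserved
        yield path[:len(prefix)]
        workpath = head

sample = "/files/etc/httpd/conf/httpd.conf/IfModule[2]/IfModule/directive[1]"
print(list(ifmodule_prefixes(sample)))
# ['/files/etc/httpd/conf/httpd.conf/IfModule[2]/IfModule',
#  '/files/etc/httpd/conf/httpd.conf/IfModule[2]']
```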

View File

@@ -16,14 +16,14 @@ class DarwinConfigurator(configurator.ApacheConfigurator):
vhost_root="/etc/apache2/other",
vhost_files="*.conf",
logs_root="/var/log/apache2",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
version_cmd=['/usr/sbin/httpd', '-v'],
apache_cmd="/usr/sbin/httpd",
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_mods=False,
handle_sites=False,
challenge_location="/etc/apache2/other",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(

View File

@@ -1,21 +1,19 @@
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
import logging
import os
import pkg_resources
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.compat import filesystem
from certbot.compat import os
from certbot_apache import apache_util
from certbot_apache import configurator
logger = logging.getLogger(__name__)
@zope.interface.provider(interfaces.IPluginFactory)
class DebianConfigurator(configurator.ApacheConfigurator):
"""Debian specific ApacheConfigurator override class"""
@@ -25,14 +23,14 @@ class DebianConfigurator(configurator.ApacheConfigurator):
vhost_root="/etc/apache2/sites-available",
vhost_files="*",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
apache_cmd="apache2ctl",
restart_cmd=['apache2ctl', 'graceful'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod="a2enmod",
dismod="a2dismod",
le_vhost_ext="-le-ssl.conf",
handle_modules=True,
handle_mods=True,
handle_sites=True,
challenge_location="/etc/apache2",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
@@ -53,7 +51,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
"""
if vhost.enabled:
return None
return
enabled_path = ("%s/sites-enabled/%s" %
(self.parser.root,
@@ -66,11 +64,11 @@ class DebianConfigurator(configurator.ApacheConfigurator):
try:
os.symlink(vhost.filep, enabled_path)
except OSError as err:
if os.path.islink(enabled_path) and filesystem.realpath(
if os.path.islink(enabled_path) and os.path.realpath(
enabled_path) == vhost.filep:
# Already in shape
vhost.enabled = True
return None
return
else:
logger.warning(
"Could not symlink %s to %s, got error: %s", enabled_path,
@@ -83,9 +81,9 @@ class DebianConfigurator(configurator.ApacheConfigurator):
vhost.enabled = True
logger.info("Enabling available site: %s", vhost.filep)
self.save_notes += "Enabled site %s\n" % vhost.filep
return None
def enable_mod(self, mod_name, temp=False):
# pylint: disable=unused-argument
"""Enables module in Apache.
Both enables and reloads Apache so module is active.
@@ -136,11 +134,11 @@ class DebianConfigurator(configurator.ApacheConfigurator):
# Generate reversal command.
# Try to be safe here... check that we can probably reverse before
# applying enmod command
if not util.exe_exists(self.option("dismod")):
if not util.exe_exists(self.conf("dismod")):
raise errors.MisconfigurationError(
"Unable to find a2dismod, please make sure a2enmod and "
"a2dismod are configured correctly for certbot.")
self.reverter.register_undo_command(
temp, [self.option("dismod"), "-f", mod_name])
util.run_script([self.option("enmod"), mod_name])
temp, [self.conf("dismod"), "-f", mod_name])
util.run_script([self.conf("enmod"), mod_name])

View File

@@ -1,98 +0,0 @@
""" Distribution specific override class for Fedora 29+ """
import pkg_resources
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot_apache import apache_util
from certbot_apache import configurator
from certbot_apache import parser
@zope.interface.provider(interfaces.IPluginFactory)
class FedoraConfigurator(configurator.ApacheConfigurator):
"""Fedora 29+ specific ApacheConfigurator override class"""
OS_DEFAULTS = dict(
server_root="/etc/httpd",
vhost_root="/etc/httpd/conf.d",
vhost_files="*.conf",
logs_root="/var/log/httpd",
ctl="httpd",
version_cmd=['httpd', '-v'],
restart_cmd=['apachectl', 'graceful'],
restart_cmd_alt=['apachectl', 'restart'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
# TODO: eventually newest version of Fedora will need their own config
"certbot_apache", "centos-options-ssl-apache.conf")
)
def config_test(self):
"""
Override config_test to mitigate configtest error in vanilla installation
of mod_ssl in Fedora. The error is caused by non-existent self-signed
certificates referenced by the configuration, which would be autogenerated
during the first (re)start of httpd.
"""
try:
super(FedoraConfigurator, self).config_test()
except errors.MisconfigurationError:
self._try_restart_fedora()
def get_parser(self):
"""Initializes the ApacheParser"""
return FedoraParser(
self.option("server_root"), self.option("vhost_root"),
self.version, configurator=self)
def _try_restart_fedora(self):
"""
Tries to restart httpd using systemctl to generate the self-signed keypair.
"""
try:
util.run_script(['systemctl', 'restart', 'httpd'])
except errors.SubprocessError as err:
raise errors.MisconfigurationError(str(err))
# Finish with actual config check to see if systemctl restart helped
super(FedoraConfigurator, self).config_test()
def _prepare_options(self):
"""
Override the options dictionary initialization to keep using apachectl
instead of httpd, and so take advantage of the new bash script shipped in newer versions
of Fedora to restart httpd.
"""
super(FedoraConfigurator, self)._prepare_options()
self.options["restart_cmd"][0] = 'apachectl'
self.options["restart_cmd_alt"][0] = 'apachectl'
self.options["conftest_cmd"][0] = 'apachectl'
class FedoraParser(parser.ApacheParser):
"""Fedora 29+ specific ApacheParser override class"""
def __init__(self, *args, **kwargs):
# Fedora 29+ specific configuration file for Apache
self.sysconfig_filep = "/etc/sysconfig/httpd"
super(FedoraParser, self).__init__(*args, **kwargs)
def update_runtime_variables(self):
""" Override for update_runtime_variables for custom parsing """
# Opportunistic, works if SELinux not enforced
super(FedoraParser, self).update_runtime_variables()
self._parse_sysconfig_var()
def _parse_sysconfig_var(self):
""" Parses Apache CLI options from Fedora configuration file """
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
for k in defines:
self.variables[k] = defines[k]

View File

@@ -18,33 +18,25 @@ class GentooConfigurator(configurator.ApacheConfigurator):
vhost_root="/etc/apache2/vhosts.d",
vhost_files="*.conf",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
version_cmd=['/usr/sbin/apache2', '-v'],
apache_cmd="apache2ctl",
restart_cmd=['apache2ctl', 'graceful'],
restart_cmd_alt=['apache2ctl', 'restart'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_mods=False,
handle_sites=False,
challenge_location="/etc/apache2/vhosts.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", "options-ssl-apache.conf")
)
def _prepare_options(self):
"""
Override the options dictionary initialization in order to support
alternative restart cmd used in Gentoo.
"""
super(GentooConfigurator, self)._prepare_options()
self.options["restart_cmd_alt"][0] = self.option("ctl")
def get_parser(self):
"""Initializes the ApacheParser"""
return GentooParser(
self.option("server_root"), self.option("vhost_root"),
self.aug, self.conf("server-root"), self.conf("vhost-root"),
self.version, configurator=self)
@@ -64,12 +56,12 @@ class GentooParser(parser.ApacheParser):
""" Parses Apache CLI options from Gentoo configuration file """
defines = apache_util.parse_define_file(self.apacheconfig_filep,
"APACHE2_OPTS")
for k in defines:
for k in defines.keys():
self.variables[k] = defines[k]
def update_modules(self):
"""Get loaded modules from httpd process, and add them to DOM"""
mod_cmd = [self.configurator.option("ctl"), "modules"]
mod_cmd = [self.configurator.constant("apache_cmd"), "modules"]
matches = self.parse_from_subprocess(mod_cmd, r"(.*)_module")
for mod in matches:
self.add_mod(mod.strip())

View File

@@ -16,14 +16,14 @@ class OpenSUSEConfigurator(configurator.ApacheConfigurator):
vhost_root="/etc/apache2/vhosts.d",
vhost_files="*.conf",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
apache_cmd="apache2ctl",
restart_cmd=['apache2ctl', 'graceful'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod="a2enmod",
dismod="a2dismod",
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_mods=False,
handle_sites=False,
challenge_location="/etc/apache2/vhosts.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(

View File

@@ -2,6 +2,7 @@
import copy
import fnmatch
import logging
import os
import re
import subprocess
import sys
@@ -9,11 +10,7 @@ import sys
import six
from acme.magic_typing import Dict, List, Set # pylint: disable=unused-import, no-name-in-module
from certbot import errors
from certbot.compat import os
from certbot_apache import constants
logger = logging.getLogger(__name__)
@@ -34,7 +31,7 @@ class ApacheParser(object):
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
fnmatch_chars = set(["*", "?", "\\", "[", "]"])
def __init__(self, root, vhostroot=None, version=(2, 4),
def __init__(self, aug, root, vhostroot=None, version=(2, 4),
configurator=None):
# Note: Order is important here.
@@ -43,20 +40,11 @@ class ApacheParser(object):
# issues with aug.load() after adding new files / defines to parse tree
self.configurator = configurator
# Initialize augeas
self.aug = None
self.init_augeas()
if not self.check_aug_version():
raise errors.NotSupportedError(
"Apache plugin support requires libaugeas0 and augeas-lenses "
"version 1.2.0 or higher, please make sure you have you have "
"those installed.")
self.modules = set() # type: Set[str]
self.parser_paths = {} # type: Dict[str, List[str]]
self.variables = {} # type: Dict[str, str]
self.aug = aug
# Find configuration root and make sure augeas can parse it.
self.root = os.path.abspath(root)
self.loc = {"root": self._find_config_root()}
@@ -81,153 +69,13 @@ class ApacheParser(object):
# Must also attempt to parse additional virtual host root
if vhostroot:
self.parse_file(os.path.abspath(vhostroot) + "/" +
self.configurator.option("vhost_files"))
self.configurator.constant("vhost_files"))
# check to see if there were unparsed define statements
if version < (2, 4):
if self.find_dir("Define", exclude=False):
raise errors.PluginError("Error parsing runtime variables")
def init_augeas(self):
""" Initialize the actual Augeas instance """
try:
import augeas
except ImportError: # pragma: no cover
raise errors.NoInstallationError("Problem in Augeas installation")
self.aug = augeas.Augeas(
# specify a directory to load our preferred lens from
loadpath=constants.AUGEAS_LENS_DIR,
# Do not save backup (we do it ourselves), do not load
# anything by default
flags=(augeas.Augeas.NONE |
augeas.Augeas.NO_MODL_AUTOLOAD |
augeas.Augeas.ENABLE_SPAN))
def check_parsing_errors(self, lens):
"""Verify Augeas can parse all of the lens files.
:param str lens: lens to check for errors
:raises .errors.PluginError: If there has been an error in parsing with
the specified lens.
"""
error_files = self.aug.match("/augeas//error")
for path in error_files:
# Check to see if it was an error resulting from the use of
# the httpd lens
lens_path = self.aug.get(path + "/lens")
# As aug.get may return null
if lens_path and lens in lens_path:
msg = (
"There has been an error in parsing the file {0} on line {1}: "
"{2}".format(
# Strip off /augeas/files and /error
path[13:len(path) - 6],
self.aug.get(path + "/line"),
self.aug.get(path + "/message")))
raise errors.PluginError(msg)
def check_aug_version(self):
""" Checks that we have recent enough version of libaugeas.
If augeas version is recent enough, it will support case insensitive
regexp matching"""
self.aug.set("/test/path/testing/arg", "aRgUMeNT")
try:
matches = self.aug.match(
"/test//*[self::arg=~regexp('argument', 'i')]")
except RuntimeError:
self.aug.remove("/test/path")
return False
self.aug.remove("/test/path")
return matches
def unsaved_files(self):
"""Lists files that have modified Augeas DOM but the changes have not
been written to the filesystem yet, used by `self.save()` and
ApacheConfigurator to check the file state.
:raises .errors.PluginError: If there was an error in Augeas, in
an attempt to save the configuration, or an error creating a
checkpoint
:returns: `set` of unsaved files
"""
save_state = self.aug.get("/augeas/save")
self.aug.set("/augeas/save", "noop")
# Existing Errors
ex_errs = self.aug.match("/augeas//error")
try:
# This is a noop save
self.aug.save()
except (RuntimeError, IOError):
self._log_save_errors(ex_errs)
# Erase Save Notes
self.configurator.save_notes = ""
raise errors.PluginError(
"Error saving files, check logs for more info.")
# Return the original save method
self.aug.set("/augeas/save", save_state)
# Retrieve list of modified files
# Note: Noop saves can cause the file to be listed twice, I used a
# set to remove this possibility. This is a known augeas 0.10 error.
save_paths = self.aug.match("/augeas/events/saved")
save_files = set()
if save_paths:
for path in save_paths:
save_files.add(self.aug.get(path)[6:])
return save_files
def ensure_augeas_state(self):
"""Makes sure that all Augeas dom changes are written to files to avoid
loss of configuration directives when doing additional augeas parsing,
which could otherwise cause augeas.load() to reset the DOM
"""
if self.unsaved_files():
self.configurator.save_notes += "(autosave)"
self.configurator.save()
def save(self, save_files):
"""Saves all changes to the configuration files.
save() is called from ApacheConfigurator to handle the parser specific
tasks of saving.
:param list save_files: list of strings of file paths that we need to save.
"""
self.configurator.save_notes = ""
self.aug.save()
# Force reload if files were modified
# This is needed to recalculate augeas directive span
if save_files:
for sf in save_files:
self.aug.remove("/files/"+sf)
self.aug.load()
def _log_save_errors(self, ex_errs):
"""Log errors due to bad Augeas save.
:param list ex_errs: Existing errors before save
"""
# Check for the root of save problems
new_errs = self.aug.match("/augeas//error")
# logger.error("During Save - %s", mod_conf)
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
", ".join(err[13:len(err) - 6] for err in new_errs
# Only new errors caused by recent save
if err not in ex_errs), self.configurator.save_notes)
def add_include(self, main_config, inc_path):
"""Add Include for a new configuration file if one does not exist
@@ -235,7 +83,7 @@ class ApacheParser(object):
:param str inc_path: path of file to include
"""
if not self.find_dir(case_i("Include"), inc_path):
if len(self.find_dir(case_i("Include"), inc_path)) == 0:
logger.debug("Adding Include %s to %s",
inc_path, get_aug_path(main_config))
self.add_dir(
@@ -245,7 +93,12 @@ class ApacheParser(object):
# Add new path to parser paths
new_dir = os.path.dirname(inc_path)
new_file = os.path.basename(inc_path)
self.existing_paths.setdefault(new_dir, []).append(new_file)
if new_dir in self.existing_paths.keys():
# Add to existing path
self.existing_paths[new_dir].append(new_file)
else:
# Create a new path
self.existing_paths[new_dir] = [new_file]
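Both sides of the hunk above record a newly Included file under its directory so that later parsing passes know the path is already covered; the new version simply collapses the if/else into dict.setdefault. A tiny self-contained sketch of that bookkeeping (the paths are made-up examples):

```python
import os

# Minimal sketch of the existing_paths bookkeeping: map each directory to
# the configuration files already registered under it.
existing_paths = {"/etc/apache2/sites-enabled": ["example.conf"]}

def register_include(paths, inc_path):
    new_dir = os.path.dirname(inc_path)
    new_file = os.path.basename(inc_path)
    # setdefault creates the list on first use and appends either way
    paths.setdefault(new_dir, []).append(new_file)

register_include(existing_paths, "/etc/apache2/conf-enabled/challenge.conf")
register_include(existing_paths, "/etc/apache2/sites-enabled/other.conf")
print(existing_paths)
```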
def add_mod(self, mod_name):
"""Shortcut for updating parser modules."""
@@ -286,7 +139,7 @@ class ApacheParser(object):
mods.add(os.path.basename(mod_filename)[:-2] + "c")
else:
logger.debug("Could not read LoadModule directive from " +
"Augeas path: %s", match_name[6:])
"Augeas path: {0}".format(match_name[6:]))
self.modules.update(mods)
def update_runtime_variables(self):
@@ -299,7 +152,7 @@ class ApacheParser(object):
"""Get Defines from httpd process"""
variables = dict()
define_cmd = [self.configurator.option("ctl"), "-t", "-D",
define_cmd = [self.configurator.constant("apache_cmd"), "-t", "-D",
"DUMP_RUN_CFG"]
matches = self.parse_from_subprocess(define_cmd, r"Define: ([^ \n]*)")
try:
@@ -326,7 +179,7 @@ class ApacheParser(object):
# configuration files
_ = self.find_dir("Include")
inc_cmd = [self.configurator.option("ctl"), "-t", "-D",
inc_cmd = [self.configurator.constant("apache_cmd"), "-t", "-D",
"DUMP_INCLUDES"]
matches = self.parse_from_subprocess(inc_cmd, r"\(.*\) (.*)")
if matches:
@@ -337,7 +190,7 @@ class ApacheParser(object):
def update_modules(self):
"""Get loaded modules from httpd process, and add them to DOM"""
mod_cmd = [self.configurator.option("ctl"), "-t", "-D",
mod_cmd = [self.configurator.constant("apache_cmd"), "-t", "-D",
"DUMP_MODULES"]
matches = self.parse_from_subprocess(mod_cmd, r"(.*)_module")
for mod in matches:
@@ -376,8 +229,8 @@ class ApacheParser(object):
"Error running command %s for runtime parameters!%s",
command, os.linesep)
raise errors.MisconfigurationError(
"Error accessing loaded Apache parameters: {0}".format(
command))
"Error accessing loaded Apache parameters: %s",
command)
# Small errors that do not impede
if proc.returncode != 0:
logger.warning("Error in checking parameter list: %s", stderr)
@@ -403,12 +256,12 @@ class ApacheParser(object):
"""
filtered = []
if args == 1:
for i, match in enumerate(matches):
if match.endswith("/arg"):
for i in range(len(matches)):
if matches[i].endswith("/arg"):
filtered.append(matches[i][:-4])
else:
for i, match in enumerate(matches):
if match.endswith("/arg[%d]" % args):
for i in range(len(matches)):
if matches[i].endswith("/arg[%d]" % args):
# Make sure we don't cause an IndexError (end of list)
# Check to make sure arg + 1 doesn't exist
if (i == (len(matches) - 1) or
@@ -433,7 +286,7 @@ class ApacheParser(object):
"""
# TODO: Add error checking code... does the path given even exist?
# Does it throw exceptions?
if_mod_path = self.get_ifmod(aug_conf_path, "mod_ssl.c")
if_mod_path = self._get_ifmod(aug_conf_path, "mod_ssl.c")
# IfModule can have only one valid argument, so append after
self.aug.insert(if_mod_path + "arg", "directive", False)
nvh_path = if_mod_path + "directive[1]"
@@ -444,54 +297,22 @@ class ApacheParser(object):
for i, arg in enumerate(args):
self.aug.set("%s/arg[%d]" % (nvh_path, i + 1), arg)
def get_ifmod(self, aug_conf_path, mod, beginning=False):
def _get_ifmod(self, aug_conf_path, mod):
"""Returns the path to <IfMod mod> and creates one if it doesn't exist.
:param str aug_conf_path: Augeas configuration path
:param str mod: module, e.g. mod_ssl.c
:param bool beginning: If the IfModule should be created to the beginning
of augeas path DOM tree.
:returns: Augeas path of the requested IfModule directive that pre-existed
or was created during the process. The path may be dynamic,
i.e. .../IfModule[last()]
:rtype: str
"""
if_mods = self.aug.match(("%s/IfModule/*[self::arg='%s']" %
(aug_conf_path, mod)))
if not if_mods:
return self.create_ifmod(aug_conf_path, mod, beginning)
if len(if_mods) == 0:
self.aug.set("%s/IfModule[last() + 1]" % aug_conf_path, "")
self.aug.set("%s/IfModule[last()]/arg" % aug_conf_path, mod)
if_mods = self.aug.match(("%s/IfModule/*[self::arg='%s']" %
(aug_conf_path, mod)))
# Strip off "arg" at end of first ifmod path
return if_mods[0].rpartition("arg")[0]
def create_ifmod(self, aug_conf_path, mod, beginning=False):
"""Creates a new <IfMod mod> and returns its path.
:param str aug_conf_path: Augeas configuration path
:param str mod: module, e.g. mod_ssl.c
:param bool beginning: If the IfModule should be created to the beginning
of augeas path DOM tree.
:returns: Augeas path of the newly created IfModule directive.
The path may be dynamic, i.e. .../IfModule[last()]
:rtype: str
"""
if beginning:
c_path_arg = "{}/IfModule[1]/arg".format(aug_conf_path)
# Insert IfModule before the first directive
self.aug.insert("{}/directive[1]".format(aug_conf_path),
"IfModule", True)
retpath = "{}/IfModule[1]/".format(aug_conf_path)
else:
c_path = "{}/IfModule[last() + 1]".format(aug_conf_path)
c_path_arg = "{}/IfModule[last()]/arg".format(aug_conf_path)
self.aug.set(c_path, "")
retpath = "{}/IfModule[last()]/".format(aug_conf_path)
self.aug.set(c_path_arg, mod)
return retpath
return if_mods[0][:len(if_mods[0]) - 3]
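As the docstrings above note, get_ifmod and create_ifmod return an Augeas path postfixed with "/" (possibly a dynamic one such as .../IfModule[last()]/), which callers trim with [:-1] before handing it to add_dir, which is exactly what the CentOS override earlier in this diff does. The sketch below mimics that calling convention with a fake parser object; it is not a real Augeas session.

```python
# Sketch of the caller-side convention for get_ifmod()/create_ifmod():
# both return a path with a trailing "/", trimmed by the caller.
# FakeParser is a stand-in, not Certbot's ApacheParser.
class FakeParser:
    def __init__(self):
        self.directives = []

    def create_ifmod(self, aug_conf_path, mod, beginning=False):
        # Mirrors the documented return value: ".../IfModule[1]/" when
        # created at the beginning, ".../IfModule[last()]/" otherwise.
        index = "[1]" if beginning else "[last()]"
        return "{0}/IfModule{1}/".format(aug_conf_path, index)

    def add_dir(self, aug_path, directive, args):
        self.directives.append((aug_path, directive, args))

fake = FakeParser()
ifmod = fake.create_ifmod("/files/etc/httpd/conf/httpd.conf",
                          "!mod_ssl.c", beginning=True)
# Trim the trailing "/" exactly as the configurator does with ifmod[:-1]
fake.add_dir(ifmod[:-1], "LoadModule", ["ssl_module", "modules/mod_ssl.so"])
print(fake.directives)
```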
def add_dir(self, aug_conf_path, directive, args):
"""Appends directive to the end fo the file given by aug_conf_path.
@@ -637,20 +458,6 @@ class ApacheParser(object):
return ordered_matches
def get_all_args(self, match):
"""
Tries to fetch all arguments for a directive. See get_arg.
Note that if match is an ancestor node, it returns all names of
child directives as well as the list of arguments.
"""
if match[-1] != "/":
match = match+"/"
allargs = self.aug.match(match + '*')
return [self.get_arg(arg) for arg in allargs]
def get_arg(self, match):
"""Uses augeas.get to get argument value and interprets result.
@@ -794,8 +601,9 @@ class ApacheParser(object):
if sys.version_info < (3, 6):
# This strips off the final \Z(?ms)
return fnmatch.translate(clean_fn_match)[:-7]
else: # pragma: no cover
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
return fnmatch.translate(clean_fn_match)[4:-3]
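The hunk above copes with the change in fnmatch.translate output across Python versions: before 3.6 the translated pattern ended in \Z(?ms), while 3.6 and 3.7 wrap it as (?s:...)\Z, so a different slice is needed to recover the bare regex fragment. A quick check of the newer behaviour (output shown for CPython 3.6/3.7; the exact anchors could change again in later releases):

```python
import fnmatch

pattern = fnmatch.translate("*.load")
# On Python 3.6/3.7 this prints a pattern like: (?s:.*\.load)\Z
print(pattern)
# Stripping the "(?s:" prefix and ")\Z" suffix leaves the bare fragment
# that the parser goes on to use as part of a regular expression.
print(pattern[4:-3])   # .*\.load
```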
def parse_file(self, filepath):
"""Parse file with Augeas
@@ -809,7 +617,8 @@ class ApacheParser(object):
use_new, remove_old = self._check_path_actions(filepath)
# Ensure that we have the latest Augeas DOM state on disk before
# calling aug.load() which reloads the state from disk
self.ensure_augeas_state()
if self.configurator:
self.configurator.ensure_augeas_state()
# Test if augeas included file for Httpd.lens
# Note: This works for augeas globs, ie. *.conf
if use_new:
@@ -876,7 +685,10 @@ class ApacheParser(object):
use_new = False
else:
use_new = True
remove_old = new_file_match == "*"
if new_file_match == "*":
remove_old = True
else:
remove_old = False
except KeyError:
use_new = True
remove_old = False

View File

@@ -3,11 +3,6 @@
# A hackish script to see if the client is behaving as expected
# with each of the "passing" conf files.
if [ -z "$SERVER" ]; then
echo "Please set SERVER to the ACME server's directory URL."
exit 1
fi
export EA=/etc/apache2/
TESTDIR="`dirname $0`"
cd $TESTDIR/passing
@@ -61,16 +56,13 @@ if [ "$1" = --debian-modules ] ; then
done
fi
CERTBOT_CMD="sudo $(command -v certbot) --server $SERVER -vvvv"
CERTBOT_CMD="$CERTBOT_CMD --debug --apache --register-unsafely-without-email"
CERTBOT_CMD="$CERTBOT_CMD --agree-tos certonly -t --no-verify-ssl"
FAILS=0
trap CleanupExit INT
for f in *.conf ; do
echo -n testing "$f"...
Setup
RESULT=`echo c | $CERTBOT_CMD 2>&1`
RESULT=`echo c | sudo $(command -v certbot) -vvvv --debug --staging --apache --register-unsafely-without-email --agree-tos certonly -t 2>&1`
if echo $RESULT | grep -Eq \("Which names would you like"\|"mod_macro is not yet"\) ; then
echo passed
else

View File

@@ -1,27 +0,0 @@
#!/usr/bin/env python
"""
This executable script wraps the apache-conf-test bash script, in order to set up a pebble instance
before its execution. Directory URL is passed through the SERVER environment variable.
"""
import os
import subprocess
import sys
from certbot_integration_tests.utils import acme_server
SCRIPT_DIRNAME = os.path.dirname(__file__)
def main(args=None):
if not args:
args = sys.argv[1:]
with acme_server.ACMEServer('pebble', [], False) as acme_xdist:
environ = os.environ.copy()
environ['SERVER'] = acme_xdist['directory_url']
command = [os.path.join(SCRIPT_DIRNAME, 'apache-conf-test')]
command.extend(args)
return subprocess.call(command, env=environ)
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,7 +1,7 @@
#LoadModule ssl_module modules/mod_ssl.so
Listen 4443
<VirtualHost *:4443>
Listen 443
<VirtualHost *:443>
# The ServerName directive sets the request scheme, hostname and port that
# the server uses to identify itself. This is used when creating
# redirection URLs. In the context of virtual hosts, the ServerName

View File

@@ -1,4 +1,5 @@
"""Test for certbot_apache.configurator implementations of reverter"""
"""Test for certbot_apache.augeas_configurator."""
import os
import shutil
import unittest
@@ -9,12 +10,12 @@ from certbot import errors
from certbot_apache.tests import util
class ConfiguratorReverterTest(util.ApacheTest):
"""Test for ApacheConfigurator reverter methods"""
class AugeasConfiguratorTest(util.ApacheTest):
"""Test for Augeas Configurator base class."""
def setUp(self): # pylint: disable=arguments-differ
super(ConfiguratorReverterTest, self).setUp()
super(AugeasConfiguratorTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
@@ -27,6 +28,20 @@ class ConfiguratorReverterTest(util.ApacheTest):
shutil.rmtree(self.work_dir)
shutil.rmtree(self.temp_dir)
def test_bad_parse(self):
# pylint: disable=protected-access
self.config.parser.parse_file(os.path.join(
self.config.parser.root, "conf-available", "bad_conf_file.conf"))
self.assertRaises(
errors.PluginError, self.config.check_parsing_errors, "httpd.aug")
def test_bad_save(self):
mock_save = mock.Mock()
mock_save.side_effect = IOError
self.config.aug.save = mock_save
self.assertRaises(errors.PluginError, self.config.save)
def test_bad_save_checkpoint(self):
self.config.reverter.add_to_checkpoint = mock.Mock(
side_effect=errors.ReverterError)
@@ -48,9 +63,23 @@ class ConfiguratorReverterTest(util.ApacheTest):
self.assertTrue(mock_finalize.is_called)
def test_recovery_routine(self):
mock_load = mock.Mock()
self.config.aug.load = mock_load
self.config.recovery_routine()
self.assertEqual(mock_load.call_count, 1)
def test_recovery_routine_error(self):
self.config.reverter.recovery_routine = mock.Mock(
side_effect=errors.ReverterError)
self.assertRaises(
errors.PluginError, self.config.recovery_routine)
def test_revert_challenge_config(self):
mock_load = mock.Mock()
self.config.parser.aug.load = mock_load
self.config.aug.load = mock_load
self.config.revert_challenge_config()
self.assertEqual(mock_load.call_count, 1)
@@ -64,7 +93,7 @@ class ConfiguratorReverterTest(util.ApacheTest):
def test_rollback_checkpoints(self):
mock_load = mock.Mock()
self.config.parser.aug.load = mock_load
self.config.aug.load = mock_load
self.config.rollback_checkpoints()
self.assertEqual(mock_load.call_count, 1)
@@ -74,11 +103,13 @@ class ConfiguratorReverterTest(util.ApacheTest):
side_effect=errors.ReverterError)
self.assertRaises(errors.PluginError, self.config.rollback_checkpoints)
def test_recovery_routine_reload(self):
mock_load = mock.Mock()
self.config.parser.aug.load = mock_load
self.config.recovery_routine()
self.assertEqual(mock_load.call_count, 1)
def test_view_config_changes(self):
self.config.view_config_changes()
def test_view_config_changes_error(self):
self.config.reverter.view_config_changes = mock.Mock(
side_effect=errors.ReverterError)
self.assertRaises(errors.PluginError, self.config.view_config_changes)
if __name__ == "__main__":

View File

@@ -35,9 +35,8 @@ class AutoHSTSTest(util.ApacheTest):
pat = '(?:[ "]|^)(strict-transport-security)(?:[ "]|$)'
for head in header_path:
if re.search(pat, self.config.parser.aug.get(head).lower()):
return self.config.parser.aug.get(
head.replace("arg[3]", "arg[4]"))
return None # pragma: no cover
return self.config.parser.aug.get(head.replace("arg[3]",
"arg[4]"))
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
@mock.patch("certbot_apache.configurator.ApacheConfigurator.enable_mod")
@@ -56,23 +55,20 @@ class AutoHSTSTest(util.ApacheTest):
@mock.patch("certbot_apache.constants.AUTOHSTS_FREQ", 0)
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
@mock.patch("certbot_apache.configurator.ApacheConfigurator.prepare")
def test_autohsts_increase(self, mock_prepare, _mock_restart):
self.config._prepared = False
def test_autohsts_increase(self, _mock_restart):
maxage = "\"max-age={0}\""
initial_val = maxage.format(constants.AUTOHSTS_STEPS[0])
inc_val = maxage.format(constants.AUTOHSTS_STEPS[1])
self.config.enable_autohsts(mock.MagicMock(), ["ocspvhost.com"])
# Verify initial value
self.assertEqual(self.get_autohsts_value(self.vh_truth[7].path),
self.assertEquals(self.get_autohsts_value(self.vh_truth[7].path),
initial_val)
# Increase
self.config.update_autohsts(mock.MagicMock())
# Verify increased value
self.assertEqual(self.get_autohsts_value(self.vh_truth[7].path),
self.assertEquals(self.get_autohsts_value(self.vh_truth[7].path),
inc_val)
self.assertTrue(mock_prepare.called)
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
@mock.patch("certbot_apache.configurator.ApacheConfigurator._autohsts_increase")
@@ -81,7 +77,7 @@ class AutoHSTSTest(util.ApacheTest):
initial_val = maxage.format(constants.AUTOHSTS_STEPS[0])
self.config.enable_autohsts(mock.MagicMock(), ["ocspvhost.com"])
# Verify initial value
self.assertEqual(self.get_autohsts_value(self.vh_truth[7].path),
self.assertEquals(self.get_autohsts_value(self.vh_truth[7].path),
initial_val)
self.config.update_autohsts(mock.MagicMock())
@@ -113,19 +109,16 @@ class AutoHSTSTest(util.ApacheTest):
for i in range(len(constants.AUTOHSTS_STEPS)-1):
# Ensure that value is not made permanent prematurely
self.config.deploy_autohsts(mock_lineage)
self.assertNotEqual(self.get_autohsts_value(self.vh_truth[7].path),
self.assertNotEquals(self.get_autohsts_value(self.vh_truth[7].path),
max_val)
self.config.update_autohsts(mock.MagicMock())
# Value should match pre-permanent increment step
cur_val = maxage.format(constants.AUTOHSTS_STEPS[i+1])
self.assertEqual(self.get_autohsts_value(self.vh_truth[7].path),
self.assertEquals(self.get_autohsts_value(self.vh_truth[7].path),
cur_val)
# Ensure that the value is raised to max
self.assertEqual(self.get_autohsts_value(self.vh_truth[7].path),
maxage.format(constants.AUTOHSTS_STEPS[-1]))
# Make permanent
self.config.deploy_autohsts(mock_lineage)
self.assertEqual(self.get_autohsts_value(self.vh_truth[7].path),
self.assertEquals(self.get_autohsts_value(self.vh_truth[7].path),
max_val)
def test_autohsts_update_noop(self):
@@ -157,7 +150,7 @@ class AutoHSTSTest(util.ApacheTest):
mock_id.return_value = "1234567"
self.config.enable_autohsts(mock.MagicMock(),
["ocspvhost.com", "ocspvhost.com"])
self.assertEqual(mock_id.call_count, 1)
self.assertEquals(mock_id.call_count, 1)
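The AutoHSTS tests above exercise a gradual ramp: the Strict-Transport-Security max-age starts at the first entry of constants.AUTOHSTS_STEPS, is bumped one step per update_autohsts call, and is only made permanent by deploy_autohsts once the last step has been reached. The toy model below only illustrates that ramp; the step values and the permanent value are made up, since the real constants are not shown in this diff.

```python
# Toy model of the AutoHSTS max-age ramp exercised by the tests above.
# These values are illustrative placeholders, not Certbot's constants.
AUTOHSTS_STEPS = [60, 300, 3600, 86400]      # hypothetical seconds
AUTOHSTS_PERMANENT = 31536000                # hypothetical final value

def next_max_age(current, steps=AUTOHSTS_STEPS):
    """Return the next step after current, or current if already at the end."""
    later = [s for s in steps if s > current]
    return later[0] if later else current

value = AUTOHSTS_STEPS[0]
for _ in AUTOHSTS_STEPS:
    value = next_max_age(value)
# Only after the ramp completes would the permanent value be deployed.
print(value, "->", AUTOHSTS_PERMANENT)
```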
def test_autohsts_remove_orphaned(self):
# pylint: disable=protected-access

View File

@@ -1,222 +0,0 @@
"""Test for certbot_apache.configurator for CentOS 6 overrides"""
import unittest
from certbot.compat import os
from certbot.errors import MisconfigurationError
from certbot_apache import obj
from certbot_apache import override_centos
from certbot_apache import parser
from certbot_apache.tests import util
def get_vh_truth(temp_dir, config_name):
"""Return the ground truth for the specified directory."""
prefix = os.path.join(
temp_dir, config_name, "httpd/conf.d")
aug_pre = "/files" + prefix
vh_truth = [
obj.VirtualHost(
os.path.join(prefix, "test.example.com.conf"),
os.path.join(aug_pre, "test.example.com.conf/VirtualHost"),
set([obj.Addr.fromstring("*:80")]),
False, True, "test.example.com"),
obj.VirtualHost(
os.path.join(prefix, "ssl.conf"),
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
set([obj.Addr.fromstring("_default_:443")]),
True, True, None)
]
return vh_truth
class CentOS6Tests(util.ApacheTest):
"""Tests for CentOS 6"""
def setUp(self): # pylint: disable=arguments-differ
test_dir = "centos6_apache/apache"
config_root = "centos6_apache/apache/httpd"
vhost_root = "centos6_apache/apache/httpd/conf.d"
super(CentOS6Tests, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
version=(2, 2, 15), os_info="centos")
self.vh_truth = get_vh_truth(
self.temp_dir, "centos6_apache/apache")
def test_get_parser(self):
self.assertTrue(isinstance(self.config.parser,
override_centos.CentOSParser))
def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found."""
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 2)
found = 0
for vhost in vhs:
for centos_truth in self.vh_truth:
if vhost == centos_truth:
found += 1
break
else:
raise Exception("Missed: %s" % vhost) # pragma: no cover
self.assertEqual(found, 2)
def test_loadmod_default(self):
ssl_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
self.assertEqual(len(ssl_loadmods), 1)
# Make sure the LoadModule ssl_module is in ssl.conf (default)
self.assertTrue("ssl.conf" in ssl_loadmods[0])
# ...and that it's not inside of <IfModule>
self.assertFalse("IfModule" in ssl_loadmods[0])
# Get the example vhost
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.save()
post_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
# We should now have LoadModule ssl_module in root conf and ssl.conf
self.assertEqual(len(post_loadmods), 2)
for lm in post_loadmods:
# lm[:-7] removes "/arg[#]" from the path
arguments = self.config.parser.get_all_args(lm[:-7])
self.assertEqual(arguments, ["ssl_module", "modules/mod_ssl.so"])
# ...and both of them should be wrapped in <IfModule !mod_ssl.c>
# lm[:-17] strips off /directive/arg[1] from the path.
ifmod_args = self.config.parser.get_all_args(lm[:-17])
self.assertTrue("!mod_ssl.c" in ifmod_args)
def test_loadmod_multiple(self):
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
# Adds another LoadModule to main httpd.conf in addition to ssl.conf
self.config.parser.add_dir(self.config.parser.loc["default"], "LoadModule",
sslmod_args)
self.config.save()
pre_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
# LoadModules are not within IfModule blocks
self.assertFalse(any(["ifmodule" in m.lower() for m in pre_loadmods]))
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
post_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
for mod in post_loadmods:
self.assertTrue(self.config.parser.not_modssl_ifmodule(mod)) #pylint: disable=no-member
def test_loadmod_rootconf_exists(self):
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
rootconf_ifmod = self.config.parser.get_ifmod(
parser.get_aug_path(self.config.parser.loc["default"]),
"!mod_ssl.c", beginning=True)
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
self.config.save()
# Get the example vhost
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.save()
root_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module",
start=parser.get_aug_path(self.config.parser.loc["default"]),
exclude=False)
mods = [lm for lm in root_loadmods if self.config.parser.loc["default"] in lm]
self.assertEqual(len(mods), 1)
# [:-7] removes "/arg[#]" from the path
self.assertEqual(
self.config.parser.get_all_args(mods[0][:-7]),
sslmod_args)
def test_neg_loadmod_already_on_path(self):
loadmod_args = ["ssl_module", "modules/mod_ssl.so"]
ifmod = self.config.parser.get_ifmod(
self.vh_truth[1].path, "!mod_ssl.c", beginning=True)
self.config.parser.add_dir(ifmod[:-1], "LoadModule", loadmod_args)
self.config.parser.add_dir(self.vh_truth[1].path, "LoadModule", loadmod_args)
self.config.save()
pre_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
self.assertEqual(len(pre_loadmods), 2)
# The ssl.conf now has two LoadModule directives, one inside of
# !mod_ssl.c IfModule
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.save()
# Ensure that the additional LoadModule wasn't written into the IfModule
post_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
self.assertEqual(len(post_loadmods), 1)
def test_loadmod_non_duplicate(self):
# the modules/mod_ssl.so exists in ssl.conf
sslmod_args = ["ssl_module", "modules/mod_somethingelse.so"]
rootconf_ifmod = self.config.parser.get_ifmod(
parser.get_aug_path(self.config.parser.loc["default"]),
"!mod_ssl.c", beginning=True)
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
self.config.save()
self.config.assoc["test.example.com"] = self.vh_truth[0]
pre_matches = self.config.parser.find_dir("LoadModule",
"ssl_module", exclude=False)
self.assertRaises(MisconfigurationError, self.config.deploy_cert,
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
post_matches = self.config.parser.find_dir("LoadModule",
"ssl_module", exclude=False)
# Make sure that none was changed
self.assertEqual(pre_matches, post_matches)
def test_loadmod_not_found(self):
# Remove all existing LoadModule ssl_module... directives
orig_loadmods = self.config.parser.find_dir("LoadModule",
"ssl_module",
exclude=False)
for mod in orig_loadmods:
noarg_path = mod.rpartition("/")[0]
self.config.parser.aug.remove(noarg_path)
self.config.save()
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
post_loadmods = self.config.parser.find_dir("LoadModule",
"ssl_module",
exclude=False)
self.assertFalse(post_loadmods)
def test_no_ifmod_search_false(self):
#pylint: disable=no-member
self.assertFalse(self.config.parser.not_modssl_ifmodule(
"/path/does/not/include/ifmod"
))
self.assertFalse(self.config.parser.not_modssl_ifmodule(
""
))
self.assertFalse(self.config.parser.not_modssl_ifmodule(
"/path/includes/IfModule/but/no/arguments"
))
if __name__ == "__main__":
unittest.main() # pragma: no cover

View File

@@ -1,17 +1,15 @@
"""Test for certbot_apache.configurator for Centos overrides"""
import os
import unittest
import mock
from certbot import errors
from certbot.compat import filesystem
from certbot.compat import os
from certbot_apache import obj
from certbot_apache import override_centos
from certbot_apache.tests import util
def get_vh_truth(temp_dir, config_name):
"""Return the ground truth for the specified directory."""
prefix = os.path.join(
@@ -32,59 +30,6 @@ def get_vh_truth(temp_dir, config_name):
]
return vh_truth
class FedoraRestartTest(util.ApacheTest):
"""Tests for Fedora specific self-signed certificate override"""
def setUp(self): # pylint: disable=arguments-differ
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="fedora_old")
self.vh_truth = get_vh_truth(
self.temp_dir, "centos7_apache/apache")
def _run_fedora_test(self):
self.assertIsInstance(self.config, override_centos.CentOSConfigurator)
with mock.patch("certbot.util.get_os_info") as mock_info:
mock_info.return_value = ["fedora", "28"]
self.config.config_test()
def test_non_fedora_error(self):
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
with mock.patch(c_test) as mock_test:
mock_test.side_effect = errors.MisconfigurationError
with mock.patch("certbot.util.get_os_info") as mock_info:
mock_info.return_value = ["not_fedora"]
self.assertRaises(errors.MisconfigurationError,
self.config.config_test)
def test_fedora_restart_error(self):
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
with mock.patch(c_test) as mock_test:
# First call raises error, second doesn't
mock_test.side_effect = [errors.MisconfigurationError, '']
with mock.patch("certbot.util.run_script") as mock_run:
mock_run.side_effect = errors.SubprocessError
self.assertRaises(errors.MisconfigurationError,
self._run_fedora_test)
def test_fedora_restart(self):
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
with mock.patch(c_test) as mock_test:
with mock.patch("certbot.util.run_script") as mock_run:
# First call raises error, second doesn't
mock_test.side_effect = [errors.MisconfigurationError, '']
self._run_fedora_test()
self.assertEqual(mock_test.call_count, 2)
self.assertEqual(mock_run.call_args[0][0],
['systemctl', 'restart', 'httpd'])
class MultipleVhostsTestCentOS(util.ApacheTest):
"""Multiple vhost tests for CentOS / RHEL family of distros"""
@@ -105,7 +50,8 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
self.temp_dir, "centos7_apache/apache")
def test_get_parser(self):
self.assertIsInstance(self.config.parser, override_centos.CentOSParser)
self.assertTrue(isinstance(self.config.parser,
override_centos.CentOSParser))
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
def test_opportunistic_httpd_runtime_parsing(self, mock_get):
@@ -135,9 +81,9 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
mock_osi.return_value = ("centos", "7")
self.config.parser.update_runtime_variables()
self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertEquals(mock_get.call_count, 3)
self.assertEquals(len(self.config.parser.modules), 4)
self.assertEquals(len(self.config.parser.variables), 2)
self.assertTrue("TEST2" in self.config.parser.variables.keys())
self.assertTrue("mod_another.c" in self.config.parser.modules)
@@ -161,7 +107,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
"""Make sure we read the sysconfig OPTIONS variable correctly"""
# Return nothing for the process calls
mock_cfg.return_value = ""
self.config.parser.sysconfig_filep = filesystem.realpath(
self.config.parser.sysconfig_filep = os.path.realpath(
os.path.join(self.config.parser.root, "../sysconfig/httpd"))
self.config.parser.variables = {}
@@ -181,7 +127,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
def test_alt_restart_works(self, mock_run_script):
mock_run_script.side_effect = [None, errors.SubprocessError, None]
self.config.restart()
self.assertEqual(mock_run_script.call_count, 3)
self.assertEquals(mock_run_script.call_count, 3)
@mock.patch("certbot_apache.configurator.util.run_script")
def test_alt_restart_errors(self, mock_run_script):
@@ -189,7 +135,5 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
errors.SubprocessError,
errors.SubprocessError]
self.assertRaises(errors.MisconfigurationError, self.config.restart)
if __name__ == "__main__":
unittest.main() # pragma: no cover

View File

@@ -1,9 +1,9 @@
"""Tests for certbot_apache.parser."""
import os
import shutil
import unittest
from certbot import errors
from certbot.compat import os
from certbot_apache.tests import util

View File

@@ -1,6 +1,6 @@
# pylint: disable=too-many-public-methods,too-many-lines
"""Test for certbot_apache.configurator."""
import copy
import os
import shutil
import socket
import tempfile
@@ -15,21 +15,22 @@ from acme import challenges
from certbot import achallenges
from certbot import crypto_util
from certbot import errors
from certbot.compat import os
from certbot.compat import filesystem
from certbot.tests import acme_util
from certbot.tests import util as certbot_util
from certbot_apache import apache_util
from certbot_apache import constants
from certbot_apache import obj
from certbot_apache import parser
from certbot_apache import obj
from certbot_apache.tests import util
class MultipleVhostsTest(util.ApacheTest):
"""Test two standard well-configured HTTP vhosts."""
def setUp(self): # pylint: disable=arguments-differ
super(MultipleVhostsTest, self).setUp()
@@ -51,14 +52,25 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.deploy_cert = mocked_deploy_cert
return self.config
@mock.patch("certbot_apache.configurator.ApacheConfigurator.init_augeas")
@mock.patch("certbot_apache.configurator.path_surgery")
def test_prepare_no_install(self, mock_surgery):
def test_prepare_no_install(self, mock_surgery, _init_augeas):
silly_path = {"PATH": "/tmp/nothingness2342"}
mock_surgery.return_value = False
with mock.patch.dict('os.environ', silly_path):
self.assertRaises(errors.NoInstallationError, self.config.prepare)
self.assertEqual(mock_surgery.call_count, 1)
@mock.patch("certbot_apache.augeas_configurator.AugeasConfigurator.init_augeas")
def test_prepare_no_augeas(self, mock_init_augeas):
""" Test augeas initialization ImportError """
def side_effect_error():
""" Side effect error for the test """
raise ImportError
mock_init_augeas.side_effect = side_effect_error
self.assertRaises(
errors.NoInstallationError, self.config.prepare)
@mock.patch("certbot_apache.parser.ApacheParser")
@mock.patch("certbot_apache.configurator.util.exe_exists")
def test_prepare_version(self, mock_exe_exists, _):
@@ -70,6 +82,16 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertRaises(
errors.NotSupportedError, self.config.prepare)
@mock.patch("certbot_apache.parser.ApacheParser")
@mock.patch("certbot_apache.configurator.util.exe_exists")
def test_prepare_old_aug(self, mock_exe_exists, _):
mock_exe_exists.return_value = True
self.config.config_test = mock.Mock()
# pylint: disable=protected-access
self.config._check_aug_version = mock.Mock(return_value=False)
self.assertRaises(
errors.NotSupportedError, self.config.prepare)
def test_prepare_locked(self):
server_root = self.config.conf("server-root")
self.config.config_test = mock.Mock()
@@ -93,53 +115,9 @@ class MultipleVhostsTest(util.ApacheTest):
# Weak test..
ApacheConfigurator.add_parser_arguments(mock.MagicMock())
def test_docs_parser_arguments(self):
os.environ["CERTBOT_DOCS"] = "1"
from certbot_apache.configurator import ApacheConfigurator
mock_add = mock.MagicMock()
ApacheConfigurator.add_parser_arguments(mock_add)
parserargs = ["server_root", "enmod", "dismod", "le_vhost_ext",
"vhost_root", "logs_root", "challenge_location",
"handle_modules", "handle_sites", "ctl"]
exp = dict()
for k in ApacheConfigurator.OS_DEFAULTS:
if k in parserargs:
exp[k.replace("_", "-")] = ApacheConfigurator.OS_DEFAULTS[k]
# Special cases
exp["vhost-root"] = None
exp["init-script"] = None
found = set()
for call in mock_add.call_args_list:
# init-script is a special case: deprecated argument
if call[0][0] != "init-script":
self.assertEqual(exp[call[0][0]], call[1]['default'])
found.add(call[0][0])
# Make sure that all (and only) the expected values exist
self.assertEqual(len(mock_add.call_args_list), len(found))
for e in exp:
self.assertTrue(e in found)
del os.environ["CERTBOT_DOCS"]
def test_add_parser_arguments_all_configurators(self): # pylint: disable=no-self-use
from certbot_apache.entrypoint import OVERRIDE_CLASSES
for cls in OVERRIDE_CLASSES.values():
cls.add_parser_arguments(mock.MagicMock())
def test_all_configurators_defaults_defined(self):
from certbot_apache.entrypoint import OVERRIDE_CLASSES
from certbot_apache.configurator import ApacheConfigurator
parameters = set(ApacheConfigurator.OS_DEFAULTS.keys())
for cls in OVERRIDE_CLASSES.values():
self.assertTrue(parameters.issubset(set(cls.OS_DEFAULTS.keys())))
def test_constant(self):
self.assertTrue("debian_apache_2_4/multiple_vhosts/apache" in
self.config.option("server_root"))
self.assertEqual(self.config.option("nonexistent"), None)
self.assertEqual(self.config.constant("server_root"), "/etc/apache2")
self.assertEqual(self.config.constant("nonexistent"), None)
@certbot_util.patch_get_utility()
def test_get_all_names(self, mock_getutility):
@@ -148,8 +126,7 @@ class MultipleVhostsTest(util.ApacheTest):
names = self.config.get_all_names()
self.assertEqual(names, set(
["certbot.demo", "ocspvhost.com", "encryption-example.demo",
"nonsym.link", "vhost.in.rootconf", "www.certbot.demo",
"duplicate.example.com"]
"nonsym.link", "vhost.in.rootconf", "www.certbot.demo"]
))
@certbot_util.patch_get_utility()
@@ -168,7 +145,8 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.vhosts.append(vhost)
names = self.config.get_all_names()
self.assertEqual(len(names), 9)
# Names get filtered, only 5 are returned
self.assertEqual(len(names), 8)
self.assertTrue("zombo.com" in names)
self.assertTrue("google.com" in names)
self.assertTrue("certbot.demo" in names)
@@ -209,7 +187,7 @@ class MultipleVhostsTest(util.ApacheTest):
def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found."""
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 12)
self.assertEqual(len(vhs), 10)
found = 0
for vhost in vhs:
@@ -220,7 +198,7 @@ class MultipleVhostsTest(util.ApacheTest):
else:
raise Exception("Missed: %s" % vhost) # pragma: no cover
self.assertEqual(found, 12)
self.assertEqual(found, 10)
# Handle case of non-debian layout get_virtual_hosts
with mock.patch(
@@ -228,7 +206,7 @@ class MultipleVhostsTest(util.ApacheTest):
) as mock_conf:
mock_conf.return_value = False
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 12)
self.assertEqual(len(vhs), 10)
@mock.patch("certbot_apache.display_ops.select_vhost")
def test_choose_vhost_none_avail(self, mock_select):
@@ -331,7 +309,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.vhosts = [
vh for vh in self.config.vhosts
if vh.name not in ["certbot.demo", "nonsym.link",
"encryption-example.demo", "duplicate.example.com",
"encryption-example.demo",
"ocspvhost.com", "vhost.in.rootconf"]
and "*.blue.purple.com" not in vh.aliases
]
@@ -342,7 +320,7 @@ class MultipleVhostsTest(util.ApacheTest):
def test_non_default_vhosts(self):
# pylint: disable=protected-access
vhosts = self.config._non_default_vhosts(self.config.vhosts)
self.assertEqual(len(vhosts), 10)
self.assertEqual(len(vhosts), 8)
def test_deploy_cert_enable_new_vhost(self):
# Create
@@ -362,7 +340,6 @@ class MultipleVhostsTest(util.ApacheTest):
"""Mock method for parser.find_dir"""
if directive == "Include" and argument.endswith("options-ssl-apache.conf"):
return ["/path/to/whatever"]
return None # pragma: no cover
mock_add = mock.MagicMock()
self.config.parser.add_dir = mock_add
@@ -474,6 +451,7 @@ class MultipleVhostsTest(util.ApacheTest):
but an SSLCertificateKeyFile directive is missing."""
if "SSLCertificateFile" in args:
return ["example/cert.pem"]
else:
return []
mock_find_dir = mock.MagicMock(return_value=[])
@@ -654,7 +632,7 @@ class MultipleVhostsTest(util.ApacheTest):
# span excludes the closing </VirtualHost> tag in older versions
# of Augeas
return_value = [self.vh_truth[0].filep, 1, 12, 0, 0, 0, 1142]
with mock.patch.object(self.config.parser.aug, 'span') as mock_span:
with mock.patch.object(self.config.aug, 'span') as mock_span:
mock_span.return_value = return_value
self.test_make_vhost_ssl()
@@ -662,7 +640,7 @@ class MultipleVhostsTest(util.ApacheTest):
# span includes the closing </VirtualHost> tag in newer versions
# of Augeas
return_value = [self.vh_truth[0].filep, 1, 12, 0, 0, 0, 1157]
with mock.patch.object(self.config.parser.aug, 'span') as mock_span:
with mock.patch.object(self.config.aug, 'span') as mock_span:
mock_span.return_value = return_value
self.test_make_vhost_ssl()
@@ -673,9 +651,22 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertEqual(ssl_vhost_slink.name, "nonsym.link")
def test_make_vhost_ssl_nonexistent_vhost_path(self):
def conf_side_effect(arg):
""" Mock function for ApacheConfigurator.conf """
confvars = {
"vhost-root": "/tmp/nonexistent",
"le_vhost_ext": "-le-ssl.conf",
"handle-sites": True}
return confvars[arg]
with mock.patch(
"certbot_apache.configurator.ApacheConfigurator.conf"
) as mock_conf:
mock_conf.side_effect = conf_side_effect
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[1])
self.assertEqual(os.path.dirname(ssl_vhost.filep),
os.path.dirname(filesystem.realpath(self.vh_truth[1].filep)))
os.path.dirname(os.path.realpath(
self.vh_truth[1].filep)))
def test_make_vhost_ssl(self):
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
@@ -696,7 +687,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertEqual(self.config.is_name_vhost(self.vh_truth[0]),
self.config.is_name_vhost(ssl_vhost))
self.assertEqual(len(self.config.vhosts), 13)
self.assertEqual(len(self.config.vhosts), 11)
def test_clean_vhost_ssl(self):
# pylint: disable=protected-access
@@ -789,19 +780,32 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertEqual(self.config.add_name_vhost.call_count, 2)
@mock.patch("certbot_apache.configurator.http_01.ApacheHttp01.perform")
@mock.patch("certbot_apache.configurator.tls_sni_01.ApacheTlsSni01.perform")
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
def test_perform(self, mock_restart, mock_http_perform):
def test_perform(self, mock_restart, mock_tls_perform, mock_http_perform):
# Only tests functionality specific to configurator.perform
# Note: As more challenges are offered this will have to be expanded
account_key, achalls = self.get_key_and_achalls()
expected = [achall.response(account_key) for achall in achalls]
mock_http_perform.return_value = expected
all_expected = []
http_expected = []
tls_expected = []
for achall in achalls:
response = achall.response(account_key)
if isinstance(achall.chall, challenges.HTTP01):
http_expected.append(response)
else:
tls_expected.append(response)
all_expected.append(response)
mock_http_perform.return_value = http_expected
mock_tls_perform.return_value = tls_expected
responses = self.config.perform(achalls)
self.assertEqual(mock_http_perform.call_count, 1)
self.assertEqual(responses, expected)
self.assertEqual(mock_tls_perform.call_count, 1)
self.assertEqual(responses, all_expected)
self.assertEqual(mock_restart.call_count, 1)
@@ -1021,7 +1025,7 @@ class MultipleVhostsTest(util.ApacheTest):
# pylint: disable=protected-access
http_vh = self.config._get_http_vhost(ssl_vh)
self.assertFalse(http_vh.ssl)
self.assertTrue(http_vh.ssl == False)
@mock.patch("certbot.util.run_script")
@mock.patch("certbot.util.exe_exists")
@@ -1210,7 +1214,7 @@ class MultipleVhostsTest(util.ApacheTest):
except errors.PluginEnhancementAlreadyPresent:
args_paths = self.config.parser.find_dir(
"RewriteRule", None, http_vhost.path, False)
arg_vals = [self.config.parser.aug.get(x) for x in args_paths]
arg_vals = [self.config.aug.get(x) for x in args_paths]
self.assertEqual(arg_vals, constants.REWRITE_HTTPS_ARGS)
@@ -1264,7 +1268,7 @@ class MultipleVhostsTest(util.ApacheTest):
# pylint: disable=protected-access
self.config._enable_redirect(self.vh_truth[1], "")
self.assertEqual(len(self.config.vhosts), 13)
self.assertEqual(len(self.config.vhosts), 11)
def test_create_own_redirect_for_old_apache_version(self):
self.config.parser.modules.add("rewrite_module")
@@ -1275,7 +1279,7 @@ class MultipleVhostsTest(util.ApacheTest):
# pylint: disable=protected-access
self.config._enable_redirect(self.vh_truth[1], "")
self.assertEqual(len(self.config.vhosts), 13)
self.assertEqual(len(self.config.vhosts), 11)
def test_sift_rewrite_rule(self):
# pylint: disable=protected-access
@@ -1313,6 +1317,24 @@ class MultipleVhostsTest(util.ApacheTest):
return account_key, (achall1, achall2, achall3)
def test_make_addrs_sni_ready(self):
self.config.version = (2, 2)
self.config.make_addrs_sni_ready(
set([obj.Addr.fromstring("*:443"), obj.Addr.fromstring("*:80")]))
self.assertTrue(self.config.parser.find_dir(
"NameVirtualHost", "*:80", exclude=False))
self.assertTrue(self.config.parser.find_dir(
"NameVirtualHost", "*:443", exclude=False))
def test_aug_version(self):
mock_match = mock.Mock(return_value=["something"])
self.config.aug.match = mock_match
# pylint: disable=protected-access
self.assertEqual(self.config._check_aug_version(),
["something"])
self.config.aug.match.side_effect = RuntimeError
self.assertFalse(self.config._check_aug_version())
def test_enable_site_nondebian(self):
inc_path = "/path/to/wherever"
vhost = self.vh_truth[0]
@@ -1335,8 +1357,8 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.parser.modules.add("ssl_module")
self.config.parser.modules.add("mod_ssl.c")
self.config.parser.modules.add("socache_shmcb_module")
tmp_path = filesystem.realpath(tempfile.mkdtemp("vhostroot"))
filesystem.chmod(tmp_path, 0o755)
tmp_path = os.path.realpath(tempfile.mkdtemp("vhostroot"))
os.chmod(tmp_path, 0o755)
mock_p = "certbot_apache.configurator.ApacheConfigurator._get_ssl_vhost_path"
mock_a = "certbot_apache.parser.ApacheParser.add_include"
@@ -1368,7 +1390,7 @@ class MultipleVhostsTest(util.ApacheTest):
# pylint: disable=protected-access
cases = {u"*.example.org": True, b"*.x.example.org": True,
u"a.example.org": False, b"a.x.example.org": False}
for key in cases:
for key in cases.keys():
self.assertEqual(self.config._wildcard_domain(key), cases[key])
def test_choose_vhosts_wildcard(self):
@@ -1379,11 +1401,11 @@ class MultipleVhostsTest(util.ApacheTest):
vhs = self.config._choose_vhosts_wildcard("*.certbot.demo",
create_ssl=True)
# Check that the dialog was called with one vh: certbot.demo
self.assertEqual(mock_select_vhs.call_args[0][0][0], self.vh_truth[3])
self.assertEqual(len(mock_select_vhs.call_args_list), 1)
self.assertEquals(mock_select_vhs.call_args[0][0][0], self.vh_truth[3])
self.assertEquals(len(mock_select_vhs.call_args_list), 1)
# And the actual returned values
self.assertEqual(len(vhs), 1)
self.assertEquals(len(vhs), 1)
self.assertTrue(vhs[0].name == "certbot.demo")
self.assertTrue(vhs[0].ssl)
@@ -1398,7 +1420,7 @@ class MultipleVhostsTest(util.ApacheTest):
vhs = self.config._choose_vhosts_wildcard("*.certbot.demo",
create_ssl=False)
self.assertFalse(mock_makessl.called)
self.assertEqual(vhs[0], self.vh_truth[1])
self.assertEquals(vhs[0], self.vh_truth[1])
@mock.patch("certbot_apache.configurator.ApacheConfigurator._vhosts_for_wildcard")
@mock.patch("certbot_apache.configurator.ApacheConfigurator.make_vhost_ssl")
@@ -1411,15 +1433,15 @@ class MultipleVhostsTest(util.ApacheTest):
mock_select_vhs.return_value = [self.vh_truth[7]]
vhs = self.config._choose_vhosts_wildcard("whatever",
create_ssl=True)
self.assertEqual(mock_select_vhs.call_args[0][0][0], self.vh_truth[7])
self.assertEqual(len(mock_select_vhs.call_args_list), 1)
self.assertEquals(mock_select_vhs.call_args[0][0][0], self.vh_truth[7])
self.assertEquals(len(mock_select_vhs.call_args_list), 1)
# Ensure that make_vhost_ssl was not called, vhost.ssl == true
self.assertFalse(mock_makessl.called)
# And the actual returned values
self.assertEqual(len(vhs), 1)
self.assertEquals(len(vhs), 1)
self.assertTrue(vhs[0].ssl)
self.assertEqual(vhs[0], self.vh_truth[7])
self.assertEquals(vhs[0], self.vh_truth[7])
def test_deploy_cert_wildcard(self):
@@ -1432,7 +1454,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.deploy_cert("*.wildcard.example.org", "/tmp/path",
"/tmp/path", "/tmp/path", "/tmp/path")
self.assertTrue(mock_dep.called)
self.assertEqual(len(mock_dep.call_args_list), 1)
self.assertEquals(len(mock_dep.call_args_list), 1)
self.assertEqual(self.vh_truth[7], mock_dep.call_args_list[0][0][0])
@mock.patch("certbot_apache.display_ops.select_vhost_multiple")
@@ -1480,29 +1502,6 @@ class MultipleVhostsTest(util.ApacheTest):
second_id = self.config.add_vhost_id(self.vh_truth[0])
self.assertEqual(first_id, second_id)
def test_realpath_replaces_symlink(self):
orig_match = self.config.parser.aug.match
mock_vhost = copy.deepcopy(self.vh_truth[0])
mock_vhost.filep = mock_vhost.filep.replace('sites-enabled', u'sites-available')
mock_vhost.path = mock_vhost.path.replace('sites-enabled', 'sites-available')
mock_vhost.enabled = False
self.config.parser.parse_file(mock_vhost.filep)
def mock_match(aug_expr):
"""Return a mocked match list of VirtualHosts"""
if "/mocked/path" in aug_expr:
return [self.vh_truth[1].path, self.vh_truth[0].path, mock_vhost.path]
return orig_match(aug_expr)
self.config.parser.parser_paths = ["/mocked/path"]
self.config.parser.aug.match = mock_match
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 2)
self.assertTrue(vhs[0] == self.vh_truth[1])
# mock_vhost should have replaced the vh_truth[0], because its filepath
# isn't a symlink
self.assertTrue(vhs[1] == mock_vhost)
class AugeasVhostsTest(util.ApacheTest):
"""Test vhosts with illegal names dependent on augeas version."""
@@ -1521,16 +1520,16 @@ class AugeasVhostsTest(util.ApacheTest):
self.work_dir)
def test_choosevhost_with_illegal_name(self):
self.config.parser.aug = mock.MagicMock()
self.config.parser.aug.match.side_effect = RuntimeError
path = "debian_apache_2_4/augeas_vhosts/apache2/sites-available/old-and-default.conf"
self.config.aug = mock.MagicMock()
self.config.aug.match.side_effect = RuntimeError
path = "debian_apache_2_4/augeas_vhosts/apache2/sites-available/old,default.conf"
chosen_vhost = self.config._create_vhost(path)
self.assertEqual(None, chosen_vhost)
def test_choosevhost_works(self):
path = "debian_apache_2_4/augeas_vhosts/apache2/sites-available/old-and-default.conf"
path = "debian_apache_2_4/augeas_vhosts/apache2/sites-available/old,default.conf"
chosen_vhost = self.config._create_vhost(path)
self.assertTrue(chosen_vhost is None or chosen_vhost.path == path)
self.assertTrue(chosen_vhost == None or chosen_vhost.path == path)
@mock.patch("certbot_apache.configurator.ApacheConfigurator._create_vhost")
def test_get_vhost_continue(self, mock_vhost):
@@ -1584,7 +1583,7 @@ class AugeasVhostsTest(util.ApacheTest):
broken_vhost)
class MultiVhostsTest(util.ApacheTest):
"""Test configuration with multiple virtualhosts in a single file."""
"""Test vhosts with illegal names dependent on augeas version."""
# pylint: disable=protected-access
def setUp(self): # pylint: disable=arguments-differ
@@ -1651,8 +1650,7 @@ class MultiVhostsTest(util.ApacheTest):
self.assertTrue(self.config.parser.find_dir(
"RewriteEngine", "on", ssl_vhost.path, False))
with open(ssl_vhost.filep) as the_file:
conf_text = the_file.read()
conf_text = open(ssl_vhost.filep).read()
commented_rewrite_rule = ("# RewriteRule \"^/secrets/(.+)\" "
"\"https://new.example.com/docs/$1\" [R,L]")
uncommented_rewrite_rule = ("RewriteRule \"^/docs/(.+)\" "
@@ -1668,8 +1666,7 @@ class MultiVhostsTest(util.ApacheTest):
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[3])
with open(ssl_vhost.filep) as the_file:
conf_lines = the_file.readlines()
conf_lines = open(ssl_vhost.filep).readlines()
conf_line_set = [l.strip() for l in conf_lines]
not_commented_cond1 = ("RewriteCond "
"%{DOCUMENT_ROOT}/%{REQUEST_FILENAME} !-f")
@@ -1706,7 +1703,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
self.config.updated_mod_ssl_conf_digest)
def _current_ssl_options_hash(self):
return crypto_util.sha256sum(self.config.option("MOD_SSL_CONF_SRC"))
return crypto_util.sha256sum(self.config.constant("MOD_SSL_CONF_SRC"))
def _assert_current_file(self):
self.assertTrue(os.path.isfile(self.config.mod_ssl_conf))
@@ -1742,7 +1739,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
self.assertFalse(mock_logger.warning.called)
self.assertTrue(os.path.isfile(self.config.mod_ssl_conf))
self.assertEqual(crypto_util.sha256sum(
self.config.option("MOD_SSL_CONF_SRC")),
self.config.constant("MOD_SSL_CONF_SRC")),
self._current_ssl_options_hash())
self.assertNotEqual(crypto_util.sha256sum(self.config.mod_ssl_conf),
self._current_ssl_options_hash())
@@ -1758,7 +1755,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
"%s has been manually modified; updated file "
"saved to %s. We recommend updating %s for security purposes.")
self.assertEqual(crypto_util.sha256sum(
self.config.option("MOD_SSL_CONF_SRC")),
self.config.constant("MOD_SSL_CONF_SRC")),
self._current_ssl_options_hash())
# only print warning once
with mock.patch("certbot.plugins.common.logger") as mock_logger:

View File

@@ -1,11 +1,11 @@
"""Test for certbot_apache.configurator for Debian overrides"""
import os
import shutil
import unittest
import mock
from certbot import errors
from certbot.compat import os
from certbot_apache import apache_util
from certbot_apache import obj
@@ -20,7 +20,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
def setUp(self): # pylint: disable=arguments-differ
super(MultipleVhostsTestDebian, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
self.config_path, None, self.config_dir, self.work_dir,
os_info="debian")
self.config = self.mock_deploy_cert(self.config)
self.vh_truth = util.get_vh_truth(self.temp_dir,
@@ -79,9 +79,9 @@ class MultipleVhostsTestDebian(util.ApacheTest):
def test_enable_site_failure(self):
self.config.parser.root = "/tmp/nonexistent"
with mock.patch("certbot.compat.os.path.isdir") as mock_dir:
with mock.patch("os.path.isdir") as mock_dir:
mock_dir.return_value = True
with mock.patch("certbot.compat.os.path.islink") as mock_link:
with mock.patch("os.path.islink") as mock_link:
mock_link.return_value = False
self.assertRaises(
errors.NotSupportedError,

View File

@@ -6,7 +6,6 @@ import mock
from certbot_apache import configurator
from certbot_apache import entrypoint
class EntryPointTest(unittest.TestCase):
"""Entrypoint tests"""
@@ -15,13 +14,8 @@ class EntryPointTest(unittest.TestCase):
def test_get_configurator(self):
with mock.patch("certbot.util.get_os_info") as mock_info:
for distro in entrypoint.OVERRIDE_CLASSES:
return_value = (distro, "whatever")
if distro == 'fedora_old':
return_value = ('fedora', '28')
elif distro == 'fedora':
return_value = ('fedora', '29')
mock_info.return_value = return_value
for distro in entrypoint.OVERRIDE_CLASSES.keys():
mock_info.return_value = (distro, "whatever")
self.assertEqual(entrypoint.get_configurator(),
entrypoint.OVERRIDE_CLASSES[distro])
@@ -29,7 +23,7 @@ class EntryPointTest(unittest.TestCase):
with mock.patch("certbot.util.get_os_info") as mock_info:
mock_info.return_value = ("nonexistent", "irrelevant")
with mock.patch("certbot.util.get_systemd_os_like") as mock_like:
for like in entrypoint.OVERRIDE_CLASSES:
for like in entrypoint.OVERRIDE_CLASSES.keys():
mock_like.return_value = [like]
self.assertEqual(entrypoint.get_configurator(),
entrypoint.OVERRIDE_CLASSES[like])
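These tests exercise a plain dictionary dispatch: get_configurator() maps the distro name reported by certbot.util.get_os_info() to an override class, and falls back to the "like" distros from get_systemd_os_like() before using the generic configurator (the newer side of the diff also special-cases the Fedora version, which the sketch below omits). A standalone approximation with illustrative names:

    def pick_configurator(os_name, os_like, overrides, default):
        """Return the override class for os_name, or a 'like' distro, or default."""
        if os_name in overrides:
            return overrides[os_name]
        for like in os_like:            # e.g. ID_LIKE entries from /etc/os-release
            if like in overrides:
                return overrides[like]
        return default                  # generic ApacheConfigurator behaviour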

View File

@@ -1,195 +0,0 @@
"""Test for certbot_apache.configurator for Fedora 29+ overrides"""
import unittest
import mock
from certbot import errors
from certbot.compat import filesystem
from certbot.compat import os
from certbot_apache import obj
from certbot_apache import override_fedora
from certbot_apache.tests import util
def get_vh_truth(temp_dir, config_name):
"""Return the ground truth for the specified directory."""
prefix = os.path.join(
temp_dir, config_name, "httpd/conf.d")
aug_pre = "/files" + prefix
# TODO: eventually, these tests should have a dedicated configuration instead
# of reusing the ones from centos_test
vh_truth = [
obj.VirtualHost(
os.path.join(prefix, "centos.example.com.conf"),
os.path.join(aug_pre, "centos.example.com.conf/VirtualHost"),
{obj.Addr.fromstring("*:80")},
False, True, "centos.example.com"),
obj.VirtualHost(
os.path.join(prefix, "ssl.conf"),
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
{obj.Addr.fromstring("_default_:443")},
True, True, None)
]
return vh_truth
class FedoraRestartTest(util.ApacheTest):
"""Tests for Fedora specific self-signed certificate override"""
# TODO: eventually, these tests should have a dedicated configuration instead
# of reusing the ones from centos_test
def setUp(self): # pylint: disable=arguments-differ
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="fedora")
self.vh_truth = get_vh_truth(
self.temp_dir, "centos7_apache/apache")
def _run_fedora_test(self):
self.assertIsInstance(self.config, override_fedora.FedoraConfigurator)
self.config.config_test()
def test_fedora_restart_error(self):
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
with mock.patch(c_test) as mock_test:
# First call raises error, second doesn't
mock_test.side_effect = [errors.MisconfigurationError, '']
with mock.patch("certbot.util.run_script") as mock_run:
mock_run.side_effect = errors.SubprocessError
self.assertRaises(errors.MisconfigurationError,
self._run_fedora_test)
def test_fedora_restart(self):
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
with mock.patch(c_test) as mock_test:
with mock.patch("certbot.util.run_script") as mock_run:
# First call raises error, second doesn't
mock_test.side_effect = [errors.MisconfigurationError, '']
self._run_fedora_test()
self.assertEqual(mock_test.call_count, 2)
self.assertEqual(mock_run.call_args[0][0],
['systemctl', 'restart', 'httpd'])
class MultipleVhostsTestFedora(util.ApacheTest):
"""Multiple vhost tests for CentOS / RHEL family of distros"""
_multiprocess_can_split_ = True
def setUp(self): # pylint: disable=arguments-differ
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super(MultipleVhostsTestFedora, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="fedora")
self.vh_truth = get_vh_truth(
self.temp_dir, "centos7_apache/apache")
def test_get_parser(self):
self.assertIsInstance(self.config.parser, override_fedora.FedoraParser)
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
def test_opportunistic_httpd_runtime_parsing(self, mock_get):
define_val = (
'Define: TEST1\n'
'Define: TEST2\n'
'Define: DUMP_RUN_CFG\n'
)
mod_val = (
'Loaded Modules:\n'
' mock_module (static)\n'
' another_module (static)\n'
)
def mock_get_cfg(command):
"""Mock httpd process stdout"""
if command == ['httpd', '-t', '-D', 'DUMP_RUN_CFG']:
return define_val
elif command == ['httpd', '-t', '-D', 'DUMP_MODULES']:
return mod_val
return ""
mock_get.side_effect = mock_get_cfg
self.config.parser.modules = set()
self.config.parser.variables = {}
with mock.patch("certbot.util.get_os_info") as mock_osi:
# Make sure we have the CentOS httpd constants
mock_osi.return_value = ("fedora", "29")
self.config.parser.update_runtime_variables()
self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertTrue("TEST2" in self.config.parser.variables.keys())
self.assertTrue("mod_another.c" in self.config.parser.modules)
@mock.patch("certbot_apache.configurator.util.run_script")
def test_get_version(self, mock_run_script):
mock_run_script.return_value = ('', None)
self.assertRaises(errors.PluginError, self.config.get_version)
self.assertEqual(mock_run_script.call_args[0][0][0], 'httpd')
def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found."""
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 2)
found = 0
for vhost in vhs:
for centos_truth in self.vh_truth:
if vhost == centos_truth:
found += 1
break
else:
raise Exception("Missed: %s" % vhost) # pragma: no cover
self.assertEqual(found, 2)
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
def test_get_sysconfig_vars(self, mock_cfg):
"""Make sure we read the sysconfig OPTIONS variable correctly"""
# Return nothing for the process calls
mock_cfg.return_value = ""
self.config.parser.sysconfig_filep = filesystem.realpath(
os.path.join(self.config.parser.root, "../sysconfig/httpd"))
self.config.parser.variables = {}
with mock.patch("certbot.util.get_os_info") as mock_osi:
# Make sure we have the CentOS httpd constants
mock_osi.return_value = ("fedora", "29")
self.config.parser.update_runtime_variables()
self.assertTrue("mock_define" in self.config.parser.variables.keys())
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
self.assertTrue("mock_value" in self.config.parser.variables.keys())
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
@mock.patch("certbot_apache.configurator.util.run_script")
def test_alt_restart_works(self, mock_run_script):
mock_run_script.side_effect = [None, errors.SubprocessError, None]
self.config.restart()
self.assertEqual(mock_run_script.call_count, 3)
@mock.patch("certbot_apache.configurator.util.run_script")
def test_alt_restart_errors(self, mock_run_script):
mock_run_script.side_effect = [None,
errors.SubprocessError,
errors.SubprocessError]
self.assertRaises(errors.MisconfigurationError, self.config.restart)
if __name__ == "__main__":
unittest.main() # pragma: no cover
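The mocked `httpd -t -D DUMP_RUN_CFG` and `DUMP_MODULES` output above is what update_runtime_variables() consumes: "Define:" lines become parser.variables and the module listing becomes parser.modules, with each module recorded in two forms, which is why the test expects four entries from two listed modules. A simplified parser written against the sample output used in these tests; the helper names are illustrative:

    def parse_defines(dump_run_cfg):
        """Turn 'Define: NAME[=value]' lines into a dict."""
        variables = {}
        for line in dump_run_cfg.splitlines():
            if not line.startswith("Define: "):
                continue
            define = line[len("Define: "):].strip()
            if define == "DUMP_RUN_CFG":       # artifact of the dump flag itself
                continue
            name, _, value = define.partition("=")
            variables[name] = value
        return variables

    def parse_modules(dump_modules):
        """Collect module names from 'Loaded Modules:' output."""
        modules = set()
        for line in dump_modules.splitlines():
            line = line.strip()
            if line.endswith("(static)") or line.endswith("(shared)"):
                name = line.split()[0]                      # e.g. 'another_module'
                modules.add(name)
                modules.add("mod_%s.c" % name.rsplit("_module", 1)[0])
        return modules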

View File

@@ -1,17 +1,15 @@
"""Test for certbot_apache.configurator for Gentoo overrides"""
import os
import unittest
import mock
from certbot import errors
from certbot.compat import filesystem
from certbot.compat import os
from certbot_apache import obj
from certbot_apache import override_gentoo
from certbot_apache import obj
from certbot_apache.tests import util
def get_vh_truth(temp_dir, config_name):
"""Return the ground truth for the specified directory."""
prefix = os.path.join(
@@ -82,7 +80,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
"""Make sure we read the Gentoo APACHE2_OPTS variable correctly"""
defines = ['DEFAULT_VHOST', 'INFO',
'SSL', 'SSL_DEFAULT_VHOST', 'LANGUAGE']
self.config.parser.apacheconfig_filep = filesystem.realpath(
self.config.parser.apacheconfig_filep = os.path.realpath(
os.path.join(self.config.parser.root, "../conf.d/apache2"))
self.config.parser.variables = {}
with mock.patch("certbot_apache.override_gentoo.GentooParser.update_modules"):
@@ -115,24 +113,23 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
"""Mock httpd process stdout"""
if command == ['apache2ctl', 'modules']:
return mod_val
return None # pragma: no cover
mock_get.side_effect = mock_get_cfg
self.config.parser.modules = set()
with mock.patch("certbot.util.get_os_info") as mock_osi:
# Make sure we have the Gentoo httpd constants
# Make sure we have the CentOS httpd constants
mock_osi.return_value = ("gentoo", "123")
self.config.parser.update_runtime_variables()
self.assertEqual(mock_get.call_count, 1)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEquals(mock_get.call_count, 1)
self.assertEquals(len(self.config.parser.modules), 4)
self.assertTrue("mod_another.c" in self.config.parser.modules)
@mock.patch("certbot_apache.configurator.util.run_script")
def test_alt_restart_works(self, mock_run_script):
mock_run_script.side_effect = [None, errors.SubprocessError, None]
self.config.restart()
self.assertEqual(mock_run_script.call_count, 3)
self.assertEquals(mock_run_script.call_count, 3)
if __name__ == "__main__":
unittest.main() # pragma: no cover
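On Gentoo the runtime defines come from the APACHE2_OPTS variable in /etc/conf.d/apache2 rather than from an httpd dump, so the parser has to pull the -D flags out of a shell-style assignment; the defines checked above (DEFAULT_VHOST, INFO, SSL, SSL_DEFAULT_VHOST, LANGUAGE) are what such a line would yield. A rough sketch of that extraction, with an illustrative helper name:

    import shlex

    def parse_apache2_opts(line):
        """Extract -D defines from an APACHE2_OPTS="..." assignment."""
        # e.g. line = 'APACHE2_OPTS="-D DEFAULT_VHOST -D INFO -D SSL -D LANGUAGE"'
        _, _, value = line.partition("=")
        tokens = shlex.split(value)
        return [tokens[i + 1] for i, tok in enumerate(tokens)
                if tok == "-D" and i + 1 < len(tokens)]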

View File

@@ -1,16 +1,15 @@
"""Test for certbot_apache.http_01."""
import unittest
import mock
import os
import unittest
from acme import challenges
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
from certbot import achallenges
from certbot import errors
from certbot.compat import os
from certbot.tests import acme_util
from certbot_apache.parser import get_aug_path
from certbot.tests import acme_util
from certbot_apache.tests import util
@@ -20,15 +19,15 @@ NUM_ACHALLS = 3
class ApacheHttp01Test(util.ApacheTest):
"""Test for certbot_apache.http_01.ApacheHttp01."""
def setUp(self, *args, **kwargs): # pylint: disable=arguments-differ
def setUp(self, *args, **kwargs):
super(ApacheHttp01Test, self).setUp(*args, **kwargs)
self.account_key = self.rsa512jwk
self.achalls = [] # type: List[achallenges.KeyAuthorizationAnnotatedChallenge]
vh_truth = util.get_vh_truth(
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
# Takes the vhosts for encryption-example.demo, certbot.demo
# and vhost.in.rootconf
# Takes the vhosts for encryption-example.demo, certbot.demo, and
# vhost.in.rootconf
self.vhosts = [vh_truth[0], vh_truth[3], vh_truth[10]]
for i in range(NUM_ACHALLS):
@@ -39,7 +38,7 @@ class ApacheHttp01Test(util.ApacheTest):
"pending"),
domain=self.vhosts[i].name, account_key=self.account_key))
modules = ["ssl", "rewrite", "authz_core", "authz_host"]
modules = ["rewrite", "authz_core", "authz_host"]
for mod in modules:
self.config.parser.modules.add("mod_{0}.c".format(mod))
self.config.parser.modules.add(mod + "_module")
@@ -78,7 +77,7 @@ class ApacheHttp01Test(util.ApacheTest):
calls = mock_enmod.call_args_list
other_calls = []
for call in calls:
if call[0][0] != "rewrite":
if "rewrite" != call[0][0]:
other_calls.append(call)
# If these lists are equal, we never enabled mod_rewrite
@@ -111,17 +110,6 @@ class ApacheHttp01Test(util.ApacheTest):
domain="something.nonexistent", account_key=self.account_key)]
self.common_perform_test(achalls, vhosts)
def test_configure_multiple_vhosts(self):
vhosts = [v for v in self.config.vhosts if "duplicate.example.com" in v.get_names()]
self.assertEqual(len(vhosts), 2)
achalls = [
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(token=((b'a' * 16))),
"pending"),
domain="duplicate.example.com", account_key=self.account_key)]
self.common_perform_test(achalls, vhosts)
def test_no_vhost(self):
for achall in self.achalls:
self.http.add_chall(achall)
@@ -146,21 +134,6 @@ class ApacheHttp01Test(util.ApacheTest):
def test_perform_3_achall_apache_2_4(self):
self.combinations_perform_test(num_achalls=3, minor_version=4)
def test_activate_disabled_vhost(self):
vhosts = [v for v in self.config.vhosts if v.name == "certbot.demo"]
achalls = [
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(token=((b'a' * 16))),
"pending"),
domain="certbot.demo", account_key=self.account_key)]
vhosts[0].enabled = False
self.common_perform_test(achalls, vhosts)
matches = self.config.parser.find_dir(
"Include", vhosts[0].filep,
get_aug_path(self.config.parser.loc["default"]))
self.assertEqual(len(matches), 1)
def combinations_perform_test(self, num_achalls, minor_version):
"""Test perform with the given achall count and Apache version."""
achalls = self.achalls[:num_achalls]
@@ -187,6 +160,7 @@ class ApacheHttp01Test(util.ApacheTest):
self._test_challenge_file(achall)
for vhost in vhosts:
if not vhost.ssl:
matches = self.config.parser.find_dir("Include",
self.http.challenge_conf_pre,
vhost.path)
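These perform() tests ultimately check two things: that a key-authorization file exists for each challenge, and that every plain-HTTP vhost gains an Include of the generated challenge configuration. For orientation, the file half of an HTTP-01 response is just a token-named file whose body is the key authorization; a stripped-down illustration (the directory layout and helper name are hypothetical, not the plugin's internals):

    import os

    def write_http01_challenge(challenge_dir, token, key_authorization):
        """Write the file served at /.well-known/acme-challenge/<token>."""
        well_known = os.path.join(challenge_dir, ".well-known", "acme-challenge")
        if not os.path.isdir(well_known):
            os.makedirs(well_known)
        path = os.path.join(well_known, token)
        with open(path, "w") as chall_file:
            chall_file.write(key_authorization)   # "<token>.<JWK thumbprint>"
        return path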

View File

@@ -1,12 +1,12 @@
# pylint: disable=too-many-public-methods
"""Tests for certbot_apache.parser."""
import os
import shutil
import unittest
import augeas
import mock
from certbot import errors
from certbot.compat import os
from certbot_apache.tests import util
@@ -22,27 +22,6 @@ class BasicParserTest(util.ParserTest):
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)
def test_bad_parse(self):
self.parser.parse_file(os.path.join(self.parser.root,
"conf-available", "bad_conf_file.conf"))
self.assertRaises(
errors.PluginError, self.parser.check_parsing_errors, "httpd.aug")
def test_bad_save(self):
mock_save = mock.Mock()
mock_save.side_effect = IOError
self.parser.aug.save = mock_save
self.assertRaises(errors.PluginError, self.parser.unsaved_files)
def test_aug_version(self):
mock_match = mock.Mock(return_value=["something"])
self.parser.aug.match = mock_match
# pylint: disable=protected-access
self.assertEqual(self.parser.check_aug_version(),
["something"])
self.parser.aug.match.side_effect = RuntimeError
self.assertFalse(self.parser.check_aug_version())
def test_find_config_root_no_root(self):
# pylint: disable=protected-access
os.remove(self.parser.loc["root"])
@@ -73,7 +52,7 @@ class BasicParserTest(util.ParserTest):
test2 = self.parser.find_dir("documentroot")
self.assertEqual(len(test), 1)
self.assertEqual(len(test2), 8)
self.assertEqual(len(test2), 7)
def test_add_dir(self):
aug_default = "/files" + self.parser.loc["default"]
@@ -105,7 +84,7 @@ class BasicParserTest(util.ParserTest):
self.assertEqual(self.parser.aug.get(match), str(i + 1))
def test_empty_arg(self):
self.assertEqual(None,
self.assertEquals(None,
self.parser.get_arg("/files/whatever/nonexistent"))
def test_add_dir_to_ifmodssl(self):
@@ -255,7 +234,6 @@ class BasicParserTest(util.ParserTest):
return inc_val
elif cmd[-1] == "DUMP_MODULES":
return mod_val
return None # pragma: no cover
mock_cfg.side_effect = mock_get_vars
@@ -304,11 +282,11 @@ class BasicParserTest(util.ParserTest):
self.assertRaises(
errors.PluginError, self.parser.update_runtime_variables)
@mock.patch("certbot_apache.configurator.ApacheConfigurator.option")
@mock.patch("certbot_apache.configurator.ApacheConfigurator.constant")
@mock.patch("certbot_apache.parser.subprocess.Popen")
def test_update_runtime_vars_bad_ctl(self, mock_popen, mock_opt):
def test_update_runtime_vars_bad_ctl(self, mock_popen, mock_const):
mock_popen.side_effect = OSError
mock_opt.return_value = "nonexistent"
mock_const.return_value = "nonexistent"
self.assertRaises(
errors.MisconfigurationError,
self.parser.update_runtime_variables)
@@ -325,45 +303,28 @@ class BasicParserTest(util.ParserTest):
from certbot_apache.parser import get_aug_path
self.parser.add_comment(get_aug_path(self.parser.loc["name"]), "123456")
comm = self.parser.find_comments("123456")
self.assertEqual(len(comm), 1)
self.assertEquals(len(comm), 1)
self.assertTrue(self.parser.loc["name"] in comm[0])
class ParserInitTest(util.ApacheTest):
def setUp(self): # pylint: disable=arguments-differ
super(ParserInitTest, self).setUp()
self.aug = augeas.Augeas(
flags=augeas.Augeas.NONE | augeas.Augeas.NO_MODL_AUTOLOAD)
def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)
@mock.patch("certbot_apache.parser.ApacheParser.init_augeas")
def test_prepare_no_augeas(self, mock_init_augeas):
from certbot_apache.parser import ApacheParser
mock_init_augeas.side_effect = errors.NoInstallationError
self.config.config_test = mock.Mock()
self.assertRaises(
errors.NoInstallationError, ApacheParser,
os.path.relpath(self.config_path), "/dummy/vhostpath",
version=(2, 4, 22), configurator=self.config)
def test_init_old_aug(self):
from certbot_apache.parser import ApacheParser
with mock.patch("certbot_apache.parser.ApacheParser.check_aug_version") as mock_c:
mock_c.return_value = False
self.assertRaises(
errors.NotSupportedError,
ApacheParser, os.path.relpath(self.config_path),
"/dummy/vhostpath", version=(2, 4, 22), configurator=self.config)
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
def test_unparseable(self, mock_cfg):
from certbot_apache.parser import ApacheParser
mock_cfg.return_value = ('Define: TEST')
self.assertRaises(
errors.PluginError,
ApacheParser, os.path.relpath(self.config_path),
ApacheParser, self.aug, os.path.relpath(self.config_path),
"/dummy/vhostpath", version=(2, 2, 22), configurator=self.config)
def test_root_normalized(self):
@@ -375,7 +336,8 @@ class ParserInitTest(util.ApacheTest):
self.temp_dir,
"debian_apache_2_4/////multiple_vhosts/../multiple_vhosts/apache2")
parser = ApacheParser(path, "/dummy/vhostpath", configurator=self.config)
parser = ApacheParser(self.aug, path,
"/dummy/vhostpath", configurator=self.config)
self.assertEqual(parser.root, self.config_path)
@@ -384,7 +346,7 @@ class ParserInitTest(util.ApacheTest):
with mock.patch("certbot_apache.parser.ApacheParser."
"update_runtime_variables"):
parser = ApacheParser(
os.path.relpath(self.config_path),
self.aug, os.path.relpath(self.config_path),
"/dummy/vhostpath", configurator=self.config)
self.assertEqual(parser.root, self.config_path)
@@ -394,7 +356,7 @@ class ParserInitTest(util.ApacheTest):
with mock.patch("certbot_apache.parser.ApacheParser."
"update_runtime_variables"):
parser = ApacheParser(
self.config_path + os.path.sep,
self.aug, self.config_path + os.path.sep,
"/dummy/vhostpath", configurator=self.config)
self.assertEqual(parser.root, self.config_path)
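check_aug_version(), stubbed out in several of the tests above, decides whether the installed Augeas is new enough by attempting a case-insensitive regexp match and treating a RuntimeError as "too old". A standalone sketch of that probe with python-augeas; the test path and match expression are illustrative, patterned after what the tests mock:

    import augeas

    def augeas_supports_case_insensitive_match():
        """Return the match result, or False if this Augeas raises RuntimeError."""
        aug = augeas.Augeas(flags=augeas.Augeas.NONE | augeas.Augeas.NO_MODL_AUTOLOAD)
        aug.set("/test/path/testing/arg", "aRgUMeNT")
        try:
            matches = aug.match("/test//*[self::arg=~regexp('argument', 'i')]")
        except RuntimeError:
            return False
        finally:
            aug.remove("/test/path")
        return matches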

View File

@@ -1,9 +0,0 @@
This directory holds Apache 2.0 module-specific configuration files;
any files in this directory which have the ".conf" extension will be
processed as Apache configuration files.
Files are processed in alphabetical order, so if using configuration
directives which depend on, say, mod_perl being loaded, ensure that
these are placed in a filename later in the sort order than "perl.conf".

View File

@@ -1,222 +0,0 @@
#
# This is the Apache server configuration file providing SSL support.
# It contains the configuration directives to instruct the server how to
# serve pages over an https connection. For detailing information about these
# directives see <URL:http://httpd.apache.org/docs/2.2/mod/mod_ssl.html>
#
# Do NOT simply read the instructions in here without understanding
# what they do. They're here only as hints or reminders. If you are unsure
# consult the online docs. You have been warned.
#
LoadModule ssl_module modules/mod_ssl.so
#
# When we also provide SSL we have to listen to the
# the HTTPS port in addition.
#
Listen 443
##
## SSL Global Context
##
## All SSL configuration in this context applies both to
## the main server and all SSL-enabled virtual hosts.
##
# Pass Phrase Dialog:
# Configure the pass phrase gathering process.
# The filtering dialog program (`builtin' is an internal
# terminal dialog) has to provide the pass phrase on stdout.
SSLPassPhraseDialog builtin
# Inter-Process Session Cache:
# Configure the SSL Session Cache: First the mechanism
# to use and second the expiring timeout (in seconds).
SSLSessionCache shmcb:/var/cache/mod_ssl/scache(512000)
SSLSessionCacheTimeout 300
# Semaphore:
# Configure the path to the mutual exclusion semaphore the
# SSL engine uses internally for inter-process synchronization.
SSLMutex default
# Pseudo Random Number Generator (PRNG):
# Configure one or more sources to seed the PRNG of the
# SSL library. The seed data should be of good random quality.
# WARNING! On some platforms /dev/random blocks if not enough entropy
# is available. This means you then cannot use the /dev/random device
# because it would lead to very long connection times (as long as
# it requires to make more entropy available). But usually those
# platforms additionally provide a /dev/urandom device which doesn't
# block. So, if available, use this one instead. Read the mod_ssl User
# Manual for more details.
SSLRandomSeed startup file:/dev/urandom 256
SSLRandomSeed connect builtin
#SSLRandomSeed startup file:/dev/random 512
#SSLRandomSeed connect file:/dev/random 512
#SSLRandomSeed connect file:/dev/urandom 512
#
# Use "SSLCryptoDevice" to enable any supported hardware
# accelerators. Use "openssl engine -v" to list supported
# engine names. NOTE: If you enable an accelerator and the
# server does not start, consult the error logs and ensure
# your accelerator is functioning properly.
#
SSLCryptoDevice builtin
#SSLCryptoDevice ubsec
##
## SSL Virtual Host Context
##
<VirtualHost _default_:443>
# General setup for the virtual host, inherited from global configuration
#DocumentRoot "/var/www/html"
#ServerName www.example.com:443
# Use separate log files for the SSL virtual host; note that LogLevel
# is not inherited from httpd.conf.
ErrorLog logs/ssl_error_log
TransferLog logs/ssl_access_log
LogLevel warn
# SSL Engine Switch:
# Enable/Disable SSL for this virtual host.
SSLEngine on
# SSL Protocol support:
# List the enable protocol levels with which clients will be able to
# connect. Disable SSLv2 access by default:
SSLProtocol all -SSLv2
# SSL Cipher Suite:
# List the ciphers that the client is permitted to negotiate.
# See the mod_ssl documentation for a complete list.
SSLCipherSuite DEFAULT:!EXP:!SSLv2:!DES:!IDEA:!SEED:+3DES
# Server Certificate:
# Point SSLCertificateFile at a PEM encoded certificate. If
# the certificate is encrypted, then you will be prompted for a
# pass phrase. Note that a kill -HUP will prompt again. A new
# certificate can be generated using the genkey(1) command.
SSLCertificateFile /etc/pki/tls/certs/localhost.crt
# Server Private Key:
# If the key is not combined with the certificate, use this
# directive to point at the key file. Keep in mind that if
# you've both a RSA and a DSA private key you can configure
# both in parallel (to also allow the use of DSA ciphers, etc.)
SSLCertificateKeyFile /etc/pki/tls/private/localhost.key
# Server Certificate Chain:
# Point SSLCertificateChainFile at a file containing the
# concatenation of PEM encoded CA certificates which form the
# certificate chain for the server certificate. Alternatively
# the referenced file can be the same as SSLCertificateFile
# when the CA certificates are directly appended to the server
# certificate for convenience.
#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt
# Certificate Authority (CA):
# Set the CA certificate verification path where to find CA
# certificates for client authentication or alternatively one
# huge file containing all of them (file must be PEM encoded)
#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt
# Client Authentication (Type):
# Client certificate verification type and depth. Types are
# none, optional, require and optional_no_ca. Depth is a
# number which specifies how deeply to verify the certificate
# issuer chain before deciding the certificate is not valid.
#SSLVerifyClient require
#SSLVerifyDepth 10
# Access Control:
# With SSLRequire you can do per-directory access control based
# on arbitrary complex boolean expressions containing server
# variable checks and other lookup directives. The syntax is a
# mixture between C and Perl. See the mod_ssl documentation
# for more details.
#<Location />
#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \
# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \
# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \
# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \
# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \
# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/
#</Location>
# SSL Engine Options:
# Set various options for the SSL engine.
# o FakeBasicAuth:
# Translate the client X.509 into a Basic Authorisation. This means that
# the standard Auth/DBMAuth methods can be used for access control. The
# user name is the `one line' version of the client's X.509 certificate.
# Note that no password is obtained from the user. Every entry in the user
# file needs this password: `xxj31ZMTZzkVA'.
# o ExportCertData:
# This exports two additional environment variables: SSL_CLIENT_CERT and
# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the
# server (always existing) and the client (only existing when client
# authentication is used). This can be used to import the certificates
# into CGI scripts.
# o StdEnvVars:
# This exports the standard SSL/TLS related `SSL_*' environment variables.
# Per default this exportation is switched off for performance reasons,
# because the extraction step is an expensive operation and is usually
# useless for serving static content. So one usually enables the
# exportation for CGI and SSI requests only.
# o StrictRequire:
# This denies access when "SSLRequireSSL" or "SSLRequire" applied even
# under a "Satisfy any" situation, i.e. when it applies access is denied
# and no other module can change it.
# o OptRenegotiate:
# This enables optimized SSL connection renegotiation handling when SSL
# directives are used in per-directory context.
#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
<Files ~ "\.(cgi|shtml|phtml|php3?)$">
SSLOptions +StdEnvVars
</Files>
<Directory "/var/www/cgi-bin">
SSLOptions +StdEnvVars
</Directory>
# SSL Protocol Adjustments:
# The safe and default but still SSL/TLS standard compliant shutdown
# approach is that mod_ssl sends the close notify alert but doesn't wait for
# the close notify alert from client. When you need a different shutdown
# approach you can use one of the following variables:
# o ssl-unclean-shutdown:
# This forces an unclean shutdown when the connection is closed, i.e. no
# SSL close notify alert is sent or allowed to be received. This violates
# the SSL/TLS standard but is needed for some brain-dead browsers. Use
# this when you receive I/O errors because of the standard approach where
# mod_ssl sends the close notify alert.
# o ssl-accurate-shutdown:
# This forces an accurate shutdown when the connection is closed, i.e. a
# SSL close notify alert is sent and mod_ssl waits for the close notify
# alert of the client. This is 100% SSL/TLS standard compliant, but in
# practice often causes hanging connections with brain-dead browsers. Use
# this only for browsers where you know that their SSL implementation
# works correctly.
# Notice: Most problems of broken clients are also related to the HTTP
# keep-alive facility, so you usually additionally want to disable
# keep-alive for those clients, too. Use variable "nokeepalive" for this.
# Similarly, one has to force some clients to use HTTP/1.0 to work around
# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and
# "force-response-1.0" for this.
SetEnvIf User-Agent ".*MSIE.*" \
nokeepalive ssl-unclean-shutdown \
downgrade-1.0 force-response-1.0
# Per-Server Logging:
# The home of a custom SSL log file. Use this when you want a
# compact non-error SSL logfile on a virtual host basis.
CustomLog logs/ssl_request_log \
"%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
</VirtualHost>

View File

@@ -1,7 +0,0 @@
<VirtualHost *:80>
ServerName test.example.com
ServerAdmin webmaster@dummy-host.example.com
DocumentRoot /var/www/htdocs
ErrorLog logs/dummy-host.example.com-error_log
CustomLog logs/dummy-host.example.com-access_log common
</VirtualHost>

View File

@@ -1,11 +0,0 @@
#
# This configuration file enables the default "Welcome"
# page if there is no default index page present for
# the root URL. To disable the Welcome page, comment
# out all the lines below.
#
<LocationMatch "^/+$">
Options -Indexes
ErrorDocument 403 /error/noindex.html
</LocationMatch>

View File

@@ -0,0 +1 @@
../sites-available/old,default.conf

View File

@@ -1,9 +0,0 @@
<VirtualHost 10.2.3.4:80>
ServerName duplicate.example.com
ServerAdmin webmaster@certbot.demo
DocumentRoot /var/www/html
ErrorLog ${APACHE_LOG_DIR}/error.log
CustomLog ${APACHE_LOG_DIR}/access.log combined
</VirtualHost>

View File

@@ -1,14 +0,0 @@
<IfModule mod_ssl.c>
<VirtualHost 10.2.3.4:443>
ServerName duplicate.example.com
ServerAdmin webmaster@certbot.demo
DocumentRoot /var/www/html
ErrorLog ${APACHE_LOG_DIR}/error.log
CustomLog ${APACHE_LOG_DIR}/access.log combined
SSLCertificateFile /etc/apache2/certs/certbot-cert_5.pem
SSLCertificateKeyFile /etc/apache2/ssl/key-certbot_15.pem
</VirtualHost>
</IfModule>

View File

@@ -0,0 +1,151 @@
"""Test for certbot_apache.tls_sni_01."""
import shutil
import unittest
import mock
from certbot import errors
from certbot.plugins import common_test
from certbot_apache import obj
from certbot_apache.tests import util
from six.moves import xrange # pylint: disable=redefined-builtin, import-error
class TlsSniPerformTest(util.ApacheTest):
"""Test the ApacheTlsSni01 challenge."""
auth_key = common_test.AUTH_KEY
achalls = common_test.ACHALLS
def setUp(self): # pylint: disable=arguments-differ
super(TlsSniPerformTest, self).setUp()
config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir,
self.work_dir)
config.config.tls_sni_01_port = 443
from certbot_apache import tls_sni_01
self.sni = tls_sni_01.ApacheTlsSni01(config)
def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)
def test_perform0(self):
resp = self.sni.perform()
self.assertEqual(len(resp), 0)
@mock.patch("certbot.util.exe_exists")
@mock.patch("certbot.util.run_script")
def test_perform1(self, _, mock_exists):
self.sni.configurator.parser.modules.add("socache_shmcb_module")
self.sni.configurator.parser.modules.add("ssl_module")
mock_exists.return_value = True
self.sni.configurator.parser.update_runtime_variables = mock.Mock()
achall = self.achalls[0]
self.sni.add_chall(achall)
response = self.achalls[0].response(self.auth_key)
mock_setup_cert = mock.MagicMock(return_value=response)
# pylint: disable=protected-access
self.sni._setup_challenge_cert = mock_setup_cert
responses = self.sni.perform()
mock_setup_cert.assert_called_once_with(achall)
# Check to make sure challenge config path is included in apache config
self.assertEqual(
len(self.sni.configurator.parser.find_dir(
"Include", self.sni.challenge_conf)), 1)
self.assertEqual(len(responses), 1)
self.assertEqual(responses[0], response)
def test_perform2(self):
# Avoid load module
self.sni.configurator.parser.modules.add("ssl_module")
self.sni.configurator.parser.modules.add("socache_shmcb_module")
acme_responses = []
for achall in self.achalls:
self.sni.add_chall(achall)
acme_responses.append(achall.response(self.auth_key))
mock_setup_cert = mock.MagicMock(side_effect=acme_responses)
# pylint: disable=protected-access
self.sni._setup_challenge_cert = mock_setup_cert
with mock.patch(
"certbot_apache.override_debian.DebianConfigurator.enable_mod"):
sni_responses = self.sni.perform()
self.assertEqual(mock_setup_cert.call_count, 2)
# Make sure calls made to mocked function were correct
self.assertEqual(
mock_setup_cert.call_args_list[0], mock.call(self.achalls[0]))
self.assertEqual(
mock_setup_cert.call_args_list[1], mock.call(self.achalls[1]))
self.assertEqual(
len(self.sni.configurator.parser.find_dir(
"Include", self.sni.challenge_conf)),
1)
self.assertEqual(len(sni_responses), 2)
for i in xrange(2):
self.assertEqual(sni_responses[i], acme_responses[i])
def test_mod_config(self):
z_domains = []
for achall in self.achalls:
self.sni.add_chall(achall)
z_domain = achall.response(self.auth_key).z_domain
z_domains.append(set([z_domain.decode('ascii')]))
self.sni._mod_config() # pylint: disable=protected-access
self.sni.configurator.save()
self.sni.configurator.parser.find_dir(
"Include", self.sni.challenge_conf)
vh_match = self.sni.configurator.aug.match(
"/files" + self.sni.challenge_conf + "//VirtualHost")
vhs = []
for match in vh_match:
# pylint: disable=protected-access
vhs.append(self.sni.configurator._create_vhost(match))
self.assertEqual(len(vhs), 2)
for vhost in vhs:
self.assertEqual(vhost.addrs, set([obj.Addr.fromstring("*:443")]))
names = vhost.get_names()
self.assertTrue(names in z_domains)
def test_get_addrs_default(self):
self.sni.configurator.choose_vhost = mock.Mock(
return_value=obj.VirtualHost(
"path", "aug_path",
set([obj.Addr.fromstring("_default_:443")]),
False, False)
)
# pylint: disable=protected-access
self.assertEqual(
set([obj.Addr.fromstring("*:443")]),
self.sni._get_addrs(self.achalls[0]))
def test_get_addrs_no_vhost_found(self):
self.sni.configurator.choose_vhost = mock.Mock(
side_effect=errors.MissingCommandlineFlag(
"Failed to run Apache plugin non-interactively"))
# pylint: disable=protected-access
self.assertEqual(
set([obj.Addr.fromstring("*:443")]),
self.sni._get_addrs(self.achalls[0]))
if __name__ == "__main__":
unittest.main() # pragma: no cover

View File

@@ -1,4 +1,5 @@
"""Common utilities for certbot_apache."""
import os
import shutil
import sys
import unittest
@@ -8,9 +9,10 @@ import josepy as jose
import mock
import zope.component
from certbot.compat import os
from certbot.display import util as display_util
from certbot.plugins import common
from certbot.tests import util as test_util
from certbot_apache import configurator
@@ -78,7 +80,8 @@ class ParserTest(ApacheTest):
with mock.patch("certbot_apache.parser.ApacheParser."
"update_runtime_variables"):
self.parser = ApacheParser(
self.config_path, self.vhost_path, configurator=self.config)
self.aug, self.config_path, self.vhost_path,
configurator=self.config)
def get_apache_configurator( # pylint: disable=too-many-arguments, too-many-locals
@@ -94,10 +97,9 @@ def get_apache_configurator( # pylint: disable=too-many-arguments, too-many-loc
backups = os.path.join(work_dir, "backups")
mock_le_config = mock.MagicMock(
apache_server_root=config_path,
apache_vhost_root=None,
apache_vhost_root=conf_vhost_path,
apache_le_vhost_ext="-le-ssl.conf",
apache_challenge_location=config_path,
apache_enmod=None,
backup_dir=backups,
config_dir=config_dir,
http01_port=80,
@@ -105,6 +107,19 @@ def get_apache_configurator( # pylint: disable=too-many-arguments, too-many-loc
in_progress_dir=os.path.join(backups, "IN_PROGRESS"),
work_dir=work_dir)
orig_os_constant = configurator.ApacheConfigurator(mock_le_config,
name="apache",
version=version).constant
def mock_os_constant(key, vhost_path=vhost_path):
"""Mock default vhost path"""
if key == "vhost_root":
return vhost_path
else:
return orig_os_constant(key)
with mock.patch("certbot_apache.configurator.ApacheConfigurator.constant") as mock_cons:
mock_cons.side_effect = mock_os_constant
with mock.patch("certbot_apache.configurator.util.run_script"):
with mock.patch("certbot_apache.configurator.util."
"exe_exists") as mock_exe_exists:
@@ -117,12 +132,7 @@ def get_apache_configurator( # pylint: disable=too-many-arguments, too-many-loc
config_class = configurator.ApacheConfigurator
config = config_class(config=mock_le_config, name="apache",
version=version)
if not conf_vhost_path:
config_class.OS_DEFAULTS["vhost_root"] = vhost_path
else:
# Custom virtualhost path was requested
config.config.apache_vhost_root = conf_vhost_path
config.config.apache_ctl = config_class.OS_DEFAULTS["ctl"]
config.prepare()
return config
@@ -193,17 +203,7 @@ def get_vh_truth(temp_dir, config_name):
"/files" + os.path.join(temp_dir, config_name,
"apache2/apache2.conf/VirtualHost"),
set([obj.Addr.fromstring("*:80")]), False, True,
"vhost.in.rootconf"),
obj.VirtualHost(
os.path.join(prefix, "duplicatehttp.conf"),
os.path.join(aug_pre, "duplicatehttp.conf/VirtualHost"),
set([obj.Addr.fromstring("10.2.3.4:80")]), False, True,
"duplicate.example.com"),
obj.VirtualHost(
os.path.join(prefix, "duplicatehttps.conf"),
os.path.join(aug_pre, "duplicatehttps.conf/IfModule/VirtualHost"),
set([obj.Addr.fromstring("10.2.3.4:443")]), True, True,
"duplicate.example.com")]
"vhost.in.rootconf")]
return vh_truth
if config_name == "debian_apache_2_4/multi_vhosts":
prefix = os.path.join(

View File

@@ -0,0 +1,174 @@
"""A class that performs TLS-SNI-01 challenges for Apache"""
import os
import logging
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
from certbot.plugins import common
from certbot.errors import PluginError, MissingCommandlineFlag
from certbot_apache import obj
logger = logging.getLogger(__name__)
class ApacheTlsSni01(common.TLSSNI01):
"""Class that performs TLS-SNI-01 challenges within the Apache configurator
:ivar configurator: ApacheConfigurator object
:type configurator: :class:`~apache.configurator.ApacheConfigurator`
:ivar list achalls: Annotated TLS-SNI-01
(`.KeyAuthorizationAnnotatedChallenge`) challenges.
:param list indices: Meant to hold indices of challenges in a
larger array. ApacheTlsSni01 is capable of solving many challenges
at once, which causes an indexing issue within ApacheConfigurator,
which must return all responses in order. Imagine ApacheConfigurator
maintaining state about where all of the http-01 and
TLS-SNI-01 challenges belong in the response array. This is an
optional utility.
:param str challenge_conf: location of the challenge config file
"""
VHOST_TEMPLATE = """\
<VirtualHost {vhost}>
ServerName {server_name}
UseCanonicalName on
SSLStrictSNIVHostCheck on
LimitRequestBody 1048576
Include {ssl_options_conf_path}
SSLCertificateFile {cert_path}
SSLCertificateKeyFile {key_path}
DocumentRoot {document_root}
</VirtualHost>
"""
def __init__(self, *args, **kwargs):
super(ApacheTlsSni01, self).__init__(*args, **kwargs)
self.challenge_conf = os.path.join(
self.configurator.conf("challenge-location"),
"le_tls_sni_01_cert_challenge.conf")
def perform(self):
"""Perform a TLS-SNI-01 challenge."""
if not self.achalls:
return []
# Save any changes to the configuration as a precaution
# About to make temporary changes to the config
self.configurator.save("Changes before challenge setup", True)
# Prepare the server for HTTPS
self.configurator.prepare_server_https(
str(self.configurator.config.tls_sni_01_port), True)
responses = []
# Create all of the challenge certs
for achall in self.achalls:
responses.append(self._setup_challenge_cert(achall))
# Setup the configuration
addrs = self._mod_config()
self.configurator.save("Don't lose mod_config changes", True)
self.configurator.make_addrs_sni_ready(addrs)
# Save reversible changes
self.configurator.save("SNI Challenge", True)
return responses
def _mod_config(self):
"""Modifies Apache config files to include challenge vhosts.
Result: Apache config includes virtual servers for issued challs
:returns: All TLS-SNI-01 addresses used
:rtype: set
"""
addrs = set() # type: Set[obj.Addr]
config_text = "<IfModule mod_ssl.c>\n"
for achall in self.achalls:
achall_addrs = self._get_addrs(achall)
addrs.update(achall_addrs)
config_text += self._get_config_text(achall, achall_addrs)
config_text += "</IfModule>\n"
self.configurator.parser.add_include(
self.configurator.parser.loc["default"], self.challenge_conf)
self.configurator.reverter.register_file_creation(
True, self.challenge_conf)
logger.debug("writing a config file with text:\n %s", config_text)
with open(self.challenge_conf, "w") as new_conf:
new_conf.write(config_text)
return addrs
def _get_addrs(self, achall):
"""Return the Apache addresses needed for TLS-SNI-01."""
# TODO: Checkout _default_ rules.
addrs = set()
default_addr = obj.Addr(("*", str(
self.configurator.config.tls_sni_01_port)))
try:
vhost = self.configurator.choose_vhost(achall.domain,
create_if_no_ssl=False)
except (PluginError, MissingCommandlineFlag):
# We couldn't find the virtualhost for this domain, possibly
# because it's a new vhost that's not configured yet
# (GH #677). See also GH #2600.
logger.warning("Falling back to default vhost %s...", default_addr)
addrs.add(default_addr)
return addrs
for addr in vhost.addrs:
if "_default_" == addr.get_addr():
addrs.add(default_addr)
else:
addrs.add(
addr.get_sni_addr(
self.configurator.config.tls_sni_01_port))
return addrs
def _get_config_text(self, achall, ip_addrs):
"""Chocolate virtual server configuration text
:param .KeyAuthorizationAnnotatedChallenge achall: Annotated
TLS-SNI-01 challenge.
:param list ip_addrs: addresses of challenged domain
:class:`list` of type `~.obj.Addr`
:returns: virtual host configuration text
:rtype: str
"""
ips = " ".join(str(i) for i in ip_addrs)
document_root = os.path.join(
self.configurator.config.work_dir, "tls_sni_01_page/")
# TODO: Python docs is not clear how multiline string literal
# newlines are parsed on different platforms. At least on
# Linux (Debian sid), when source file uses CRLF, Python still
# parses it as "\n"... c.f.:
# https://docs.python.org/2.7/reference/lexical_analysis.html
return self.VHOST_TEMPLATE.format(
vhost=ips,
server_name=achall.response(achall.account_key).z_domain.decode('ascii'),
ssl_options_conf_path=self.configurator.mod_ssl_conf,
cert_path=self.get_cert_path(achall),
key_path=self.get_key_path(achall),
document_root=document_root).replace("\n", os.linesep)
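The ServerName baked into each generated vhost is not the real domain but the challenge's z_domain: per the TLS-SNI-01 rules, the SHA-256 hex digest of the key authorization split into two 32-character labels under .acme.invalid. A small helper showing that derivation, written independently of the acme library:

    import hashlib

    def tls_sni_01_domain(key_authorization):
        """Name the challenge vhost must answer for, derived from the key authorization."""
        z = hashlib.sha256(key_authorization.encode("utf-8")).hexdigest()
        return "{0}.{1}.acme.invalid".format(z[:32], z[32:64])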

View File

@@ -13,7 +13,7 @@
# serve to show the default.
import sys
from certbot.compat import os
import os
import shlex
import mock

View File

@@ -1,3 +1,2 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==0.37.0
acme[dev]==0.25.0
-e .[dev]

View File

@@ -1,16 +1,14 @@
from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
import sys
version = '0.37.1'
version = '0.26.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=0.37.0',
'acme>=0.25.0',
'certbot>=0.26.0.dev0',
'mock',
'python-augeas',
'setuptools',
@@ -23,22 +21,6 @@ docs_extras = [
'sphinx_rtd_theme',
]
class PyTest(TestCommand):
user_options = []
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
name='certbot-apache',
version=version,
@@ -49,7 +31,7 @@ setup(
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
@@ -61,7 +43,6 @@ setup(
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
@@ -82,6 +63,4 @@ setup(
],
},
test_suite='certbot_apache',
tests_require=["pytest"],
cmdclass={"test": PyTest},
)

View File

@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
fi
VENV_BIN="$VENV_PATH/bin"
BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
LE_AUTO_VERSION="0.37.1"
LE_AUTO_VERSION="0.25.1"
BASENAME=$(basename $0)
USAGE="Usage: $BASENAME [OPTIONS]
A self-updating wrapper script for the Certbot ACME client. When run, updates
@@ -45,7 +45,6 @@ Help for certbot itself cannot be provided until it is installed.
-h, --help print this help
-n, --non-interactive, --noninteractive run without asking for user input
--no-bootstrap do not install OS dependencies
--no-permissions-check do not warn about file system permissions
--no-self-upgrade do not download updates
--os-packages-only install OS dependencies and exit
--install-only install certbot, upgrade if needed, and exit
@@ -68,8 +67,6 @@ for arg in "$@" ; do
# Do not upgrade this script (also prevents client upgrades, because each
# copy of the script pins a hash of the python client)
NO_SELF_UPGRADE=1;;
--no-permissions-check)
NO_PERMISSIONS_CHECK=1;;
--no-bootstrap)
NO_BOOTSTRAP=1;;
--help)
@@ -175,11 +172,7 @@ SetRootAuthMechanism() {
sudo)
SUDO="sudo -E"
;;
'')
# If we're not running with root, don't check that this script can only
# be modified by system users and groups.
NO_PERMISSIONS_CHECK=1
;;
'') ;; # Nothing to do for plain root method.
*)
error "Error: unknown root authorization mechanism '$LE_AUTO_SUDO'."
exit 1
@@ -202,7 +195,7 @@ if [ "$1" = "--cb-auto-has-root" ]; then
else
SetRootAuthMechanism
if [ -n "$SUDO" ]; then
say "Requesting to rerun $0 with root privileges..."
echo "Requesting to rerun $0 with root privileges..."
$SUDO "$0" --cb-auto-has-root "$@"
exit 0
fi
@@ -340,11 +333,63 @@ BootstrapDebCommon() {
fi
augeas_pkg="libaugeas0 augeas-lenses"
AUGVERSION=`LC_ALL=C apt-cache show --no-all-versions libaugeas0 | grep ^Version: | cut -d" " -f2`
if [ "$ASSUME_YES" = 1 ]; then
YES_FLAG="-y"
fi
AddBackportRepo() {
# ARGS:
BACKPORT_NAME="$1"
BACKPORT_SOURCELINE="$2"
say "To use the Apache Certbot plugin, augeas needs to be installed from $BACKPORT_NAME."
if ! grep -v -e ' *#' /etc/apt/sources.list | grep -q "$BACKPORT_NAME" ; then
# This can theoretically error if sources.list.d is empty, but in that case we don't care.
if ! grep -v -e ' *#' /etc/apt/sources.list.d/* 2>/dev/null | grep -q "$BACKPORT_NAME"; then
if [ "$ASSUME_YES" = 1 ]; then
/bin/echo -n "Installing augeas from $BACKPORT_NAME in 3 seconds..."
sleep 1s
/bin/echo -ne "\e[0K\rInstalling augeas from $BACKPORT_NAME in 2 seconds..."
sleep 1s
/bin/echo -e "\e[0K\rInstalling augeas from $BACKPORT_NAME in 1 second ..."
sleep 1s
add_backports=1
else
read -p "Would you like to enable the $BACKPORT_NAME repository [Y/n]? " response
case $response in
[yY][eE][sS]|[yY]|"")
add_backports=1;;
*)
add_backports=0;;
esac
fi
if [ "$add_backports" = 1 ]; then
sh -c "echo $BACKPORT_SOURCELINE >> /etc/apt/sources.list.d/$BACKPORT_NAME.list"
apt-get $QUIET_FLAG update
fi
fi
fi
if [ "$add_backports" != 0 ]; then
apt-get install $QUIET_FLAG $YES_FLAG --no-install-recommends -t "$BACKPORT_NAME" $augeas_pkg
augeas_pkg=
fi
}
if dpkg --compare-versions 1.0 gt "$AUGVERSION" ; then
if lsb_release -a | grep -q wheezy ; then
AddBackportRepo wheezy-backports "deb http://http.debian.net/debian wheezy-backports main"
elif lsb_release -a | grep -q precise ; then
# XXX add ARM case
AddBackportRepo precise-backports "deb http://archive.ubuntu.com/ubuntu precise-backports main restricted universe multiverse"
else
echo "No libaugeas0 version is available that's new enough to run the"
echo "Certbot apache plugin..."
fi
# XXX add a case for ubuntu PPAs
fi
apt-get install $QUIET_FLAG $YES_FLAG --no-install-recommends \
python \
python-dev \
@@ -495,18 +540,11 @@ BOOTSTRAP_RPM_PYTHON3_VERSION=1
BootstrapRpmPython3() {
# Tested with:
# - CentOS 6
# - Fedora 29
InitializeRPMCommonBase
# Fedora 29 must use python3-virtualenv
if $TOOL list python3-virtualenv >/dev/null 2>&1; then
python_pkgs="python3
python3-virtualenv
python3-devel
"
# EPEL uses python34
elif $TOOL list python34 >/dev/null 2>&1; then
if $TOOL list python34 >/dev/null 2>&1; then
python_pkgs="python34
python34-devel
python34-tools
@@ -535,20 +573,10 @@ BootstrapSuseCommon() {
QUIET_FLAG='-qq'
fi
if zypper search -x python-virtualenv >/dev/null 2>&1; then
OPENSUSE_VIRTUALENV_PACKAGES="python-virtualenv"
else
# Since Leap 15.0 (and associated Tumbleweed version), python-virtualenv
# is a source package, and python2-virtualenv must be used instead.
# Also currently python2-setuptools is not a dependency of python2-virtualenv,
# while it should be. Installing it explicitly until upstream fix.
OPENSUSE_VIRTUALENV_PACKAGES="python2-virtualenv python2-setuptools"
fi
zypper $QUIET_FLAG $zypper_flags in $install_flags \
python \
python-devel \
$OPENSUSE_VIRTUALENV_PACKAGES \
python-virtualenv \
gcc \
augeas-lenses \
libopenssl-devel \
@@ -565,7 +593,8 @@ BootstrapArchCommon() {
# - ArchLinux (x86_64)
#
# "python-virtualenv" is Python3, but "python2-virtualenv" provides
# only "virtualenv2" binary, not "virtualenv".
# only "virtualenv2" binary, not "virtualenv" necessary in
# ./tools/_venv_common.sh
deps="
python2
@@ -755,31 +784,7 @@ elif [ -f /etc/redhat-release ]; then
prev_le_python="$LE_PYTHON"
unset LE_PYTHON
DeterminePythonVersion "NOCRASH"
RPM_DIST_NAME=`(. /etc/os-release 2> /dev/null && echo $ID) || echo "unknown"`
# Set RPM_DIST_VERSION to VERSION_ID from /etc/os-release after splitting on
# '.' characters (e.g. "8.0" becomes "8"). If the command exits with an
# error, RPM_DIST_VERSION is set to "unknown".
RPM_DIST_VERSION=$( (. /etc/os-release 2> /dev/null && echo "$VERSION_ID") | cut -d '.' -f1 || echo "unknown")
# If RPM_DIST_VERSION is an empty string or it contains any nonnumeric
# characters, the value is unexpected so we set RPM_DIST_VERSION to 0.
if [ -z "$RPM_DIST_VERSION" ] || [ -n "$(echo "$RPM_DIST_VERSION" | tr -d '[0-9]')" ]; then
RPM_DIST_VERSION=0
fi
# Starting with Fedora 29, python2 is on a deprecation path. Let's move to python3 then.
# RHEL 8 also uses python3 by default.
if [ "$RPM_DIST_NAME" = "fedora" -a "$RPM_DIST_VERSION" -ge 29 -o "$PYVER" -eq 26 ]; then
RPM_USE_PYTHON_3=1
elif [ "$RPM_DIST_NAME" = "rhel" -a "$RPM_DIST_VERSION" -ge 8 ]; then
RPM_USE_PYTHON_3=1
else
RPM_USE_PYTHON_3=0
fi
if [ "$RPM_USE_PYTHON_3" = 1 ]; then
if [ "$PYVER" -eq 26 ]; then
Bootstrap() {
BootstrapMessage "RedHat-based OSes that will use Python3"
BootstrapRpmPython3
@@ -793,7 +798,6 @@ elif [ -f /etc/redhat-release ]; then
}
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
fi
LE_PYTHON="$prev_le_python"
elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
Bootstrap() {
@@ -908,159 +912,6 @@ OldVenvExists() {
[ -n "$OLD_VENV_PATH" -a -f "$OLD_VENV_PATH/bin/letsencrypt" ]
}
# Given a python path, version 1 and version 2, check if version 1 is outdated compared to version 2.
# An unofficial version provided as version 1 (e.g. 0.28.0.dev0) is handled specially
# by printing "UNOFFICIAL". Otherwise, print "OUTDATED" if version 1
# is outdated, and "UP_TO_DATE" if not.
# This function relies only on the python environment (2.x or 3.x) installed by certbot-auto.
CompareVersions() {
"$1" - "$2" "$3" << "UNLIKELY_EOF"
import sys
from distutils.version import StrictVersion
try:
current = StrictVersion(sys.argv[1])
except ValueError:
sys.stdout.write('UNOFFICIAL')
sys.exit()
try:
remote = StrictVersion(sys.argv[2])
except ValueError:
sys.stdout.write('UP_TO_DATE')
sys.exit()
if current < remote:
sys.stdout.write('OUTDATED')
else:
sys.stdout.write('UP_TO_DATE')
UNLIKELY_EOF
}
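A standalone Python rendering of the version-state logic embedded in CompareVersions above (the function name is illustrative; only StrictVersion's behavior is relied on here):

# Sketch of the version-state decision made by CompareVersions above.
# StrictVersion raises ValueError for non-release strings such as
# "0.28.0.dev0", which is how unofficial builds are detected.
from distutils.version import StrictVersion  # available on the Python versions certbot-auto targets

def version_state(current, remote):
    try:
        current_version = StrictVersion(current)
    except ValueError:
        return 'UNOFFICIAL'
    try:
        remote_version = StrictVersion(remote)
    except ValueError:
        return 'UP_TO_DATE'
    return 'OUTDATED' if current_version < remote_version else 'UP_TO_DATE'

assert version_state('0.28.0.dev0', '0.37.1') == 'UNOFFICIAL'
assert version_state('0.25.1', '0.37.1') == 'OUTDATED'
assert version_state('0.37.1', '0.37.1') == 'UP_TO_DATE'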
# Create a new virtual environment for Certbot. It will overwrite any existing one.
# Parameters: LE_PYTHON, VENV_PATH, PYVER, VERBOSE
CreateVenv() {
"$1" - "$2" "$3" "$4" << "UNLIKELY_EOF"
#!/usr/bin/env python
import os
import shutil
import subprocess
import sys
def create_venv(venv_path, pyver, verbose):
if os.path.exists(venv_path):
shutil.rmtree(venv_path)
stdout = sys.stdout if verbose == '1' else open(os.devnull, 'w')
if int(pyver) <= 27:
# Use virtualenv binary
environ = os.environ.copy()
environ['VIRTUALENV_NO_DOWNLOAD'] = '1'
command = ['virtualenv', '--no-site-packages', '--python', sys.executable, venv_path]
subprocess.check_call(command, stdout=stdout, env=environ)
else:
# Use embedded venv module in Python 3
command = [sys.executable, '-m', 'venv', venv_path]
subprocess.check_call(command, stdout=stdout)
if __name__ == '__main__':
create_venv(*sys.argv[1:])
UNLIKELY_EOF
}
# Check that the given PATH_TO_CHECK has secured permissions.
# Parameters: LE_PYTHON, PATH_TO_CHECK
CheckPathPermissions() {
"$1" - "$2" << "UNLIKELY_EOF"
"""Verifies certbot-auto cannot be modified by unprivileged users.
This script takes the path to certbot-auto as its only command line
argument. It then checks that the file can only be modified by uid/gid
< 1000 and if other users can modify the file, it prints a warning with
a suggestion on how to solve the problem.
Permissions on symlinks in the absolute path of certbot-auto are ignored
and only the canonical path to certbot-auto is checked. There could be
permissions problems caused by these symlinks that go unreported by this
script; however, issues like this were not caused by our documentation
and are ignored for the sake of simplicity.
All warnings are printed to stdout rather than stderr so all stderr
output from this script can be suppressed to avoid printing messages if
this script fails for some reason.
"""
from __future__ import print_function
import os
import stat
import sys
FORUM_POST_URL = 'https://community.letsencrypt.org/t/certbot-auto-deployment-best-practices/91979/'
def has_safe_permissions(path):
"""Returns True if the given path has secure permissions.
The permissions are considered safe if the file is only writable by
uid/gid < 1000.
The reason we allow more IDs than 0 is because on some systems such
as Debian, system users/groups other than uid/gid 0 are used for the
path we recommend in our instructions which is /usr/local/bin. 1000
was chosen because on Debian 0-999 is reserved for system IDs[1] and
on RHEL either 0-499 or 0-999 is reserved depending on the
version[2][3]. Due to these differences across different OSes, this
detection isn't perfect so we only determine permissions are
insecure when we can be reasonably confident there is a problem
regardless of the underlying OS.
[1] https://www.debian.org/doc/debian-policy/ch-opersys.html#uid-and-gid-classes
[2] https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/deployment_guide/ch-managing_users_and_groups
[3] https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/7/html/system_administrators_guide/ch-managing_users_and_groups
:param str path: filesystem path to check
:returns: True if the path has secure permissions, otherwise, False
:rtype: bool
"""
# os.stat follows symlinks before obtaining information about a file.
stat_result = os.stat(path)
if stat_result.st_mode & stat.S_IWOTH:
return False
if stat_result.st_mode & stat.S_IWGRP and stat_result.st_gid >= 1000:
return False
if stat_result.st_mode & stat.S_IWUSR and stat_result.st_uid >= 1000:
return False
return True
def main(certbot_auto_path):
current_path = os.path.realpath(certbot_auto_path)
last_path = None
permissions_ok = True
# This loop makes use of the fact that os.path.dirname('/') == '/'.
while current_path != last_path and permissions_ok:
permissions_ok = has_safe_permissions(current_path)
last_path = current_path
current_path = os.path.dirname(current_path)
if not permissions_ok:
print('{0} has insecure permissions!'.format(certbot_auto_path))
print('To learn how to fix them, visit {0}'.format(FORUM_POST_URL))
if __name__ == '__main__':
main(sys.argv[1])
UNLIKELY_EOF
}
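As a quick, self-contained illustration of the stat bits that has_safe_permissions above inspects (the temporary file and mode are arbitrary, POSIX semantics assumed):

# Demonstrates the mode bits checked by has_safe_permissions above
# on an arbitrary temporary file.
import os
import stat
import tempfile

fd, path = tempfile.mkstemp()
os.close(fd)
os.chmod(path, 0o644)  # owner read/write, group and others read-only

st = os.stat(path)
print(bool(st.st_mode & stat.S_IWOTH))  # False: not world-writable
print(bool(st.st_mode & stat.S_IWGRP))  # False: not group-writable
print(bool(st.st_mode & stat.S_IWUSR))  # True: writable by its owner
print(st.st_uid, st.st_gid)             # compared against the < 1000 cutoff above

os.remove(path)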
if [ "$1" = "--le-auto-phase2" ]; then
# Phase 2: Create venv, install LE, and run.
@@ -1116,7 +967,20 @@ if [ "$1" = "--le-auto-phase2" ]; then
if [ "$LE_AUTO_VERSION" != "$INSTALLED_VERSION" ]; then
say "Creating virtual environment..."
DeterminePythonVersion
CreateVenv "$LE_PYTHON" "$VENV_PATH" "$PYVER" "$VERBOSE"
rm -rf "$VENV_PATH"
if [ "$PYVER" -le 27 ]; then
if [ "$VERBOSE" = 1 ]; then
virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH"
else
virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH" > /dev/null
fi
else
if [ "$VERBOSE" = 1 ]; then
"$LE_PYTHON" -m venv "$VENV_PATH"
else
"$LE_PYTHON" -m venv "$VENV_PATH" > /dev/null
fi
fi
if [ -n "$BOOTSTRAP_VERSION" ]; then
echo "$BOOTSTRAP_VERSION" > "$BOOTSTRAP_VERSION_PATH"
@@ -1130,197 +994,208 @@ if [ "$1" = "--le-auto-phase2" ]; then
# There is no $ interpolation due to quotes on starting heredoc delimiter.
# -------------------------------------------------------------------------
cat << "UNLIKELY_EOF" > "$TEMP_DIR/letsencrypt-auto-requirements.txt"
# This is the flattened list of packages certbot-auto installs.
# To generate this, do (with docker and package hashin installed):
# ```
# letsencrypt-auto-source/rebuild_dependencies.py \
# letsencrypt-auto-sources/pieces/dependency-requirements.txt
# ```
ConfigArgParse==0.14.0 \
--hash=sha256:2e2efe2be3f90577aca9415e32cb629aa2ecd92078adbe27b53a03e53ff12e91
asn1crypto==0.24.0 \
--hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \
--hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49
certifi==2019.3.9 \
--hash=sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5 \
--hash=sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae
cffi==1.12.2 \
--hash=sha256:00b97afa72c233495560a0793cdc86c2571721b4271c0667addc83c417f3d90f \
--hash=sha256:0ba1b0c90f2124459f6966a10c03794082a2f3985cd699d7d63c4a8dae113e11 \
--hash=sha256:0bffb69da295a4fc3349f2ec7cbe16b8ba057b0a593a92cbe8396e535244ee9d \
--hash=sha256:21469a2b1082088d11ccd79dd84157ba42d940064abbfa59cf5f024c19cf4891 \
--hash=sha256:2e4812f7fa984bf1ab253a40f1f4391b604f7fc424a3e21f7de542a7f8f7aedf \
--hash=sha256:2eac2cdd07b9049dd4e68449b90d3ef1adc7c759463af5beb53a84f1db62e36c \
--hash=sha256:2f9089979d7456c74d21303c7851f158833d48fb265876923edcb2d0194104ed \
--hash=sha256:3dd13feff00bddb0bd2d650cdb7338f815c1789a91a6f68fdc00e5c5ed40329b \
--hash=sha256:4065c32b52f4b142f417af6f33a5024edc1336aa845b9d5a8d86071f6fcaac5a \
--hash=sha256:51a4ba1256e9003a3acf508e3b4f4661bebd015b8180cc31849da222426ef585 \
--hash=sha256:59888faac06403767c0cf8cfb3f4a777b2939b1fbd9f729299b5384f097f05ea \
--hash=sha256:59c87886640574d8b14910840327f5cd15954e26ed0bbd4e7cef95fa5aef218f \
--hash=sha256:610fc7d6db6c56a244c2701575f6851461753c60f73f2de89c79bbf1cc807f33 \
--hash=sha256:70aeadeecb281ea901bf4230c6222af0248c41044d6f57401a614ea59d96d145 \
--hash=sha256:71e1296d5e66c59cd2c0f2d72dc476d42afe02aeddc833d8e05630a0551dad7a \
--hash=sha256:8fc7a49b440ea752cfdf1d51a586fd08d395ff7a5d555dc69e84b1939f7ddee3 \
--hash=sha256:9b5c2afd2d6e3771d516045a6cfa11a8da9a60e3d128746a7fe9ab36dfe7221f \
--hash=sha256:9c759051ebcb244d9d55ee791259ddd158188d15adee3c152502d3b69005e6bd \
--hash=sha256:b4d1011fec5ec12aa7cc10c05a2f2f12dfa0adfe958e56ae38dc140614035804 \
--hash=sha256:b4f1d6332339ecc61275bebd1f7b674098a66fea11a00c84d1c58851e618dc0d \
--hash=sha256:c030cda3dc8e62b814831faa4eb93dd9a46498af8cd1d5c178c2de856972fd92 \
--hash=sha256:c2e1f2012e56d61390c0e668c20c4fb0ae667c44d6f6a2eeea5d7148dcd3df9f \
--hash=sha256:c37c77d6562074452120fc6c02ad86ec928f5710fbc435a181d69334b4de1d84 \
--hash=sha256:c8149780c60f8fd02752d0429246088c6c04e234b895c4a42e1ea9b4de8d27fb \
--hash=sha256:cbeeef1dc3c4299bd746b774f019de9e4672f7cc666c777cd5b409f0b746dac7 \
--hash=sha256:e113878a446c6228669144ae8a56e268c91b7f1fafae927adc4879d9849e0ea7 \
--hash=sha256:e21162bf941b85c0cda08224dade5def9360f53b09f9f259adb85fc7dd0e7b35 \
--hash=sha256:fb6934ef4744becbda3143d30c6604718871495a5e36c408431bf33d9c146889
chardet==3.0.4 \
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
# This is the flattened list of packages certbot-auto installs. To generate
# this, do
# `pip install --no-cache-dir -e acme -e . -e certbot-apache -e certbot-nginx`,
# and then use `hashin` or a more secure method to gather the hashes.
# Hashin example:
# pip install hashin
# hashin -r dependency-requirements.txt cryptography==1.5.2
# sets the new certbot-auto pinned version of cryptography to 1.5.2
argparse==1.4.0 \
--hash=sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314 \
--hash=sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4
# This comes before cffi because cffi will otherwise install an unchecked
# version via setup_requires.
pycparser==2.14 \
--hash=sha256:7959b4a74abdc27b312fed1c21e6caf9309ce0b29ea86b591fd2e99ecdf27f73 \
--no-binary pycparser
asn1crypto==0.22.0 \
--hash=sha256:d232509fefcfcdb9a331f37e9c9dc20441019ad927c7d2176cf18ed5da0ba097 \
--hash=sha256:cbbadd640d3165ab24b06ef25d1dca09a3441611ac15f6a6b452474fdf0aed1a
cffi==1.10.0 \
--hash=sha256:446699c10f3c390633d0722bc19edbc7ac4b94761918a4a4f7908a24e86ebbd0 \
--hash=sha256:562326fc7f55a59ef3fef5e82908fe938cdc4bbda32d734c424c7cd9ed73e93a \
--hash=sha256:7f732ad4a30db0b39400c3f7011249f7d0701007d511bf09604729aea222871f \
--hash=sha256:94fb8410c6c4fc48e7ea759d3d1d9ca561171a88d00faddd4aa0306f698ad6a0 \
--hash=sha256:587a5043df4b00a2130e09fed42da02a4ed3c688bd9bf07a3ac89d2271f4fb07 \
--hash=sha256:ec08b88bef627ec1cea210e1608c85d3cf44893bcde74e41b7f7dbdfd2c1bad6 \
--hash=sha256:a41406f6d62abcdf3eef9fd998d8dcff04fd2a7746644143045feeebd76352d1 \
--hash=sha256:b560916546b2f209d74b82bdbc3223cee9a165b0242fa00a06dfc48a2054864a \
--hash=sha256:e74896774e437f4715c57edeb5cf3d3a40d7727f541c2c12156617b5a15d1829 \
--hash=sha256:9a31c18ba4881a116e448c52f3f5d3e14401cf7a9c43cc88f06f2a7f5428da0e \
--hash=sha256:80796ea68e11624a0279d3b802f88a7fe7214122b97a15a6c97189934a2cc776 \
--hash=sha256:f4019826a2dec066c909a1f483ef0dcf9325d6740cc0bd15308942b28b0930f7 \
--hash=sha256:7248506981eeba23888b4140a69a53c4c0c0a386abcdca61ed8dd790a73e64b9 \
--hash=sha256:a8955265d146e86fe2ce116394be4eaf0cb40314a79b19f11c4fa574cd639572 \
--hash=sha256:c49187260043bd4c1d6a52186f9774f17d9b1da0a406798ebf4bfc12da166ade \
--hash=sha256:c1d8b3d8dcb5c23ac1a8bf56422036f3f305a3c5a8bc8c354256579a1e2aa2c1 \
--hash=sha256:9e389615bcecb8c782a87939d752340bb0a3a097e90bae54d7f0915bc12f45bd \
--hash=sha256:d09ff358f75a874f69fa7d1c2b4acecf4282a950293fcfcf89aa606da8a9a500 \
--hash=sha256:b69b4557aae7de18b7c174a917fe19873529d927ac592762d9771661875bbd40 \
--hash=sha256:5de52b081a2775e76b971de9d997d85c4457fc0a09079e12d66849548ae60981 \
--hash=sha256:e7d88fecb7b6250a1fd432e6dc64890342c372fce13dbfe4bb6f16348ad00c14 \
--hash=sha256:1426e67e855ef7f5030c9184f4f1a9f4bfa020c31c962cd41fd129ec5aef4a6a \
--hash=sha256:267dd2c66a5760c5f4d47e2ebcf8eeac7ef01e1ae6ae7a6d0d241a290068bc38 \
--hash=sha256:e553eb489511cacf19eda6e52bc9e151316f0d721724997dda2c4d3079b778db \
--hash=sha256:98b89b2c57f97ce2db7aeba60db173c84871d73b40e41a11ea95de1500ddc57e \
--hash=sha256:e2b7e090188833bc58b2ae03fb864c22688654ebd2096bcf38bc860c4f38a3d8 \
--hash=sha256:afa7d8b8d38ad40db8713ee053d41b36d87d6ae5ec5ad36f9210b548a18dc214 \
--hash=sha256:4fc9c2ff7924b3a1fa326e1799e5dd58cac585d7fb25fe53ccaa1333b0453d65 \
--hash=sha256:937db39a1ec5af3003b16357b2042bba67c88d43bc11aaa203fa8a5924524209 \
--hash=sha256:ab22285797631df3b513b2cd3ecdc51cd8e3d36788e3991d93d0759d6883b027 \
--hash=sha256:96e599b924ef009aa867f725b3249ee51d76489f484d3a45b4bd219c5ec6ed59 \
--hash=sha256:bea842a0512be6a8007e585790bccd5d530520fc025ce63b03e139be373b0063 \
--hash=sha256:e7175287f7fe7b1cc203bb958b17db40abd732690c1e18e700f10e0843a58598 \
--hash=sha256:285ab352552f52f1398c912556d4d36d4ea9b8450e5c65d03809bf9886755533 \
--hash=sha256:5576644b859197da7bbd8f8c7c2fb5dcc6cd505cadb42992d5f104c013f8a214 \
--hash=sha256:b3b02911eb1f6ada203b0763ba924234629b51586f72a21faacc638269f4ced5
ConfigArgParse==0.12.0 \
--hash=sha256:28cd7d67669651f2a4518367838c49539457504584a139709b2b8f6c208ef339 \
--no-binary ConfigArgParse
configobj==5.0.6 \
--hash=sha256:a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902
cryptography==2.6.1 \
--hash=sha256:066f815f1fe46020877c5983a7e747ae140f517f1b09030ec098503575265ce1 \
--hash=sha256:210210d9df0afba9e000636e97810117dc55b7157c903a55716bb73e3ae07705 \
--hash=sha256:26c821cbeb683facb966045e2064303029d572a87ee69ca5a1bf54bf55f93ca6 \
--hash=sha256:2afb83308dc5c5255149ff7d3fb9964f7c9ee3d59b603ec18ccf5b0a8852e2b1 \
--hash=sha256:2db34e5c45988f36f7a08a7ab2b69638994a8923853dec2d4af121f689c66dc8 \
--hash=sha256:409c4653e0f719fa78febcb71ac417076ae5e20160aec7270c91d009837b9151 \
--hash=sha256:45a4f4cf4f4e6a55c8128f8b76b4c057027b27d4c67e3fe157fa02f27e37830d \
--hash=sha256:48eab46ef38faf1031e58dfcc9c3e71756a1108f4c9c966150b605d4a1a7f659 \
--hash=sha256:6b9e0ae298ab20d371fc26e2129fd683cfc0cfde4d157c6341722de645146537 \
--hash=sha256:6c4778afe50f413707f604828c1ad1ff81fadf6c110cb669579dea7e2e98a75e \
--hash=sha256:8c33fb99025d353c9520141f8bc989c2134a1f76bac6369cea060812f5b5c2bb \
--hash=sha256:9873a1760a274b620a135054b756f9f218fa61ca030e42df31b409f0fb738b6c \
--hash=sha256:9b069768c627f3f5623b1cbd3248c5e7e92aec62f4c98827059eed7053138cc9 \
--hash=sha256:9e4ce27a507e4886efbd3c32d120db5089b906979a4debf1d5939ec01b9dd6c5 \
--hash=sha256:acb424eaca214cb08735f1a744eceb97d014de6530c1ea23beb86d9c6f13c2ad \
--hash=sha256:c8181c7d77388fe26ab8418bb088b1a1ef5fde058c6926790c8a0a3d94075a4a \
--hash=sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460 \
--hash=sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd \
--hash=sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6
# Package enum34 needs to be explicitly limited to Python2.x, in order to avoid
# certbot-auto failures on Python 3.6+ which enum34 doesn't support. See #5456.
enum34==1.1.6 ; python_version < '3.4' \
--hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
--hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
--hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
--hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1
--hash=sha256:a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902 \
--no-binary configobj
cryptography==2.0.2 \
--hash=sha256:187ae17358436d2c760f28c2aeb02fefa3f37647a9c5b6f7f7c3e83cd1c5a972 \
--hash=sha256:19e43a13bbf52028dd1e810c803f2ad8880d0692d772f98d42e1eaf34bdee3d6 \
--hash=sha256:da9291502cbc87dc0284a20c56876e4d2e68deac61cc43df4aec934e44ca97b1 \
--hash=sha256:0954f8813095f581669330e0a2d5e726c33ac7f450c1458fac58bab54595e516 \
--hash=sha256:d68b0cc40a8432ed3fc84876c519de704d6001800ec22b136e75ae841910c45b \
--hash=sha256:2f8ad9580ab4da645cfea52a91d2da99a49a1e76616d8be68441a986fad652b0 \
--hash=sha256:cc00b4511294f5f6b65c4e77a1a9c62f52490a63d2c120f3872176b40a82351e \
--hash=sha256:cf896020f6a9f095a547b3d672c8db1ef2ed71fca11250731fa1d4a4cb8b1590 \
--hash=sha256:e0fdb8322206fa02aa38f71519ff75dce2eb481b7e1110e2936795cb376bb6ee \
--hash=sha256:277538466657ca5d6637f80be100242f9831d75138b788d718edd3aab34621f8 \
--hash=sha256:2c77eb0560f54ce654ab82d6b2a64327a71ee969b29022bf9746ca311c9f5069 \
--hash=sha256:755a7853b679e79d0a799351c092a9b0271f95ff54c8dd8823d8b527a2926a86 \
--hash=sha256:77197a2d525e761cdd4c771180b4bd0d80703654c6385e4311cbbbe2beb56fa1 \
--hash=sha256:eb8bb79d0ab00c931c8333b745f06fec481a51c52d70acd4ee95d6093ba5c386 \
--hash=sha256:131f61de82ef28f3e20beb4bfc24f9692d28cecfd704e20e6c7f070f7793013a \
--hash=sha256:ac35435974b2e27cd4520f29c191d7da36f4189aa3264e52c4c6c6d089ab6142 \
--hash=sha256:04b6ea99daa2a8460728794213d76d45ad58ea247dc7e7ff148d7dd726e87863 \
--hash=sha256:2b9442f8b4c3d575f6cc3db0e856034e0f5a9d55ecd636f52d8c496795b26952 \
--hash=sha256:b3d3b3ecba1fe1bdb6f180770a137f877c8f07571f7b2934bb269475bcf0e5e8 \
--hash=sha256:670a58c0d75cb0e78e73dd003bd96d4440bbb1f2bc041dcf7b81767ca4fb0ce9 \
--hash=sha256:5af84d23bdb86b5e90aca263df1424b43f1748480bfcde3ac2a3cbe622612468 \
--hash=sha256:ba22e8eefabdd7aca37d0c0c00d2274000d2cebb5cce9e5a710cb55bf8797b31 \
--hash=sha256:b798b22fa7e92b439547323b8b719d217f1e1b7677585cfeeedf3b55c70bb7fb \
--hash=sha256:59cff28af8cce96cb7e94a459726e1d88f6f5fa75097f9dcbebd99118d64ea4c \
--hash=sha256:fe859e445abc9ba9e97950ddafb904e23234c4ecb76b0fae6c86e80592ce464a \
--hash=sha256:655f3c474067f1e277430f23cc0549f0b1dc99b82aec6e53f80b9b2db7f76f11 \
--hash=sha256:0ebc2be053c9a03a2f3e20a466e87bf12a51586b3c79bd2a22171b073a805346 \
--hash=sha256:01e6e60654df64cca53733cda39446d67100c819c181d403afb120e0d2a71e1b \
--hash=sha256:d46f4e5d455cb5563685c52ef212696f0a6cc1ea627603218eabbd8a095291d8 \
--hash=sha256:3780b2663ee7ebb37cb83263326e3cd7f8b2ea439c448539d4b87de12c8d06ab
enum34==1.1.2 ; python_version < '3.4' \
--hash=sha256:2475d7fcddf5951e92ff546972758802de5260bf409319a9f1934e6bbc8b1dc7 \
--hash=sha256:35907defb0f992b75ab7788f65fedc1cf20ffa22688e0e6f6f12afc06b3ea501
funcsigs==1.0.2 \
--hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
--hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50
future==0.17.1 \
--hash=sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8
idna==2.8 \
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
ipaddress==1.0.22 \
--hash=sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794 \
--hash=sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c
josepy==1.1.0 \
--hash=sha256:1309a25aac3caeff5239729c58ff9b583f7d022ffdb1553406ddfc8e5b52b76e \
--hash=sha256:fb5c62c77d26e04df29cb5ecd01b9ce69b6fcc9e521eb1ca193b7faa2afa7086
idna==2.5 \
--hash=sha256:cc19709fd6d0cbfed39ea875d29ba6d4e22c0cebc510a76d6302a28385e8bb70 \
--hash=sha256:3cb5ce08046c4e3a560fc02f138d0ac63e00f8ce5901a56b32ec8b7994082aab
ipaddress==1.0.16 \
--hash=sha256:935712800ce4760701d89ad677666cd52691fd2f6f0b340c8b4239a3c17988a5 \
--hash=sha256:5a3182b322a706525c46282ca6f064d27a02cffbd449f9f47416f1dc96aa71b0
josepy==1.0.1 \
--hash=sha256:354a3513038a38bbcd27c97b7c68a8f3dfaff0a135b20a92c6db4cc4ea72915e \
--hash=sha256:9f48b88ca37f0244238b1cc77723989f7c54f7b90b2eee6294390bacfe870acc
linecache2==1.0.0 \
--hash=sha256:e78be9c0a0dfcbac712fe04fbf92b96cddae80b1b842f24248214c8496f006ef \
--hash=sha256:4b26ff4e7110db76eeb6f5a7b64a82623839d595c2038eeda662f2a2db78e97c
# Using an older version of mock here prevents regressions of #5276.
mock==1.3.0 \
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6 \
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb
parsedatetime==2.4 \
--hash=sha256:3d817c58fb9570d1eec1dd46fa9448cd644eeed4fb612684b02dfda3a79cb84b \
--hash=sha256:9ee3529454bf35c40a77115f5a596771e59e1aee8c53306f346c461b8e913094
pbr==5.1.3 \
--hash=sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843 \
--hash=sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824
pyOpenSSL==19.0.0 \
--hash=sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200 \
--hash=sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6
pyRFC3339==1.1 \
--hash=sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4 \
--hash=sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a
pycparser==2.19 \
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3
pyparsing==2.3.1 \
--hash=sha256:66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a \
--hash=sha256:f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb \
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6
ordereddict==1.1 \
--hash=sha256:1c35b4ac206cef2d24816c89f89cf289dd3d38cf7c449bb3fab7bf6d43f01b1f \
--no-binary ordereddict
packaging==16.8 \
--hash=sha256:99276dc6e3a7851f32027a68f1095cd3f77c148091b092ea867a351811cfe388 \
--hash=sha256:5d50835fdf0a7edf0b55e311b7c887786504efea1177abd7e69329a8e5ea619e
parsedatetime==2.1 \
--hash=sha256:ce9d422165cf6e963905cd5f74f274ebf7cc98c941916169178ef93f0e557838 \
--hash=sha256:17c578775520c99131634e09cfca5a05ea9e1bd2a05cd06967ebece10df7af2d
pbr==1.8.1 \
--hash=sha256:46c8db75ae75a056bd1cc07fa21734fe2e603d11a07833ecc1eeb74c35c72e0c \
--hash=sha256:e2127626a91e6c885db89668976db31020f0af2da728924b56480fc7ccf09649
pyOpenSSL==16.2.0 \
--hash=sha256:26ca380ddf272f7556e48064bbcd5bd71f83dfc144f3583501c7ddbd9434ee17 \
--hash=sha256:7779a3bbb74e79db234af6a08775568c6769b5821faecf6e2f4143edb227516e
pyparsing==2.1.8 \
--hash=sha256:2f0f5ceb14eccd5aef809d6382e87df22ca1da583c79f6db01675ce7d7f49c18 \
--hash=sha256:03a4869b9f3493807ee1f1cb405e6d576a1a2ca4d81a982677c0c1ad6177c56b \
--hash=sha256:ab09aee814c0241ff0c503cff30018219fe1fc14501d89f406f4664a0ec9fbcd \
--hash=sha256:6e9a7f052f8e26bcf749e4033e3115b6dc7e3c85aafcb794b9a88c9d9ef13c97 \
--hash=sha256:9f463a6bcc4eeb6c08f1ed84439b17818e2085937c0dee0d7674ac127c67c12b \
--hash=sha256:3626b4d81cfb300dad57f52f2f791caaf7b06c09b368c0aa7b868e53a5775424 \
--hash=sha256:367b90cc877b46af56d4580cd0ae278062903f02b8204ab631f5a2c0f50adfd0 \
--hash=sha256:9f1ea360086cd68681e7f4ca8f1f38df47bf81942a0d76a9673c2d23eff35b13
pyRFC3339==1.0 \
--hash=sha256:eea31835c56e2096af4363a5745a784878a61d043e247d3a6d6a0a32a9741f56 \
--hash=sha256:8dfbc6c458b8daba1c0f3620a8c78008b323a268b27b7359e92a4ae41325f535
python-augeas==0.5.0 \
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2
pytz==2018.9 \
--hash=sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9 \
--hash=sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c
requests==2.21.0 \
--hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
--hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b
requests-toolbelt==0.9.1 \
--hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
--hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0
six==1.12.0 \
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73
urllib3==1.24.2 \
--hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \
--hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3
zope.component==4.5 \
--hash=sha256:6edfd626c3b593b72895a8cfcf79bff41f4619194ce996a85bce31ac02b94e55 \
--hash=sha256:984a06ba3def0b02b1117fa4c45b56e772e8c29c0340820fbf367e440a93a3a4
zope.deferredimport==4.3 \
--hash=sha256:2ddef5a7ecfff132a2dd796253366ecf9748a446e30f1a0b3a636aec9d9c05c5 \
--hash=sha256:4aae9cbacb2146cca58e62be0a914f0cec034d3b2d41135ea212ca8a96f4b5ec
zope.deprecation==4.4.0 \
--hash=sha256:0d453338f04bacf91bbfba545d8bcdf529aa829e67b705eac8c1a7fdce66e2df \
--hash=sha256:f1480b74995958b24ce37b0ef04d3663d2683e5d6debc96726eff18acf4ea113
zope.event==4.4 \
--hash=sha256:69c27debad9bdacd9ce9b735dad382142281ac770c4a432b533d6d65c4614bcf \
--hash=sha256:d8e97d165fd5a0997b45f5303ae11ea3338becfe68c401dd88ffd2113fe5cae7
zope.hookable==4.2.0 \
--hash=sha256:22886e421234e7e8cedc21202e1d0ab59960e40a47dd7240e9659a2d82c51370 \
--hash=sha256:39912f446e45b4e1f1951b5ffa2d5c8b074d25727ec51855ae9eab5408f105ab \
--hash=sha256:3adb7ea0871dbc56b78f62c4f5c024851fc74299f4f2a95f913025b076cde220 \
--hash=sha256:3d7c4b96341c02553d8b8d71065a9366ef67e6c6feca714f269894646bb8268b \
--hash=sha256:4e826a11a529ed0464ffcecf34b0b7bd1b4928dd5848c5c61bedd7833e8f4801 \
--hash=sha256:700d68cc30728de1c4c62088a981c6daeaefdf20a0d81995d2c0b7f442c5f88c \
--hash=sha256:77c82a430cedfbf508d1aa406b2f437363c24fa90c73f577ead0fb5295749b83 \
--hash=sha256:c1df3929a3666fc5a0c80d60a0c1e6f6ef97c7f6ed2f1b7cf49f3e6f3d4dde15 \
--hash=sha256:dba8b2dd2cd41cb5f37bfa3f3d82721b8ae10e492944e48ddd90a439227f2893 \
--hash=sha256:f492540305b15b5591bd7195d61f28946bb071de071cee5d68b6b8414da90fd2
zope.interface==4.6.0 \
--hash=sha256:086707e0f413ff8800d9c4bc26e174f7ee4c9c8b0302fbad68d083071822316c \
--hash=sha256:1157b1ec2a1f5bf45668421e3955c60c610e31913cc695b407a574efdbae1f7b \
--hash=sha256:11ebddf765bff3bbe8dbce10c86884d87f90ed66ee410a7e6c392086e2c63d02 \
--hash=sha256:14b242d53f6f35c2d07aa2c0e13ccb710392bcd203e1b82a1828d216f6f6b11f \
--hash=sha256:1b3d0dcabc7c90b470e59e38a9acaa361be43b3a6ea644c0063951964717f0e5 \
--hash=sha256:20a12ab46a7e72b89ce0671e7d7a6c3c1ca2c2766ac98112f78c5bddaa6e4375 \
--hash=sha256:298f82c0ab1b182bd1f34f347ea97dde0fffb9ecf850ecf7f8904b8442a07487 \
--hash=sha256:2f6175722da6f23dbfc76c26c241b67b020e1e83ec7fe93c9e5d3dd18667ada2 \
--hash=sha256:3b877de633a0f6d81b600624ff9137312d8b1d0f517064dfc39999352ab659f0 \
--hash=sha256:4265681e77f5ac5bac0905812b828c9fe1ce80c6f3e3f8574acfb5643aeabc5b \
--hash=sha256:550695c4e7313555549aa1cdb978dc9413d61307531f123558e438871a883d63 \
--hash=sha256:5f4d42baed3a14c290a078e2696c5f565501abde1b2f3f1a1c0a94fbf6fbcc39 \
--hash=sha256:62dd71dbed8cc6a18379700701d959307823b3b2451bdc018594c48956ace745 \
--hash=sha256:7040547e5b882349c0a2cc9b50674b1745db551f330746af434aad4f09fba2cc \
--hash=sha256:7e099fde2cce8b29434684f82977db4e24f0efa8b0508179fce1602d103296a2 \
--hash=sha256:7e5c9a5012b2b33e87980cee7d1c82412b2ebabcb5862d53413ba1a2cfde23aa \
--hash=sha256:81295629128f929e73be4ccfdd943a0906e5fe3cdb0d43ff1e5144d16fbb52b1 \
--hash=sha256:95cc574b0b83b85be9917d37cd2fad0ce5a0d21b024e1a5804d044aabea636fc \
--hash=sha256:968d5c5702da15c5bf8e4a6e4b67a4d92164e334e9c0b6acf080106678230b98 \
--hash=sha256:9e998ba87df77a85c7bed53240a7257afe51a07ee6bc3445a0bf841886da0b97 \
--hash=sha256:a0c39e2535a7e9c195af956610dba5a1073071d2d85e9d2e5d789463f63e52ab \
--hash=sha256:a15e75d284178afe529a536b0e8b28b7e107ef39626a7809b4ee64ff3abc9127 \
--hash=sha256:a6a6ff82f5f9b9702478035d8f6fb6903885653bff7ec3a1e011edc9b1a7168d \
--hash=sha256:b639f72b95389620c1f881d94739c614d385406ab1d6926a9ffe1c8abbea23fe \
--hash=sha256:bad44274b151d46619a7567010f7cde23a908c6faa84b97598fd2f474a0c6891 \
--hash=sha256:bbcef00d09a30948756c5968863316c949d9cedbc7aabac5e8f0ffbdb632e5f1 \
--hash=sha256:d788a3999014ddf416f2dc454efa4a5dbeda657c6aba031cf363741273804c6b \
--hash=sha256:eed88ae03e1ef3a75a0e96a55a99d7937ed03e53d0cffc2451c208db445a2966 \
--hash=sha256:f99451f3a579e73b5dd58b1b08d1179791d49084371d9a47baad3b22417f0317
zope.proxy==4.3.1 \
--hash=sha256:0cbcfcafaa3b5fde7ba7a7b9a2b5f09af25c9b90087ad65f9e61359fed0ca63b \
--hash=sha256:3de631dd5054a3a20b9ebff0e375f39c0565f1fb9131200d589a6a8f379214cd \
--hash=sha256:5429134d04d42262f4dac25f6dea907f6334e9a751ffc62cb1d40226fb52bdeb \
--hash=sha256:563c2454b2d0f23bca54d2e0e4d781149b7b06cb5df67e253ca3620f37202dd2 \
--hash=sha256:5bcf773345016b1461bb07f70c635b9386e5eaaa08e37d3939dcdf12d3fdbec5 \
--hash=sha256:8d84b7aef38c693874e2f2084514522bf73fd720fde0ce2a9352a51315ffa475 \
--hash=sha256:90de9473c05819b36816b6cb957097f809691836ed3142648bf62da84b4502fe \
--hash=sha256:dd592a69fe872445542a6e1acbefb8e28cbe6b4007b8f5146da917e49b155cc3 \
--hash=sha256:e7399ab865399fce322f9cefc6f2f3e4099d087ba581888a9fea1bbe1db42a08 \
--hash=sha256:e7d1c280d86d72735a420610df592aac72332194e531a8beff43a592c3a1b8eb \
--hash=sha256:e90243fee902adb0c39eceb3c69995c0f2004bc3fdb482fbf629efc656d124ed
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2 \
--no-binary python-augeas
pytz==2015.7 \
--hash=sha256:3abe6a6d3fc2fbbe4c60144211f45da2edbe3182a6f6511af6bbba0598b1f992 \
--hash=sha256:939ef9c1e1224d980405689a97ffcf7828c56d1517b31d73464356c1f2b7769e \
--hash=sha256:ead4aefa7007249e05e51b01095719d5a8dd95760089f5730aac5698b1932918 \
--hash=sha256:3cca0df08bd0ed98432390494ce3ded003f5e661aa460be7a734bffe35983605 \
--hash=sha256:3ede470d3d17ba3c07638dfa0d10452bc1b6e5ad326127a65ba77e6aaeb11bec \
--hash=sha256:68c47964f7186eec306b13629627722b9079cd4447ed9e5ecaecd4eac84ca734 \
--hash=sha256:dd5d3991950aae40a6c81de1578942e73d629808cefc51d12cd157980e6cfc18 \
--hash=sha256:a77c52062c07eb7c7b30545dbc73e32995b7e117eea750317b5cb5c7a4618f14 \
--hash=sha256:81af9aec4bc960a9a0127c488f18772dae4634689233f06f65443e7b11ebeb51 \
--hash=sha256:e079b1dadc5c06246cc1bb6fe1b23a50b1d1173f2edd5104efd40bb73a28f406 \
--hash=sha256:fbd26746772c24cb93c8b97cbdad5cb9e46c86bbdb1b9d8a743ee00e2fb1fc5d \
--hash=sha256:99266ef30a37e43932deec2b7ca73e83c8dbc3b9ff703ec73eca6b1dae6befea \
--hash=sha256:8b6ce1c993909783bc96e0b4f34ea223bff7a4df2c90bdb9c4e0f1ac928689e3
requests==2.12.1 \
--hash=sha256:3f3f27a9d0f9092935efc78054ef324eb9f8166718270aefe036dfa1e4f68e1e \
--hash=sha256:2109ecea94df90980be040490ff1d879971b024861539abb00054062388b612e
six==1.10.0 \
--hash=sha256:0ff78c403d9bccf5a425a6d31a12aa6b47f1c21ca4dc2573a7e2f32a97335eb1 \
--hash=sha256:105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a
traceback2==1.4.0 \
--hash=sha256:8253cebec4b19094d67cc5ed5af99bf1dba1285292226e98a31929f87a5d6b23 \
--hash=sha256:05acc67a09980c2ecfedd3423f7ae0104839eccb55fc645773e1caa0951c3030
unittest2==1.1.0 \
--hash=sha256:13f77d0875db6d9b435e1d4f41e74ad4cc2eb6e1d5c824996092b3430f088bb8 \
--hash=sha256:22882a0e418c284e1f718a822b3b022944d53d2d908e1690b319a9d3eb2c0579
zope.component==4.2.2 \
--hash=sha256:282c112b55dd8e3c869a3571f86767c150ab1284a9ace2bdec226c592acaf81a \
--no-binary zope.component
zope.event==4.1.0 \
--hash=sha256:dc7a59a2fd91730d3793131a5d261b29e93ec4e2a97f1bc487ce8defee2fe786 \
--no-binary zope.event
zope.interface==4.1.3 \
--hash=sha256:f07b631f7a601cd8cbd3332d54f43142c7088a83299f859356f08d1d4d4259b3 \
--hash=sha256:de5cca083b9439d8002fb76bbe6b4998c5a5a721fab25b84298967f002df4c94 \
--hash=sha256:6788416f7ea7f5b8a97be94825377aa25e8bdc73463e07baaf9858b29e737077 \
--hash=sha256:6f3230f7254518201e5a3708cbb2de98c848304f06e3ded8bfb39e5825cba2e1 \
--hash=sha256:5fa575a5240f04200c3088427d0d4b7b737f6e9018818a51d8d0f927a6a2517a \
--hash=sha256:522194ad6a545735edd75c8a83f48d65d1af064e432a7d320d64f56bafc12e99 \
--hash=sha256:e8c7b2d40943f71c99148c97f66caa7f5134147f57423f8db5b4825099ce9a09 \
--hash=sha256:279024f0208601c3caa907c53876e37ad88625f7eaf1cb3842dbe360b2287017 \
--hash=sha256:2e221a9eec7ccc58889a278ea13dcfed5ef939d80b07819a9a8b3cb1c681484f \
--hash=sha256:69118965410ec86d44dc6b9017ee3ddbd582e0c0abeef62b3a19dbf6c8ad132b \
--hash=sha256:d04df8686ec864d0cade8cf199f7f83aecd416109a20834d568f8310ded12dea \
--hash=sha256:e75a947e15ee97e7e71e02ea302feb2fc62d3a2bb4668bf9dfbed43a506ac7e7 \
--hash=sha256:4e45d22fb883222a5ab9f282a116fec5ee2e8d1a568ccff6a2d75bbd0eb6bcfc \
--hash=sha256:bce9339bb3c7a55e0803b63d21c5839e8e479bc85c4adf42ae415b72f94facb2 \
--hash=sha256:928138365245a0e8869a5999fbcc2a45475a0a6ed52a494d60dbdc540335fedd \
--hash=sha256:0d841ba1bb840eea0e6489dc5ecafa6125554971f53b5acb87764441e61bceba \
--hash=sha256:b09c8c1d47b3531c400e0195697f1414a63221de6ef478598a4f1460f7d9a392
requests-toolbelt==0.8.0 \
--hash=sha256:42c9c170abc2cacb78b8ab23ac957945c7716249206f90874651971a4acff237 \
--hash=sha256:f6a531936c6fa4c6cfce1b9c10d5c4f498d16528d2a54a22ca00011205a187b5
# Contains the requirements for the letsencrypt package.
#
@@ -1333,29 +1208,31 @@ letsencrypt==0.7.0 \
--hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
--hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9
certbot==0.37.1 \
--hash=sha256:84dbdad204327b8d8ef9ab5b040f2be1e427a9f7e087affcc9a6051ea1b03fe7 \
--hash=sha256:aace73e63b0c11cdb4b0bd33e1780c1fbe0ce5669dc72e80c3aa9500145daf16
acme==0.37.1 \
--hash=sha256:83a4f6f3c5eb6a85233d5ba87714b426f2d096df58d711f8a2fc4071eb3fd3fc \
--hash=sha256:c069a761990751f7c4bf51d2e87ae10319bf460de6629d2908c9fa6f69e97111
certbot-apache==0.37.1 \
--hash=sha256:3ea832408877b12b3a60d17e8b2ee3387364f8c3023ac267161c25b99087cd42 \
--hash=sha256:e46c2644451101c0e216aa1f525a577cc903efaf871e0e4da277224a4439040c
certbot-nginx==0.37.1 \
--hash=sha256:1f9af389d26f06634e2eefaace3354e7679dabb4295e1d55d05a4ee7e23a64bd \
--hash=sha256:02a7ec15bd388d0f0e94a34c86a8f8d618ec7d5ffde0c206039bb4c46b294ce4
certbot==0.25.1 \
--hash=sha256:01689015364685fef3f1e1fb7832ba84eb3b0aa85bc5a71c96661f6d4c59981f \
--hash=sha256:5c23e5186133bb1afd805be5e0cd2fb7b95862a8b0459c9ecad4ae60f933e54e
acme==0.25.1 \
--hash=sha256:26e641a01536705fe5f12d856703b8ef06e5a07981a7b6379d2771dcdb69a742 \
--hash=sha256:47b5f3f73d69b7b1d13f918aa2cd75a8093069a68becf4af38e428e4613b2734
certbot-apache==0.25.1 \
--hash=sha256:a28b7c152cc11474bef5b5e7967aaea42b2c0aaf86fd82ee4082713d33cee5a9 \
--hash=sha256:ed012465617073a0f1057fe854dc8d1eb6d2dd7ede1fb2eee765129fed2a095a
certbot-nginx==0.25.1 \
--hash=sha256:83f82c3ba08c0b1d4bf449ac24018e8e7dd34a6248d35466f2de7da1cd312e15 \
--hash=sha256:68f98b41c54e0bf4218ef293079597176617bee3837ae3aa6528ce2ff0bf4f9c
UNLIKELY_EOF
# -------------------------------------------------------------------------
cat << "UNLIKELY_EOF" > "$TEMP_DIR/pipstrap.py"
#!/usr/bin/env python
"""A small script that can act as a trust root for installing pip >=8
Embed this in your project, and your VCS checkout is all you have to trust. In
a post-peep era, this lets you claw your way to a hash-checking version of pip,
with which you can install the rest of your dependencies safely. All it assumes
is Python 2.6 or better and *some* version of pip already installed. If
anything goes wrong, it will exit with a non-zero status code.
"""
# This is here so embedded copies are MIT-compliant:
# Copyright (c) 2016 Erik Rose
@@ -1374,6 +1251,7 @@ from distutils.version import StrictVersion
from hashlib import sha256
from os import environ
from os.path import join
from pipes import quote
from shutil import rmtree
try:
from subprocess import check_output
@@ -1393,7 +1271,7 @@ except ImportError:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd)
return output
import sys
from sys import exit, version_info
from tempfile import mkdtemp
try:
from urllib2 import build_opener, HTTPHandler, HTTPSHandler
@@ -1415,7 +1293,7 @@ maybe_argparse = (
[('18/dd/e617cfc3f6210ae183374cd9f6a26b20514bbb5a792af97949c5aacddf0f/'
'argparse-1.4.0.tar.gz',
'62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4')]
if sys.version_info < (2, 7, 0) else [])
if version_info < (2, 7, 0) else [])
PACKAGES = maybe_argparse + [
@@ -1424,9 +1302,9 @@ PACKAGES = maybe_argparse + [
'pip-{0}.tar.gz'.format(PIP_VERSION),
'09f243e1a7b461f654c26a725fa373211bb7ff17a9300058b205c61658ca940d'),
# This version of setuptools has only optional dependencies:
('37/1b/b25507861991beeade31473868463dad0e58b1978c209de27384ae541b0b/'
'setuptools-40.6.3.zip',
'3b474dad69c49f0d2d86696b68105f3a6f195f7ab655af12ef9a9c326d2b08f8'),
('59/88/2f3990916931a5de6fa9706d6d75eb32ee8b78627bb2abaab7ed9e6d0622/'
'setuptools-29.0.1.tar.gz',
'b539118819a4857378398891fa5366e090690e46b3e41421a1e07d6e9fd8feb0'),
('c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/'
'wheel-0.29.0.tar.gz',
'1ebb8ad7e26b448e9caa4773d2357849bf80ff9e313964bcaf79cbf0201a1648')
@@ -1481,8 +1359,10 @@ def hashed_download(url, temp, digest):
def get_index_base():
"""Return the URL to the dir containing the "packages" folder.
Try to wring something out of PIP_INDEX_URL, if set. Hack "/simple" off the
end if it's there; that is likely to give us the right dir.
"""
env_var = environ.get('PIP_INDEX_URL', '').rstrip('/')
if env_var:
@@ -1496,9 +1376,11 @@ def get_index_base():
def main():
python = sys.executable or 'python'
pip_version = StrictVersion(check_output([python, '-m', 'pip', '--version'])
pip_version = StrictVersion(check_output(['pip', '--version'])
.decode('utf-8').split()[1])
min_pip_version = StrictVersion(PIP_VERSION)
if pip_version >= min_pip_version:
return 0
has_pip_cache = pip_version >= StrictVersion('6.0')
index_base = get_index_base()
temp = mkdtemp(prefix='pipstrap-')
@@ -1507,12 +1389,12 @@ def main():
temp,
digest)
for path, digest in PACKAGES]
# Calling pip as a module is the preferred way to avoid problems about pip self-upgrade.
command = [python, '-m', 'pip', 'install', '--no-index', '--no-deps', '-U']
# Disable cache since it is not used and it otherwise sometimes throws permission warnings:
command.extend(['--no-cache-dir'] if has_pip_cache else [])
command.extend(downloads)
check_output(command)
check_output('pip install --no-index --no-deps -U ' +
# Disable cache since we're not using it and it otherwise
# sometimes throws permission warnings:
('--no-cache-dir ' if has_pip_cache else '') +
' '.join(quote(d) for d in downloads),
shell=True)
except HashError as exc:
print(exc)
except Exception:
@@ -1525,7 +1407,7 @@ def main():
if __name__ == '__main__':
sys.exit(main())
exit(main())
UNLIKELY_EOF
# -------------------------------------------------------------------------
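The pipstrap hunk above replaces the shell-quoted `pip install ...` string with an argument-list invocation of `python -m pip`. A minimal standalone illustration of that pattern (the archive paths in the comment are hypothetical):

# Minimal illustration of invoking pip as a module with an argument list,
# avoiding shell quoting and problems with pip upgrading itself in place.
import subprocess
import sys

def pip_install(downloads, no_cache=True):
    command = [sys.executable, '-m', 'pip', 'install', '--no-index', '--no-deps', '-U']
    if no_cache:
        command.append('--no-cache-dir')
    command.extend(downloads)
    return subprocess.check_output(command)

# Example call (hypothetical local archives downloaded and hash-checked beforehand):
# pip_install(['/tmp/pipstrap/pip-18.0.tar.gz', '/tmp/pipstrap/setuptools-40.6.3.zip'])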
@@ -1609,24 +1491,6 @@ else
exit 0
fi
DeterminePythonVersion "NOCRASH"
# Don't warn about file permissions if the user disabled the check or we
# can't find an up-to-date Python.
if [ "$PYVER" -ge "$MIN_PYVER" -a "$NO_PERMISSIONS_CHECK" != 1 ]; then
# If the script fails for some reason, don't break certbot-auto.
set +e
# Suppress unexpected error output.
CHECK_PERM_OUT=$(CheckPathPermissions "$LE_PYTHON" "$0" 2>/dev/null)
CHECK_PERM_STATUS="$?"
set -e
# Only print output if the script ran successfully and it actually produced
# output. The latter check resolves
# https://github.com/certbot/certbot/issues/7012.
if [ "$CHECK_PERM_STATUS" = 0 -a -n "$CHECK_PERM_OUT" ]; then
error "$CHECK_PERM_OUT"
fi
fi
if [ "$NO_SELF_UPGRADE" != 1 ]; then
TEMP_DIR=$(TempDir)
trap 'rm -rf "$TEMP_DIR"' EXIT
@@ -1783,16 +1647,12 @@ if __name__ == '__main__':
UNLIKELY_EOF
# ---------------------------------------------------------------------------
DeterminePythonVersion "NOCRASH"
if [ "$PYVER" -lt "$MIN_PYVER" ]; then
error "WARNING: couldn't find Python $MIN_PYTHON_VERSION+ to check for updates."
elif ! REMOTE_VERSION=`"$LE_PYTHON" "$TEMP_DIR/fetch.py" --latest-version` ; then
error "WARNING: unable to check for updates."
fi
LE_VERSION_STATE=`CompareVersions "$LE_PYTHON" "$LE_AUTO_VERSION" "$REMOTE_VERSION"`
if [ "$LE_VERSION_STATE" = "UNOFFICIAL" ]; then
say "Unofficial certbot-auto version detected, self-upgrade is disabled: $LE_AUTO_VERSION"
elif [ "$LE_VERSION_STATE" = "OUTDATED" ]; then
elif [ "$LE_AUTO_VERSION" != "$REMOTE_VERSION" ]; then
say "Upgrading certbot-auto $LE_AUTO_VERSION to $REMOTE_VERSION..."
# Now we drop into Python so we don't have to install even more

View File

@@ -1 +0,0 @@
recursive-include certbot_integration_tests/assets *

View File

@@ -1,8 +0,0 @@
[run]
# Avoid false warnings because certbot packages are not installed in the thread that executes
# the coverage: indeed, certbot is launched as a CLI from a subprocess.
disable_warnings = module-not-imported,no-data-collected
[report]
# Exclude unit tests in coverage during integration tests.
omit = **/*_test.py,**/tests/*,**/dns_common*,**/certbot_nginx/parser_obj.py

View File

@@ -1 +0,0 @@
"""Package certbot_integration_test is for tests that require a live acme ca server instance"""

View File

@@ -1,32 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFlTCCA32gAwIBAgIUR3wbM8qFE68f8NxfciHhUjR1GeUwDQYJKoZIhvcNAQEL
BQAwWTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDESMBAGA1UEAwwJbmdpbngud3RmMCAX
DTE5MDQxODIwMDUwM1oYDzIyOTMwMTMwMjAwNTAzWjBZMQswCQYDVQQGEwJBVTET
MBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQ
dHkgTHRkMRIwEAYDVQQDDAluZ2lueC53dGYwggIiMA0GCSqGSIb3DQEBAQUAA4IC
DwAwggIKAoICAQC/W+yxYE0PWJOS4df71Yx596fDjW03I9JZuu9kfP7mneMgy+OC
HyRm0TEhl6FPUp9tD9YeEHloUZNjHEOg/qrnbEOspv3Ha3RFinzrzkMwbzEPR3Xf
0go+aVsWelDhapFl8fccw4tWwijVZQquhBsWOUnPenS3Txe96kEv2NNJlJ0qFUa+
rOTruzRzOzlbgKv5WRb4+BxxWonHLkAQ5IT87GBlsCerVIyPD+BnZveZGl6e9oMH
ZlZvUT6aWRnzFWjAnQGiJpVIw7l9r4EW0jq1z7wqb37FrqrFbtWrOfUZVE7AlqXH
aKIR82/xwkcZfFk3sCAM0IcZc8B2SDLi4zNZtDivW6qQgTC/3z5yf1hnJ+j00dtE
X5qYlgXRaM2raOn31lxcerk5pjgagQ7Zj+v3YZS0QnenrgyXJcdnXLDj+cIARzx4
QHtoO0nyP0RJqxvwX/H98513JTkeqFBc/Bx11UWYsUv20Qoo9IAuz0VDARu6rquu
k9anv56yvxo77qZ8r80l3z8eMyDA+UjuSD2p1Za09RAHfva7o8rMUqULHNQ4pfFH
JIUozHoinAg/9lBC/W80fcbILks+Sdi6E9WQ0n8PLl7oFLx9prEDCycKuC0z76J/
Shb6R6sWr1YtzUFUc5EH2g9pMriaqT8uGO4CMOeRemXahrdT/H+Xg5m4TQIDAQAB
o1MwUTAdBgNVHQ4EFgQU46gJeu9ZOfTQ6c4vfbWbSLUpEMowHwYDVR0jBBgwFoAU
46gJeu9ZOfTQ6c4vfbWbSLUpEMowDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0B
AQsFAAOCAgEAcnfkXDUTsEGs0MleegkGbRCVy72a3U7tv1KVTLB8qLPc3tpPJJoT
D4PbOuw9+yIE+HetZTZooOpaZoorLQdiwAEjlQ44RVuXSHSARQ8KW9ZZeiWN/Qvl
Ip4xJ/cHxcKTFKSc/99o8M+kmPKEXF9SUMfKPc5jXarNxCsnA3VriYqJ+CnYEox2
duNUEe3A9Y2d8ZxjmscBqlcXpk1kFwsCRT5UYVoUYwyjYznLkO5A+GJ0ZnMyRMQp
obUiB34hUrNgyOaBvizk+pNh9EV4rEBPRQwhy4vDMco4AjQcwLWQAQ9G4GSt/E+Q
62XdVDa6CAuOvBCudDPki7kEqNLbj1tMY1K/gsbgb6TYA/xTOVulAnqm4OEZ2svJ
0Jqw3BzMfRTaxsNU6jxm8WehVL15GjoJUzfs7Te+l7Vm/QNc1Dv2pmEhVfBibwMa
YxUZ8ClQtQ1lsOpne97Og0p/Cm93kKELNBLTjzXtpXGGPPYisAyNwe0Hadq8SiOd
pXeNwXa5vHOXHv8xBENzBvFJ3TRN2GmMlHBp/eOfVUx/huNSpcnh2gO3fn5EbMj7
43IaR133JW5yWbneYAMJOEAMdEB5EthRmEDtLVA7kLqLc/ywFTQ4VbS2b+PsOr5O
501nzt0OTMMEz+UafvGXj5OPJBhe26RtnYXzVwwLfto/F5udM5zglWo=
-----END CERTIFICATE-----

View File

@@ -1,11 +0,0 @@
#!/usr/bin/env python
import sys
import os
hook_script_type = os.path.basename(os.path.dirname(sys.argv[1]))
if hook_script_type == 'deploy' and ('RENEWED_DOMAINS' not in os.environ or 'RENEWED_LINEAGE' not in os.environ):
sys.stderr.write('Environment variables not properly set!\n')
sys.exit(1)
with open(sys.argv[2], 'a') as file_h:
file_h.write(hook_script_type + '\n')
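The deleted hook script above expects certbot to export RENEWED_DOMAINS and RENEWED_LINEAGE for deploy hooks. A rough, self-contained sketch of exercising such a script by hand (all paths and the domain are made up):

# Rough sketch: run a deploy-style hook by hand with the environment
# variables certbot sets for deploy hooks. All paths/domains are made up.
import os
import subprocess
import sys
import tempfile

HOOK_BODY = """\
import os, sys
hook_type = os.path.basename(os.path.dirname(sys.argv[1]))
if hook_type == 'deploy' and 'RENEWED_DOMAINS' not in os.environ:
    sys.exit(1)
with open(sys.argv[2], 'a') as probe:
    probe.write(hook_type + '\\n')
"""

workdir = tempfile.mkdtemp()
hook_path = os.path.join(workdir, 'hook.py')
probe_path = os.path.join(workdir, 'probe.txt')
with open(hook_path, 'w') as f:
    f.write(HOOK_BODY)

env = dict(os.environ)
env['RENEWED_DOMAINS'] = 'example.org'
env['RENEWED_LINEAGE'] = os.path.join(workdir, 'live', 'example.org')

# argv[1] mimics a path ending in .../deploy/<script>, argv[2] is the probe file.
subprocess.check_call(
    [sys.executable, hook_path, os.path.join(workdir, 'deploy', 'hook.py'), probe_path],
    env=env)
with open(probe_path) as probe:
    print(probe.read().strip())  # prints "deploy"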

View File

@@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQQIBADANBgkqhkiG9w0BAQEFAASCCSswggknAgEAAoICAQC/W+yxYE0PWJOS
4df71Yx596fDjW03I9JZuu9kfP7mneMgy+OCHyRm0TEhl6FPUp9tD9YeEHloUZNj
HEOg/qrnbEOspv3Ha3RFinzrzkMwbzEPR3Xf0go+aVsWelDhapFl8fccw4tWwijV
ZQquhBsWOUnPenS3Txe96kEv2NNJlJ0qFUa+rOTruzRzOzlbgKv5WRb4+BxxWonH
LkAQ5IT87GBlsCerVIyPD+BnZveZGl6e9oMHZlZvUT6aWRnzFWjAnQGiJpVIw7l9
r4EW0jq1z7wqb37FrqrFbtWrOfUZVE7AlqXHaKIR82/xwkcZfFk3sCAM0IcZc8B2
SDLi4zNZtDivW6qQgTC/3z5yf1hnJ+j00dtEX5qYlgXRaM2raOn31lxcerk5pjga
gQ7Zj+v3YZS0QnenrgyXJcdnXLDj+cIARzx4QHtoO0nyP0RJqxvwX/H98513JTke
qFBc/Bx11UWYsUv20Qoo9IAuz0VDARu6rquuk9anv56yvxo77qZ8r80l3z8eMyDA
+UjuSD2p1Za09RAHfva7o8rMUqULHNQ4pfFHJIUozHoinAg/9lBC/W80fcbILks+
Sdi6E9WQ0n8PLl7oFLx9prEDCycKuC0z76J/Shb6R6sWr1YtzUFUc5EH2g9pMria
qT8uGO4CMOeRemXahrdT/H+Xg5m4TQIDAQABAoICAAGGL+pxw+tdXz+KQPgmiUnn
aRSrqbUIugIw9Pst67HWjBqUxSkiKl4PSH7mAEjrdY2e1KvEodLs42mkrf04ShAx
0pArfFX8Sx7KrZgLOonGOPPQM+YmfCJnIGybaM2C1cmkFb3K6O81+LFKbr1ZHAYf
SrE2XnufS6cdmItTBMvPPTk6lieqpOAjy5UnYZuS+Muxo/czsrZMbFCD08rOpyiE
kXf94TMCJ2R0UetA7LPxe9N0TzLd485bLU55azV+dCkklwC9oe7EcFPJ9BNEdWdB
UlRcMvxMGdwct+L3QTaEb2QlTwi5kqDl+XxJeduAHA3Pf1Haz1iqjVvj01PvT1di
Cs0+ZeFBsa+BfiGDe9ONwuSQljda1CuI+vDv5bGUExulOSG1dHJ7RK9PBaXFaR/b
/9tRBwAw1Erm7s1JIkjda5Oc46jFb3HzDaZYB1n5hUmEIrYM8HhUOGITyVT3hxDO
AWlaV3aveQ0MmMXLptVXDgbjPGbWDGMLD9d5vUE9R7IyOLeXOmjthYlCH2rj378M
r2PkgX2tD0A/yoEZ8XCFdtBWSVajLdL0/gkm7sKosMABBy3yrSCxbHeq5TFuTAXA
hOdypX4NOZkA6WJU+hn3GkQyIScLqSrvGRA9kzHGoEWVZDKkB9DXg+dmTARZDWXD
mCnHkJo6+FcbhUpXniuZAoIBAQDmE94vvdstB+HEtXxN1uNDY7H8gPc/BUonU6a9
G5YOIbjByCfEDcXF8AUWekc6lc8DNG3ydx0dnb2ZAkxmdlsaD8GLqHGILzlSsOwR
sez8nR4+4n9vYMfx9Qal8Ren5xEP9Z9sJcNqbKVGta1WFtQzrgYbpVXXf/Luv0xS
YoVK8KaEACciD6XX4wmajrAXPPQgThvqQtXuTn/AxWsUDg1DK0tw1VRUuOJuJwpw
f6qocM9AyqUNvdeVyjFx8Slag34ZI7fmxPtHX/e6opTg3zVXab1Ow8AMICOHMRL6
m5/+wnWa9xMoKI4kYfk/QFqeTccnLDlwi6kQM8WRfbwr9AyPAoIBAQDU60wrX6Lm
0vIfngv1/4j/w+AGAwjvxiuJ7Q7LwQ2fGsZGOIfMK/lpBxCn543kGbQT+KQKNOjO
+EywObftnJ6Y2+om2NoLkCnCiptsfr5WlN8pxtIPQu2iu5xXA67WpQv4Nc4769PM
wJGVW3pmPKi6H0QjjqYAZd1NAXdN9Au14zZVh3KBWoz82kTHWKSL6Ld1UClG728W
k/moyCFFMMGTXX/LVliQzDVLM6L5jbAOaG317qAuxZIqFJ9NLwHFW9uH/i1S6Qfp
+lOmOfVYKu1O/qh1DUBQfuJkR1XIn2ifZEjxOsxeTmWu1LXpyoZy526JRu49pk8Z
DdEu+w7hsdNjAoIBAD1YWsub8Y6GJXpPcX9HpnzXXiOXN1VEUcs+kJyneFD4SMzS
U1gA3BS0tIaTv94tB28xUYdunwLAhkb/x+Mh95RxUwert+m5va0Ao1DsgeWw9tmJ
hrTptyYaUNV5/Pa1s2Tv9rvdLcd4hHDgDAGCQL4uzk4cvVCiOuHRe8YTorqig6N6
bvSz+2IelPbyyJzJkcXzTZoei+/oWkPJ340PWhXou0qwdrXIPgdkvXHVeGlE+t2p
qmyJi6vSp3Bb/sy1dq+5SFVtfBpBykmnA88ZdJ2EAge4RcJ150MqoIbVa8l/i9/v
tNnmRlAJF233+LFwx4L4VbBebIt3YlwyjDOj9J0CggEAIknKOGnsV/O8ni7bikAe
leG7X/x5IfPt6wZMDbAHO4oaSBCufcjPH4TNv9xgU014XIb8E9C1dS8zWmXRIujH
+aHgsWTWqGoM75FWukAm8taCob2s8lw63KwN301uiI6HwO8ZSTkPILgaOc1DhtdZ
7K9AT+GXBhVhcBc+WUVl5WKzy05GuGIWtlmIHfo+dXGCqdfA7fV9FEu8NtwTz4qs
gcja3aoIFTltk7C7HCkfIxLaMnK9RQr4IOK1TL63MEs8rUfXkLSKW7m+YtSOmCZB
lSkZg9AgfVYRq0h5nhddx91kicSISN+jLGaA7Sd6Q2LVwDG2CCOSNVyuRTyVBu+W
NQKCAQAWN6vB6oToNIoBLdOThm0HD07cNHcrnBjtaKsYsQDgqbr2m8LRCRzNRML4
jG0IAOWpuCiEGsgUPxywiI1Ufvyq7ZSNT1QQNzCR47NM3Ve6S2abrQkMIk9VJ+za
CB9c1BH92GokoRxqswb/BiMttG2EIP8L8/pSRYEcVnaaxAkf9QOhEwj4LJPGX0mS
t7kWIUVHPdFJ67F25dYr3mUHgyV+QJupQICkkkgY3nBOU1fS42vAugaxqH0wAP3T
53FlpY3NuE7+kYC3FjfcBer99F1pOac3X9jxhk26w9dr2/QNA33xhDXHKYvoLUCG
RPQylahJByU7IrtQzSCf/RE7q4v0
-----END PRIVATE KEY-----

View File

@@ -1,5 +0,0 @@
import pytest
# Custom assertions defined in the following package need to be registered to be properly
# displayed in a pytest report when they are failing.
pytest.register_assert_rewrite('certbot_integration_tests.certbot_tests.assertions')

View File

@@ -1,160 +0,0 @@
"""This module contains advanced assertions for the certbot integration tests."""
import os
try:
import grp
POSIX_MODE = True
except ImportError:
import win32api
import win32security
import ntsecuritycon
POSIX_MODE = False
EVERYBODY_SID = 'S-1-1-0'
SYSTEM_SID = 'S-1-5-18'
ADMINS_SID = 'S-1-5-32-544'
def assert_hook_execution(probe_path, probe_content):
"""
Assert that a certbot hook has been executed
:param probe_path: path to the file that received the hook output
:param probe_content: content expected when the hook is executed
"""
with open(probe_path, 'r') as file:
data = file.read()
lines = [line.strip() for line in data.splitlines()]
assert probe_content in lines
def assert_saved_renew_hook(config_dir, lineage):
"""
Assert that the renew hook configuration of a lineage has been saved.
:param config_dir: location of the certbot configuration
:param lineage: lineage domain name
"""
with open(os.path.join(config_dir, 'renewal', '{0}.conf'.format(lineage))) as file_h:
assert 'renew_hook' in file_h.read()
def assert_cert_count_for_lineage(config_dir, lineage, count):
"""
Assert the number of certificates generated for a lineage.
:param config_dir: location of the certbot configuration
:param lineage: lineage domain name
:param count: number of expected certificates
"""
archive_dir = os.path.join(config_dir, 'archive')
lineage_dir = os.path.join(archive_dir, lineage)
certs = [file for file in os.listdir(lineage_dir) if file.startswith('cert')]
assert len(certs) == count
def assert_equals_group_permissions(file1, file2):
"""
Assert that two files have the same permissions for group owner.
:param file1: first file path to compare
:param file2: second file path to compare
"""
# On Windows there is no group, so this assertion does nothing on this platform
if POSIX_MODE:
mode_file1 = os.stat(file1).st_mode & 0o070
mode_file2 = os.stat(file2).st_mode & 0o070
assert mode_file1 == mode_file2
def assert_equals_world_read_permissions(file1, file2):
"""
Assert that two files have the same read permissions for everyone.
:param file1: first file path to compare
:param file2: second file path to compare
"""
if POSIX_MODE:
mode_file1 = os.stat(file1).st_mode & 0o004
mode_file2 = os.stat(file2).st_mode & 0o004
else:
everybody = win32security.ConvertStringSidToSid(EVERYBODY_SID)
security1 = win32security.GetFileSecurity(file1, win32security.DACL_SECURITY_INFORMATION)
dacl1 = security1.GetSecurityDescriptorDacl()
mode_file1 = dacl1.GetEffectiveRightsFromAcl({
'TrusteeForm': win32security.TRUSTEE_IS_SID,
'TrusteeType': win32security.TRUSTEE_IS_USER,
'Identifier': everybody,
})
mode_file1 = mode_file1 & ntsecuritycon.FILE_GENERIC_READ
security2 = win32security.GetFileSecurity(file2, win32security.DACL_SECURITY_INFORMATION)
dacl2 = security2.GetSecurityDescriptorDacl()
mode_file2 = dacl2.GetEffectiveRightsFromAcl({
'TrusteeForm': win32security.TRUSTEE_IS_SID,
'TrusteeType': win32security.TRUSTEE_IS_USER,
'Identifier': everybody,
})
mode_file2 = mode_file2 & ntsecuritycon.FILE_GENERIC_READ
assert mode_file1 == mode_file2
def assert_equals_group_owner(file1, file2):
"""
Assert that two files have the same group owner.
:param file1: first file path to compare
:param file2: second file path to compare
"""
# On Windows there is no group, so this assertion does nothing on this platform
if POSIX_MODE:
group_owner_file1 = grp.getgrgid(os.stat(file1).st_gid)[0]
group_owner_file2 = grp.getgrgid(os.stat(file2).st_gid)[0]
assert group_owner_file1 == group_owner_file2
def assert_world_no_permissions(file):
"""
Assert that the given file grants no permissions at all to the world (others).
:param file: path of the file to check
"""
if POSIX_MODE:
mode_file_all = os.stat(file).st_mode & 0o007
assert mode_file_all == 0
else:
security = win32security.GetFileSecurity(file, win32security.DACL_SECURITY_INFORMATION)
dacl = security.GetSecurityDescriptorDacl()
mode = dacl.GetEffectiveRightsFromAcl({
'TrusteeForm': win32security.TRUSTEE_IS_SID,
'TrusteeType': win32security.TRUSTEE_IS_USER,
'Identifier': win32security.ConvertStringSidToSid(EVERYBODY_SID),
})
assert not mode
def assert_world_read_permissions(file):
"""
Assert that the given file is world-readable, but not world-writable or world-executable.
:param file: path of the file to check
"""
if POSIX_MODE:
mode_file_all = os.stat(file).st_mode & 0o007
assert mode_file_all == 4
else:
security = win32security.GetFileSecurity(file, win32security.DACL_SECURITY_INFORMATION)
dacl = security.GetSecurityDescriptorDacl()
mode = dacl.GetEffectiveRightsFromAcl({
'TrusteeForm': win32security.TRUSTEE_IS_SID,
'TrusteeType': win32security.TRUSTEE_IS_USER,
'Identifier': win32security.ConvertStringSidToSid(EVERYBODY_SID),
})
assert not mode & ntsecuritycon.FILE_GENERIC_WRITE
assert not mode & ntsecuritycon.FILE_GENERIC_EXECUTE
assert mode & ntsecuritycon.FILE_GENERIC_READ == ntsecuritycon.FILE_GENERIC_READ
def _get_current_user():
account_name = win32api.GetUserNameEx(win32api.NameSamCompatible)
return win32security.LookupAccountName(None, account_name)[0]
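The POSIX branches above isolate permission bits with octal masks: 0o070 keeps the group bits, 0o004 the world read bit, and 0o007 every bit granted to others. A minimal sketch of that arithmetic follows; the temporary file and its 0o640 mode are hypothetical, and the snippet is POSIX-only, purely to illustrate the masking the assertions rely on.
# Illustration of the octal masking used by the assertions above.
# The temporary file and the 0o640 mode are made up for this sketch (POSIX-only).
import os
import tempfile

fd, path = tempfile.mkstemp()
os.close(fd)
os.chmod(path, 0o640)  # owner rw, group r, others nothing

mode = os.stat(path).st_mode
assert mode & 0o070 == 0o040  # group bits: read only
assert mode & 0o004 == 0      # world read bit not set
assert mode & 0o007 == 0      # no permissions at all for "others"

os.remove(path)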

View File

@@ -1,83 +0,0 @@
"""Module to handle the context of integration tests."""
import logging
import os
import shutil
import sys
import tempfile
from certbot_integration_tests.utils import certbot_call
class IntegrationTestsContext(object):
"""General fixture describing a certbot integration tests context"""
def __init__(self, request):
self.request = request
if hasattr(request.config, 'slaveinput'): # Worker node
self.worker_id = request.config.slaveinput['slaveid']
acme_xdist = request.config.slaveinput['acme_xdist']
else: # Primary node
self.worker_id = 'primary'
acme_xdist = request.config.acme_xdist
self.acme_server = acme_xdist['acme_server']
self.directory_url = acme_xdist['directory_url']
self.tls_alpn_01_port = acme_xdist['https_port'][self.worker_id]
self.http_01_port = acme_xdist['http_port'][self.worker_id]
self.other_port = acme_xdist['other_port'][self.worker_id]
# The challtestsrv REST API, which exposes endpoints to register new DNS entries,
# listens on challtestsrv_port.
self.challtestsrv_port = acme_xdist['challtestsrv_port']
self.workspace = tempfile.mkdtemp()
self.config_dir = os.path.join(self.workspace, 'conf')
probe = tempfile.mkstemp(dir=self.workspace)
os.close(probe[0])
self.hook_probe = probe[1]
self.manual_dns_auth_hook = (
'{0} -c "import os; import requests; import json; '
"assert not os.environ.get('CERTBOT_DOMAIN').startswith('fail'); "
"data = {{'host':'_acme-challenge.{{0}}.'.format(os.environ.get('CERTBOT_DOMAIN')),"
"'value':os.environ.get('CERTBOT_VALIDATION')}}; "
"request = requests.post('http://localhost:{1}/set-txt', data=json.dumps(data)); "
"request.raise_for_status(); "
'"'
).format(sys.executable, self.challtestsrv_port)
self.manual_dns_cleanup_hook = (
'{0} -c "import os; import requests; import json; '
"data = {{'host':'_acme-challenge.{{0}}.'.format(os.environ.get('CERTBOT_DOMAIN'))}}; "
"request = requests.post('http://localhost:{1}/clear-txt', data=json.dumps(data)); "
"request.raise_for_status(); "
'"'
).format(sys.executable, self.challtestsrv_port)
def cleanup(self):
"""Cleanup the integration test context."""
shutil.rmtree(self.workspace)
def certbot(self, args, force_renew=True):
"""
Execute certbot with the given args, forcing certificate renewal by default.
:param args: args to pass to certbot
:param force_renew: set to False to not force renewal
:return: output of the certbot execution
"""
command = ['--authenticator', 'standalone', '--installer', 'null']
command.extend(args)
return certbot_call.certbot_test(
command, self.directory_url, self.http_01_port, self.tls_alpn_01_port,
self.config_dir, self.workspace, force_renew=force_renew)
def get_domain(self, subdomain='le'):
"""
Generate a certificate domain name suitable for distributed certbot integration tests.
This lets the test infrastructure know how to redirect the challenge check
from the ACME server to the relevant pytest-xdist worker. The resolution is done by
appending the pytest worker id to the subdomain, using this pattern:
{subdomain}.{worker_id}.wtf
:param subdomain: the subdomain to use in the generated domain (default 'le')
:return: the well-formed domain, suitable for the challenge redirection
"""
return '{0}.{1}.wtf'.format(subdomain, self.worker_id)
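get_domain() simply joins the subdomain and the pytest-xdist worker id, so worker gw0 asking for the default 'le' subdomain gets 'le.gw0.wtf'. The manual_dns_auth_hook built in __init__ is a python -c one-liner; written out as a plain script it is roughly equivalent to the sketch below, where the challtestsrv port 8055 is a hypothetical value and the CERTBOT_* variables are exported by certbot before the hook runs.
# Approximate, expanded body of manual_dns_auth_hook (illustrative only).
# Port 8055 is hypothetical; the real one comes from acme_xdist['challtestsrv_port'].
import json
import os

import requests

domain = os.environ.get('CERTBOT_DOMAIN')
assert not domain.startswith('fail')  # domains prefixed with 'fail' make the hook abort on purpose

data = {
    'host': '_acme-challenge.{0}.'.format(domain),
    'value': os.environ.get('CERTBOT_VALIDATION'),
}
response = requests.post('http://localhost:8055/set-txt', data=json.dumps(data))
response.raise_for_status()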

View File

@@ -1,596 +0,0 @@
"""Module executing integration tests against certbot core."""
from __future__ import print_function
import os
import re
import shutil
import subprocess
import time
from os.path import join, exists
import pytest
from certbot_integration_tests.certbot_tests import context as certbot_context
from certbot_integration_tests.certbot_tests.assertions import (
assert_hook_execution, assert_saved_renew_hook,
assert_cert_count_for_lineage,
assert_world_no_permissions, assert_world_read_permissions,
assert_equals_group_owner, assert_equals_group_permissions, assert_equals_world_read_permissions,
EVERYBODY_SID
)
from certbot_integration_tests.utils import misc
@pytest.fixture()
def context(request):
# Fixture request is a built-in pytest fixture describing current test request.
integration_test_context = certbot_context.IntegrationTestsContext(request)
try:
yield integration_test_context
finally:
integration_test_context.cleanup()
def test_basic_commands(context):
"""Test simple commands on Certbot CLI."""
# The TMPDIR env variable is set to the workspace for the certbot subprocess,
# so the tempfile module will create any temporary files/dirs in the workspace,
# and its content can be checked to verify correct certbot cleanup.
initial_count_tmpfiles = len(os.listdir(context.workspace))
context.certbot(['--help'])
context.certbot(['--help', 'all'])
context.certbot(['--version'])
with pytest.raises(subprocess.CalledProcessError):
context.certbot(['--csr'])
new_count_tmpfiles = len(os.listdir(context.workspace))
assert initial_count_tmpfiles == new_count_tmpfiles
def test_hook_dirs_creation(context):
"""Test all hooks directory are created during Certbot startup."""
context.certbot(['register'])
for hook_dir in misc.list_renewal_hooks_dirs(context.config_dir):
assert os.path.isdir(hook_dir)
def test_registration_override(context):
"""Test correct register/unregister, and registration override."""
context.certbot(['register'])
context.certbot(['unregister'])
context.certbot(['register', '--email', 'ex1@domain.org,ex2@domain.org'])
# TODO: When `certbot register --update-registration` is fully deprecated,
# delete the two following deprecated uses
context.certbot(['register', '--update-registration', '--email', 'ex1@domain.org'])
context.certbot(['register', '--update-registration', '--email', 'ex1@domain.org,ex2@domain.org'])
context.certbot(['update_account', '--email', 'example@domain.org'])
context.certbot(['update_account', '--email', 'ex1@domain.org,ex2@domain.org'])
def test_prepare_plugins(context):
"""Test that plugins are correctly instantiated and displayed."""
output = context.certbot(['plugins', '--init', '--prepare'])
assert 'webroot' in output
def test_http_01(context):
"""Test the HTTP-01 challenge using standalone plugin."""
# We start a server listening on the port for the
# TLS-SNI challenge to prevent regressions in #3601.
with misc.create_http_server(context.tls_alpn_01_port):
certname = context.get_domain('le2')
context.certbot([
'--domains', certname, '--preferred-challenges', 'http-01', 'run',
'--cert-name', certname,
'--pre-hook', misc.echo('wtf_pre', context.hook_probe),
'--post-hook', misc.echo('wtf_post', context.hook_probe),
'--deploy-hook', misc.echo('deploy', context.hook_probe),
])
assert_hook_execution(context.hook_probe, 'deploy')
assert_saved_renew_hook(context.config_dir, certname)
def test_manual_http_auth(context):
"""Test the HTTP-01 challenge using manual plugin."""
with misc.create_http_server(context.http_01_port) as webroot,\
misc.manual_http_hooks(webroot, context.http_01_port) as scripts:
certname = context.get_domain()
context.certbot([
'certonly', '-a', 'manual', '-d', certname,
'--cert-name', certname,
'--manual-auth-hook', scripts[0],
'--manual-cleanup-hook', scripts[1],
'--pre-hook', misc.echo('wtf_pre', context.hook_probe),
'--post-hook', misc.echo('wtf_post', context.hook_probe),
'--renew-hook', misc.echo('renew', context.hook_probe),
])
with pytest.raises(AssertionError):
assert_hook_execution(context.hook_probe, 'renew')
assert_saved_renew_hook(context.config_dir, certname)
def test_manual_dns_auth(context):
"""Test the DNS-01 challenge using manual plugin."""
certname = context.get_domain('dns')
context.certbot([
'-a', 'manual', '-d', certname, '--preferred-challenges', 'dns',
'run', '--cert-name', certname,
'--manual-auth-hook', context.manual_dns_auth_hook,
'--manual-cleanup-hook', context.manual_dns_cleanup_hook,
'--pre-hook', misc.echo('wtf_pre', context.hook_probe),
'--post-hook', misc.echo('wtf_post', context.hook_probe),
'--renew-hook', misc.echo('renew', context.hook_probe),
])
with pytest.raises(AssertionError):
assert_hook_execution(context.hook_probe, 'renew')
assert_saved_renew_hook(context.config_dir, certname)
context.certbot(['renew', '--cert-name', certname, '--authenticator', 'manual'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
def test_certonly(context):
"""Test the certonly verb on certbot."""
context.certbot(['certonly', '--cert-name', 'newname', '-d', context.get_domain('newname')])
def test_auth_and_install_with_csr(context):
"""Test certificate issuance and install using an existing CSR."""
certname = context.get_domain('le3')
key_path = join(context.workspace, 'key.pem')
csr_path = join(context.workspace, 'csr.der')
misc.generate_csr([certname], key_path, csr_path)
cert_path = join(context.workspace, 'csr', 'cert.pem')
chain_path = join(context.workspace, 'csr', 'chain.pem')
context.certbot([
'auth', '--csr', csr_path,
'--cert-path', cert_path,
'--chain-path', chain_path
])
print(misc.read_certificate(cert_path))
print(misc.read_certificate(chain_path))
context.certbot([
'--domains', certname, 'install',
'--cert-path', cert_path,
'--key-path', key_path
])
def test_renew_files_permissions(context):
"""Test proper certificate file permissions upon renewal"""
certname = context.get_domain('renew')
context.certbot(['-d', certname])
privkey1 = join(context.config_dir, 'archive', certname, 'privkey1.pem')
privkey2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
assert_cert_count_for_lineage(context.config_dir, certname, 1)
assert_world_no_permissions(privkey1)
context.certbot(['renew'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
assert_world_no_permissions(privkey2)
assert_equals_group_owner(privkey1, privkey2)
assert_equals_world_read_permissions(privkey1, privkey2)
assert_equals_group_permissions(privkey1, privkey2)
def test_renew_with_hook_scripts(context):
"""Test certificate renewal with script hooks."""
certname = context.get_domain('renew')
context.certbot(['-d', certname])
assert_cert_count_for_lineage(context.config_dir, certname, 1)
misc.generate_test_file_hooks(context.config_dir, context.hook_probe)
context.certbot(['renew'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
assert_hook_execution(context.hook_probe, 'deploy')
def test_renew_files_propagate_permissions(context):
"""Test proper certificate renewal with custom permissions propagated on private key."""
certname = context.get_domain('renew')
context.certbot(['-d', certname])
assert_cert_count_for_lineage(context.config_dir, certname, 1)
privkey1 = join(context.config_dir, 'archive', certname, 'privkey1.pem')
privkey2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
if os.name != 'nt':
os.chmod(privkey1, 0o444)
else:
import win32security
import ntsecuritycon
# Get the current DACL of the private key
security = win32security.GetFileSecurity(privkey1, win32security.DACL_SECURITY_INFORMATION)
dacl = security.GetSecurityDescriptorDacl()
# Create a read permission for Everybody group
everybody = win32security.ConvertStringSidToSid(EVERYBODY_SID)
dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ, everybody)
# Apply the updated DACL to the private key
security.SetSecurityDescriptorDacl(1, dacl, 0)
win32security.SetFileSecurity(privkey1, win32security.DACL_SECURITY_INFORMATION, security)
context.certbot(['renew'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
if os.name != 'nt':
# On Linux, the world read permission and all group permissions are copied from the previous private key
assert_world_read_permissions(privkey2)
assert_equals_world_read_permissions(privkey1, privkey2)
assert_equals_group_permissions(privkey1, privkey2)
else:
# On Windows, world will never have any permissions, and group permission is irrelevant for this platform
assert_world_no_permissions(privkey2)
def test_graceful_renew_it_is_not_time(context):
"""Test graceful renew is not done when it is not due time."""
certname = context.get_domain('renew')
context.certbot(['-d', certname])
assert_cert_count_for_lineage(context.config_dir, certname, 1)
context.certbot(['renew', '--deploy-hook', misc.echo('deploy', context.hook_probe)],
force_renew=False)
assert_cert_count_for_lineage(context.config_dir, certname, 1)
with pytest.raises(AssertionError):
assert_hook_execution(context.hook_probe, 'deploy')
def test_graceful_renew_it_is_time(context):
"""Test graceful renew is done when it is due time."""
certname = context.get_domain('renew')
context.certbot(['-d', certname])
assert_cert_count_for_lineage(context.config_dir, certname, 1)
with open(join(context.config_dir, 'renewal', '{0}.conf'.format(certname)), 'r') as file:
lines = file.readlines()
lines.insert(4, 'renew_before_expiry = 100 years{0}'.format(os.linesep))
with open(join(context.config_dir, 'renewal', '{0}.conf'.format(certname)), 'w') as file:
file.writelines(lines)
context.certbot(['renew', '--deploy-hook', misc.echo('deploy', context.hook_probe)],
force_renew=False)
assert_cert_count_for_lineage(context.config_dir, certname, 2)
assert_hook_execution(context.hook_probe, 'deploy')
def test_renew_with_changed_private_key_complexity(context):
"""Test proper renew with updated private key complexity."""
certname = context.get_domain('renew')
context.certbot(['-d', certname, '--rsa-key-size', '4096'])
key1 = join(context.config_dir, 'archive', certname, 'privkey1.pem')
assert os.stat(key1).st_size > 3000 # a 4096-bit key takes more than 3000 bytes
assert_cert_count_for_lineage(context.config_dir, certname, 1)
context.certbot(['renew'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
assert os.stat(key2).st_size > 3000
context.certbot(['renew', '--rsa-key-size', '2048'])
assert_cert_count_for_lineage(context.config_dir, certname, 3)
key3 = join(context.config_dir, 'archive', certname, 'privkey3.pem')
assert os.stat(key3).st_size < 1800 # a 2048-bit key takes less than 1800 bytes
def test_renew_ignoring_directory_hooks(context):
"""Test hooks are ignored during renewal with relevant CLI flag."""
certname = context.get_domain('renew')
context.certbot(['-d', certname])
assert_cert_count_for_lineage(context.config_dir, certname, 1)
misc.generate_test_file_hooks(context.config_dir, context.hook_probe)
context.certbot(['renew', '--no-directory-hooks'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
with pytest.raises(AssertionError):
assert_hook_execution(context.hook_probe, 'deploy')
def test_renew_empty_hook_scripts(context):
"""Test proper renew with empty hook scripts."""
certname = context.get_domain('renew')
context.certbot(['-d', certname])
assert_cert_count_for_lineage(context.config_dir, certname, 1)
misc.generate_test_file_hooks(context.config_dir, context.hook_probe)
for hook_dir in misc.list_renewal_hooks_dirs(context.config_dir):
shutil.rmtree(hook_dir)
os.makedirs(join(hook_dir, 'dir'))
open(join(hook_dir, 'file'), 'w').close()
context.certbot(['renew'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
def test_renew_hook_override(context):
"""Test correct hook override on renew."""
certname = context.get_domain('override')
context.certbot([
'certonly', '-d', certname,
'--preferred-challenges', 'http-01',
'--pre-hook', misc.echo('pre', context.hook_probe),
'--post-hook', misc.echo('post', context.hook_probe),
'--deploy-hook', misc.echo('deploy', context.hook_probe),
])
assert_hook_execution(context.hook_probe, 'pre')
assert_hook_execution(context.hook_probe, 'post')
assert_hook_execution(context.hook_probe, 'deploy')
# Now we override all previous hooks during next renew.
open(context.hook_probe, 'w').close()
context.certbot([
'renew', '--cert-name', certname,
'--pre-hook', misc.echo('pre_override', context.hook_probe),
'--post-hook', misc.echo('post_override', context.hook_probe),
'--deploy-hook', misc.echo('deploy_override', context.hook_probe),
])
assert_hook_execution(context.hook_probe, 'pre_override')
assert_hook_execution(context.hook_probe, 'post_override')
assert_hook_execution(context.hook_probe, 'deploy_override')
with pytest.raises(AssertionError):
assert_hook_execution(context.hook_probe, 'pre')
with pytest.raises(AssertionError):
assert_hook_execution(context.hook_probe, 'post')
with pytest.raises(AssertionError):
assert_hook_execution(context.hook_probe, 'deploy')
# Expect that this renew will reuse new hooks registered in the previous renew.
open(context.hook_probe, 'w').close()
context.certbot(['renew', '--cert-name', certname])
assert_hook_execution(context.hook_probe, 'pre_override')
assert_hook_execution(context.hook_probe, 'post_override')
assert_hook_execution(context.hook_probe, 'deploy_override')
def test_invalid_domain_with_dns_challenge(context):
"""Test certificate issuance failure with DNS-01 challenge."""
# The manual DNS auth hook from the test context is designed to fail if the domain starts with 'fail'.
domains = ','.join([context.get_domain('dns1'), context.get_domain('fail-dns1')])
context.certbot([
'-a', 'manual', '-d', domains,
'--allow-subset-of-names',
'--preferred-challenges', 'dns',
'--manual-auth-hook', context.manual_dns_auth_hook,
'--manual-cleanup-hook', context.manual_dns_cleanup_hook
])
output = context.certbot(['certificates'])
assert context.get_domain('fail-dns1') not in output
def test_reuse_key(context):
"""Test various scenarios where a key is reused."""
certname = context.get_domain('reusekey')
context.certbot(['--domains', certname, '--reuse-key'])
context.certbot(['renew', '--cert-name', certname])
with open(join(context.config_dir, 'archive/{0}/privkey1.pem').format(certname), 'r') as file:
privkey1 = file.read()
with open(join(context.config_dir, 'archive/{0}/privkey2.pem').format(certname), 'r') as file:
privkey2 = file.read()
assert privkey1 == privkey2
context.certbot(['--cert-name', certname, '--domains', certname, '--force-renewal'])
with open(join(context.config_dir, 'archive/{0}/privkey3.pem').format(certname), 'r') as file:
privkey3 = file.read()
assert privkey2 != privkey3
with open(join(context.config_dir, 'archive/{0}/cert1.pem').format(certname), 'r') as file:
cert1 = file.read()
with open(join(context.config_dir, 'archive/{0}/cert2.pem').format(certname), 'r') as file:
cert2 = file.read()
with open(join(context.config_dir, 'archive/{0}/cert3.pem').format(certname), 'r') as file:
cert3 = file.read()
assert len({cert1, cert2, cert3}) == 3
def test_ecdsa(context):
"""Test certificate issuance with ECDSA key."""
key_path = join(context.workspace, 'privkey-p384.pem')
csr_path = join(context.workspace, 'csr-p384.der')
cert_path = join(context.workspace, 'cert-p384.pem')
chain_path = join(context.workspace, 'chain-p384.pem')
misc.generate_csr([context.get_domain('ecdsa')], key_path, csr_path, key_type=misc.ECDSA_KEY_TYPE)
context.certbot(['auth', '--csr', csr_path, '--cert-path', cert_path, '--chain-path', chain_path])
certificate = misc.read_certificate(cert_path)
assert 'ASN1 OID: secp384r1' in certificate
def test_ocsp_must_staple(context):
"""Test that OCSP Must-Staple is correctly set in the generated certificate."""
if context.acme_server == 'pebble':
pytest.skip('Pebble does not support OCSP Must-Staple.')
certname = context.get_domain('must-staple')
context.certbot(['auth', '--must-staple', '--domains', certname])
certificate = misc.read_certificate(join(context.config_dir,
'live/{0}/cert.pem').format(certname))
assert 'status_request' in certificate or '1.3.6.1.5.5.7.1.24' in certificate
def test_revoke_simple(context):
"""Test various scenarios that revokes a certificate."""
# Default action after revoke is to delete the certificate.
certname = context.get_domain()
cert_path = join(context.config_dir, 'live', certname, 'cert.pem')
context.certbot(['-d', certname])
context.certbot(['revoke', '--cert-path', cert_path, '--delete-after-revoke'])
assert not exists(cert_path)
# Check default deletion is overridden.
certname = context.get_domain('le1')
cert_path = join(context.config_dir, 'live', certname, 'cert.pem')
context.certbot(['-d', certname])
context.certbot(['revoke', '--cert-path', cert_path, '--no-delete-after-revoke'])
assert exists(cert_path)
context.certbot(['delete', '--cert-name', certname])
assert not exists(join(context.config_dir, 'archive', certname))
assert not exists(join(context.config_dir, 'live', certname))
assert not exists(join(context.config_dir, 'renewal', '{0}.conf'.format(certname)))
certname = context.get_domain('le2')
key_path = join(context.config_dir, 'live', certname, 'privkey.pem')
cert_path = join(context.config_dir, 'live', certname, 'cert.pem')
context.certbot(['-d', certname])
context.certbot(['revoke', '--cert-path', cert_path, '--key-path', key_path])
def test_revoke_and_unregister(context):
"""Test revoke with a reason then unregister."""
cert1 = context.get_domain('le1')
cert2 = context.get_domain('le2')
cert3 = context.get_domain('le3')
cert_path1 = join(context.config_dir, 'live', cert1, 'cert.pem')
key_path2 = join(context.config_dir, 'live', cert2, 'privkey.pem')
cert_path2 = join(context.config_dir, 'live', cert2, 'cert.pem')
context.certbot(['-d', cert1])
context.certbot(['-d', cert2])
context.certbot(['-d', cert3])
context.certbot(['revoke', '--cert-path', cert_path1,
'--reason', 'cessationOfOperation'])
context.certbot(['revoke', '--cert-path', cert_path2, '--key-path', key_path2,
'--reason', 'keyCompromise'])
context.certbot(['unregister'])
output = context.certbot(['certificates'])
assert cert1 not in output
assert cert2 not in output
assert cert3 in output
def test_revoke_mutual_exclusive_flags(context):
"""Test --cert-path and --cert-name cannot be used during revoke."""
cert = context.get_domain('le1')
context.certbot(['-d', cert])
with pytest.raises(subprocess.CalledProcessError) as error:
context.certbot([
'revoke', '--cert-name', cert,
'--cert-path', join(context.config_dir, 'live', cert, 'fullchain.pem')
])
assert 'Exactly one of --cert-path or --cert-name must be specified' in error.out
def test_revoke_multiple_lineages(context):
"""Test revoke does not delete certs if multiple lineages share the same dir."""
cert1 = context.get_domain('le1')
context.certbot(['-d', cert1])
assert os.path.isfile(join(context.config_dir, 'renewal', '{0}.conf'.format(cert1)))
cert2 = context.get_domain('le2')
context.certbot(['-d', cert2])
# Copy over renewal configuration of cert1 into renewal configuration of cert2.
with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'r') as file:
data = file.read()
data = re.sub('archive_dir = .*\n',
'archive_dir = {0}\n'.format(join(context.config_dir, 'archive', cert1).replace('\\', '\\\\')),
data)
with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'w') as file:
file.write(data)
output = context.certbot([
'revoke', '--cert-path', join(context.config_dir, 'live', cert1, 'cert.pem')
])
assert 'Not deleting revoked certs due to overlapping archive dirs' in output
def test_wildcard_certificates(context):
"""Test wildcard certificate issuance."""
if context.acme_server == 'boulder-v1':
pytest.skip('Wildcard certificates are not supported on ACME v1')
certname = context.get_domain('wild')
context.certbot([
'-a', 'manual', '-d', '*.{0},{0}'.format(certname),
'--preferred-challenge', 'dns',
'--manual-auth-hook', context.manual_dns_auth_hook,
'--manual-cleanup-hook', context.manual_dns_cleanup_hook
])
assert exists(join(context.config_dir, 'live', certname, 'fullchain.pem'))
def test_ocsp_status_stale(context):
"""Test retrieval of OCSP statuses for staled config"""
sample_data_path = misc.load_sample_data_path(context.workspace)
output = context.certbot(['certificates', '--config-dir', sample_data_path])
assert output.count('TEST_CERT') == 2, ('Did not find two test certs as expected ({0})'
.format(output.count('TEST_CERT')))
assert output.count('EXPIRED') == 2, ('Did not find two expired certs as expected ({0})'
.format(output.count('EXPIRED')))
def test_ocsp_status_live(context):
"""Test retrieval of OCSP statuses for live config"""
cert = context.get_domain('ocsp-check')
# OCSP 1: Check live certificate OCSP status (VALID)
context.certbot(['--domains', cert])
output = context.certbot(['certificates'])
assert output.count('VALID') == 1, 'Expected {0} to be VALID'.format(cert)
assert output.count('EXPIRED') == 0, 'Did not expect {0} to be EXPIRED'.format(cert)
# OCSP 2: Check live certificate OCSP status (REVOKED)
context.certbot(['revoke', '--cert-name', cert, '--no-delete-after-revoke'])
# Sometimes in the oldest tests (using the openssl binary rather than cryptography), the OCSP status is
# not immediately seen as invalid by Certbot. Waiting a few seconds resolves this transient issue.
time.sleep(5)
output = context.certbot(['certificates'])
assert output.count('INVALID') == 1, 'Expected {0} to be INVALID'.format(cert)
assert output.count('REVOKED') == 1, 'Expected {0} to be REVOKED'.format(cert)

View File

@@ -1,95 +0,0 @@
"""
General conftest for the pytest execution of all integration tests
in the certbot_integration_tests package.
As stated in the pytest documentation, a conftest module is used to set
a directory-specific configuration using built-in pytest hooks.
See https://docs.pytest.org/en/latest/reference.html#hook-reference
"""
import contextlib
import sys
import subprocess
from certbot_integration_tests.utils import acme_server as acme_lib
def pytest_addoption(parser):
"""
Standard pytest hook to add options to the pytest parser.
:param parser: current pytest parser that will be used on the CLI
"""
parser.addoption('--acme-server', default='pebble',
choices=['boulder-v1', 'boulder-v2', 'pebble'],
help='select the ACME server to use (boulder-v1, boulder-v2, '
'pebble), defaulting to pebble')
def pytest_configure(config):
"""
Standard pytest hook used to add configuration logic for each node of a pytest run.
:param config: the current pytest configuration
"""
if not hasattr(config, 'slaveinput'): # If true, this is the primary node
with _print_on_err():
config.acme_xdist = _setup_primary_node(config)
def pytest_configure_node(node):
"""
Standard pytest-xdist hook used to configure a worker node.
:param node: current worker node
"""
node.slaveinput['acme_xdist'] = node.config.acme_xdist
@contextlib.contextmanager
def _print_on_err():
"""
During pytest-xdist setup, stdout is used for inter-node communication, so printing to it is useless.
However, stderr is still available. This context manager redirects stdout to stderr
for the duration of the context, allowing print output to reach the user.
"""
old_stdout = sys.stdout
sys.stdout = sys.stderr
try:
yield
finally:
sys.stdout = old_stdout
def _setup_primary_node(config):
"""
Set up the environment for the integration tests.
This will:
- check runtime compatibility (Docker, docker-compose, Nginx)
- create a temporary workspace and the persistent Git repositories space
- configure and start parallelized ACME CA servers using Docker
- transfer the ACME CA server configurations to the pytest worker nodes
:param config: Configuration of the pytest primary node
"""
# Check for runtime compatibility: some tools are required to be available in PATH
if 'boulder' in config.option.acme_server:
try:
subprocess.check_output(['docker', '-v'], stderr=subprocess.STDOUT)
except (subprocess.CalledProcessError, OSError):
raise ValueError('Error: docker is required in PATH to launch the integration tests on '
'boulder, but is not installed or not available for the current user.')
try:
subprocess.check_output(['docker-compose', '-v'], stderr=subprocess.STDOUT)
except (subprocess.CalledProcessError, OSError):
raise ValueError('Error: docker-compose is required in PATH to launch the integration tests, '
'but is not installed or not available for the current user.')
# Parameter numprocesses is added to option by pytest-xdist
workers = ['primary'] if not config.option.numprocesses\
else ['gw{0}'.format(i) for i in range(config.option.numprocesses)]
# Starting the ACMEServer below ensures that all necessary ACME server instances are
# fully up. The resulting runtime configuration is reflected in the acme_xdist mapping returned.
acme_server = acme_lib.ACMEServer(config.option.acme_server, workers)
config.add_cleanup(acme_server.stop)
print('ACME xdist config:\n{0}'.format(acme_server.acme_xdist))
acme_server.start()
return acme_server.acme_xdist
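For reference, the acme_xdist object handed from the primary node to the workers is a plain mapping. The sketch below shows its general shape; the keys mirror what certbot_tests.context.IntegrationTestsContext reads, while every value shown is made up (the real ones come from acme_server.acme_xdist once the Docker services are running).
# Illustrative shape only; every value below is hypothetical.
acme_xdist = {
    'acme_server': 'pebble',
    'directory_url': 'https://localhost:14000/dir',
    'challtestsrv_port': 8055,
    # Per-worker port mappings, indexed by the pytest-xdist worker id.
    'https_port': {'primary': 5001, 'gw0': 5011},
    'http_port': {'primary': 5002, 'gw0': 5012},
    'other_port': {'primary': 5003, 'gw0': 5013},
}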

View File

@@ -1,61 +0,0 @@
import os
import subprocess
from certbot_integration_tests.certbot_tests import context as certbot_context
from certbot_integration_tests.utils import misc, certbot_call
from certbot_integration_tests.nginx_tests import nginx_config as config
class IntegrationTestsContext(certbot_context.IntegrationTestsContext):
"""General fixture describing a certbot-nginx integration tests context"""
def __init__(self, request):
super(IntegrationTestsContext, self).__init__(request)
self.nginx_root = os.path.join(self.workspace, 'nginx')
os.mkdir(self.nginx_root)
self.webroot = os.path.join(self.nginx_root, 'webroot')
os.mkdir(self.webroot)
with open(os.path.join(self.webroot, 'index.html'), 'w') as file_handler:
file_handler.write('Hello World!')
self.nginx_config_path = os.path.join(self.nginx_root, 'nginx.conf')
self.nginx_config = None
default_server = request.param['default_server']
self.process = self._start_nginx(default_server)
def cleanup(self):
self._stop_nginx()
super(IntegrationTestsContext, self).cleanup()
def certbot_test_nginx(self, args):
"""
Main command to execute certbot using the nginx plugin.
:param list args: list of arguments to pass to certbot
:return: output of the certbot execution
"""
command = ['--authenticator', 'nginx', '--installer', 'nginx',
'--nginx-server-root', self.nginx_root]
command.extend(args)
return certbot_call.certbot_test(
command, self.directory_url, self.http_01_port, self.tls_alpn_01_port,
self.config_dir, self.workspace, force_renew=True)
def _start_nginx(self, default_server):
self.nginx_config = config.construct_nginx_config(
self.nginx_root, self.webroot, self.http_01_port, self.tls_alpn_01_port,
self.other_port, default_server, wtf_prefix=self.worker_id)
with open(self.nginx_config_path, 'w') as file:
file.write(self.nginx_config)
process = subprocess.Popen(['nginx', '-c', self.nginx_config_path, '-g', 'daemon off;'])
assert process.poll() is None
misc.check_until_timeout('http://localhost:{0}'.format(self.http_01_port))
return process
def _stop_nginx(self):
assert self.process.poll() is None
self.process.terminate()
self.process.wait()

View File

@@ -1,126 +0,0 @@
"""General purpose nginx test configuration generator."""
import getpass
import pkg_resources
def construct_nginx_config(nginx_root, nginx_webroot, http_port, https_port, other_port,
default_server, key_path=None, cert_path=None, wtf_prefix='le'):
"""
This method returns a full nginx configuration suitable for integration tests.
:param str nginx_root: nginx root configuration path
:param str nginx_webroot: nginx webroot path
:param int http_port: HTTP port to listen on
:param int https_port: HTTPS port to listen on
:param int other_port: other HTTP port to listen on
:param bool default_server: True to set a default server in nginx config, False otherwise
:param str key_path: the path to an SSL key
:param str cert_path: the path to an SSL certificate
:param str wtf_prefix: the prefix to use in all domains handled by this nginx config
:return: a string containing the full nginx configuration
:rtype: str
"""
key_path = key_path if key_path \
else pkg_resources.resource_filename('certbot_integration_tests', 'assets/key.pem')
cert_path = cert_path if cert_path \
else pkg_resources.resource_filename('certbot_integration_tests', 'assets/cert.pem')
return '''\
# This error log will be written regardless of server scope error_log
# definitions, so we have to set this here in the main scope.
#
# Even doing this, Nginx will still try to create the default error file, and
# log a non-fatal error when it fails. After that things will work, however.
error_log {nginx_root}/error.log;
# The pidfile will be written to /var/run unless this is set.
pid {nginx_root}/nginx.pid;
user {user};
worker_processes 1;
events {{
worker_connections 1024;
}}
http {{
# Set an array of temp, cache and log file options that will otherwise default to
# restricted locations accessible only to root.
client_body_temp_path {nginx_root}/client_body;
fastcgi_temp_path {nginx_root}/fastcgi_temp;
proxy_temp_path {nginx_root}/proxy_temp;
#scgi_temp_path {nginx_root}/scgi_temp;
#uwsgi_temp_path {nginx_root}/uwsgi_temp;
access_log {nginx_root}/error.log;
# This should be turned off in a Virtualbox VM, as it can cause some
# interesting issues with data corruption in delivered files.
sendfile off;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
#include /etc/nginx/mime.types;
index index.html index.htm index.php;
log_format main '$remote_addr - $remote_user [$time_local] $status '
'"$request" $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
default_type application/octet-stream;
server {{
# IPv4.
listen {http_port} {default_server};
# IPv6.
listen [::]:{http_port} {default_server};
server_name nginx.{wtf_prefix}.wtf nginx2.{wtf_prefix}.wtf;
root {nginx_webroot};
location / {{
# First attempt to serve request as file, then as directory, then fall
# back to index.html.
try_files $uri $uri/ /index.html;
}}
}}
server {{
listen {http_port};
listen [::]:{http_port};
server_name nginx3.{wtf_prefix}.wtf;
root {nginx_webroot};
location /.well-known/ {{
return 404;
}}
return 301 https://$host$request_uri;
}}
server {{
listen {other_port};
listen [::]:{other_port};
server_name nginx4.{wtf_prefix}.wtf nginx5.{wtf_prefix}.wtf;
}}
server {{
listen {http_port};
listen [::]:{http_port};
listen {https_port} ssl;
listen [::]:{https_port} ssl;
if ($scheme != "https") {{
return 301 https://$host$request_uri;
}}
server_name nginx6.{wtf_prefix}.wtf nginx7.{wtf_prefix}.wtf;
ssl_certificate {cert_path};
ssl_certificate_key {key_path};
}}
}}
'''.format(nginx_root=nginx_root, nginx_webroot=nginx_webroot, user=getpass.getuser(),
http_port=http_port, https_port=https_port, other_port=other_port,
default_server='default_server' if default_server else '', wtf_prefix=wtf_prefix,
key_path=key_path, cert_path=cert_path)
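A possible standalone use of construct_nginx_config outside the nginx test context: render a configuration for made-up ports, write it to disk, and optionally let an nginx binary validate it. The ports, the temporary directory, and the nginx -t check are illustrative assumptions rather than part of the test suite.
# Hypothetical usage sketch of construct_nginx_config.
import os
import subprocess
import tempfile

from certbot_integration_tests.nginx_tests.nginx_config import construct_nginx_config

nginx_root = tempfile.mkdtemp()
webroot = os.path.join(nginx_root, 'webroot')
os.mkdir(webroot)

conf = construct_nginx_config(nginx_root, webroot, http_port=5002, https_port=5001,
                              other_port=5003, default_server=True, wtf_prefix='gw0')

conf_path = os.path.join(nginx_root, 'nginx.conf')
with open(conf_path, 'w') as file_handler:
    file_handler.write(conf)

# Optional sanity check, only if an nginx binary is available on PATH.
subprocess.check_call(['nginx', '-t', '-c', conf_path])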

View File

@@ -1,54 +0,0 @@
"""Module executing integration tests against certbot with nginx plugin."""
import os
import ssl
import pytest
from certbot_integration_tests.nginx_tests import context as nginx_context
@pytest.fixture()
def context(request):
# Fixture request is a built-in pytest fixture describing current test request.
integration_test_context = nginx_context.IntegrationTestsContext(request)
try:
yield integration_test_context
finally:
integration_test_context.cleanup()
@pytest.mark.parametrize('certname_pattern, params, context', [
('nginx.{0}.wtf', ['run'], {'default_server': True}),
('nginx2.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': True}),
# Overlapping location block and server-block-level return 301
('nginx3.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': True}),
# No matching server block; default_server exists
('nginx4.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': True}),
# No matching server block; default_server does not exist
('nginx5.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': False}),
# Multiple domains, mix of matching and not
('nginx6.{0}.wtf,nginx7.{0}.wtf', ['--preferred-challenges', 'http'], {'default_server': False}),
], indirect=['context'])
def test_certificate_deployment(certname_pattern, params, context):
# type: (str, list, nginx_context.IntegrationTestsContext) -> None
"""
Test various scenarios to deploy a certificate to nginx using certbot.
"""
domains = certname_pattern.format(context.worker_id)
command = ['--domains', domains]
command.extend(params)
context.certbot_test_nginx(command)
lineage = domains.split(',')[0]
server_cert = ssl.get_server_certificate(('localhost', context.tls_alpn_01_port))
with open(os.path.join(context.workspace, 'conf/live/{0}/cert.pem'.format(lineage)), 'r') as file:
certbot_cert = file.read()
assert server_cert == certbot_cert
context.certbot_test_nginx(['rollback', '--checkpoints', '1'])
with open(context.nginx_config_path, 'r') as file_h:
current_nginx_config = file_h.read()
assert context.nginx_config == current_nginx_config

Some files were not shown because too many files have changed in this diff.