Compare commits: test-exit...nginx-ipv6 (186 commits)
Commits (SHA1):
8b59032614, c0a7600deb, 886b919659, 296b4aaeea, 95a7d45856, 99f00d21c4, 7c11158810, 232f5a92d1,
1081a2501f, 03cbe9dd86, da3ae1611d, f4ee96ea6e, f986c45d60, 994a6ce114, cacc40817b, d2c16fcb62,
a8051b58eb, 2d4f36cc9f, 13b4a4e1c2, 1f258449a4, 3087b436f3, 356471cdf6, b9d129bd43, b0e5809df2,
46052f826c, 5f6b1378ec, 34d78ff626, cad7d4c8ed, ba84b7ab49, 7412099567, 85deca588f, 8b7d6c4ea3,
36d5221bac, 1ce813c3cc, ade01d618b, 5a4028c763, 48fd7ee260, 6aabb31eb5, 3acde31ed3, e7884898ec,
9be4fedeec, f0caf5b04f, f6be07da74, 7c16e0da26, 03624fa9db, d3a00a97a3, 4bc0c83ca7, 7d0a77ffcf,
837f691992, 174a006d9c, b529250535, 134d499b07, 68283940cd, 82d0ff1df2, d4fe812508, 6988491b67,
1a79f82082, 9fb132ba69, a7267b0fcd, 756c44f7af, d710c441e2, 8ad18cbe6e, c6bdad4ffb, 39696456db,
bbf397a9f9, 7cb8c1264f, b6f8a477b8, 06dd645e85, 2bfc92e58d, ae0be73b53, 8d362d4469, 133f636817,
df71ec33b3, b2b3285bf5, b43bf8f94a, a5fae7eab5, 8ca36a0b62, c33ee0e2df, 56db211367, cd2e70e9cd,
2c9e072a9f, 4d72fa42e3, 16ed141301, af322a6068, 48c890be61, 5e58580d13, 6c52cc49a7, 47b3d19170,
32de7303dd, f9ed53e698, f31cb5f812, 0ac21e47c7, f68fba2be2, d4676610e9, 0b94d6aa18, e6c9e2a868,
5508d1dd12, c0d10bba5a, 4f74b8eb7a, 744c993040, 912d235466, 314f4bbe22, 7461bdbffd, 9288cddfe9,
d6a7e2d1fe, 0321c0cb4c, 142ced234b, 43bea2edb3, 5845f186ed, c314ec0474, 447d3d867d, df8b374916,
7d5ccd006b, bb6a22b985, 7f19ac5e3d, 9878f15966, 1cf8c5a586, 56ce87db27, fa74a32245, b3004fe6cf,
278194fe6d, d327c1c28f, 7d0e4d7bad, d962b2605a, c779be8b88, f5580598cd, 7ad41dc3ef, a8b5dfc76c,
0f30f9e96f, 6090fe9651, de1f9c4fe3, 7ed2e91cc7, 331d12ed50, 29d80f334f, 6ede309c6a, 7efa213b22,
a7a8e060e3, bc3765d6d0, fad1a4b576, 368beee8bf, 3a9150a7ba, ab286e0887, 62bdf663f2, 48ef16ab0d,
d0ecf739bd, 57e56cc97b, 1f3b028398, b23384438f, c3c1609fa0, f314ea1d33, 6bb95c6596, 72b1a6f9cd,
bf763cbbc6, 5318945267, 72c480ef18, 054873034c, d118acf524, ec35828b9a, 97b22da1b6, 595745e044,
1c06144e18, 62327b49c3, 4c19d19cf5, 6dedfa62b6, 8a664622ea, 1b65ba88d8, e94ee31a6f, 32fa3b1d04,
ad4ed22932, feffeb275b, 4243db1525, 220d486190, ed4be4117c, 5cf82e4843, af354e9099, d57e8bfaa3,
828363b21a, 7d17919527, 33306de90b, f4094e4d3f, 03f6c6d0e5, 6aa21d1db6, bdf02c9fcc, 15c6c1388e,
be457ffa95, d3549e18a7
@@ -5,7 +5,7 @@ cache:
- $HOME/.cache/pip

before_install:
- '[ $TRAVIS_OS_NAME == linux ] && dpkg -s libaugeas0 || brew install augeas python3'
- '([ $TRAVIS_OS_NAME == linux ] && dpkg -s libaugeas0) || (brew update && brew install augeas python3)'

before_script:
- 'if [ $TRAVIS_OS_NAME = osx ] ; then ulimit -n 1024 ; fi'

@@ -161,7 +161,9 @@ addons:
- libapache2-mod-macro

install: "travis_retry pip install tox coveralls"
script: 'travis_retry tox && ([ "xxx$BOULDER_INTEGRATION" = "xxx" ] || ./tests/travis-integration.sh)'
script:
- travis_retry tox
- '[ -z "${BOULDER_INTEGRATION+x}" ] || (travis_retry tests/boulder-fetch.sh && tests/tox-boulder-integration.sh)'

after_success: '[ "$TOXENV" == "cover" ] && coveralls'

@@ -169,8 +171,7 @@ notifications:
email: false
irc:
channels:
- "chat.freenode.net#letsencrypt"
- secure: "SGWZl3ownKx9xKVV2VnGt7DqkTmutJ89oJV9tjKhSs84kLijU6EYdPnllqISpfHMTxXflNZuxtGo0wTDYHXBuZL47w1O32W6nzuXdra5zC+i4sYQwYULUsyfOv9gJX8zWAULiK0Z3r0oho45U+FR5ZN6TPCidi8/eGU+EEPwaAw="
on_success: never
on_failure: always
use_notice: true
skip_join: true
CHANGELOG.md (189 changed lines)
@@ -2,6 +2,195 @@

Certbot adheres to [Semantic Versioning](http://semver.org/).

## 0.19.0 - 2017-10-04

### Added

* Certbot now has renewal hook directories where executable files can be placed
  for Certbot to run with the renew subcommand. Pre-hooks, deploy-hooks, and
  post-hooks can be specified in the renewal-hooks/pre, renewal-hooks/deploy,
  and renewal-hooks/post directories respectively in Certbot's configuration
  directory (which is /etc/letsencrypt by default). Certbot will automatically
  create these directories when it is run if they do not already exist.
* After revoking a certificate with the revoke subcommand, Certbot will offer
  to delete the lineage associated with the certificate. When Certbot is run
  with --non-interactive, it will automatically try to delete the associated
  lineage.
* When using Certbot's Google Cloud DNS plugin on Google Compute Engine, you no
  longer have to provide a credential file to Certbot if you have configured
  sufficient permissions for the instance which Certbot can automatically
  obtain using Google's metadata service.

### Changed

* When deleting certificates interactively using the delete subcommand, Certbot
  will now allow you to select multiple lineages to be deleted at once.
* Certbot's Apache plugin no longer always parses Apache's sites-available on
  Debian based systems and instead only parses virtual hosts included in your
  Apache configuration. You can provide an additional directory for Certbot to
  parse using the command line flag --apache-vhost-root.

### Fixed

* The plugins subcommand can now be run without root access.
* certbot-auto now includes a timeout when updating itself so it no longer
  hangs indefinitely when it is unable to connect to the external server.
* An issue where Certbot's Apache plugin would sometimes fail to deploy a
  certificate on Debian based systems if mod_ssl wasn't already enabled has
  been resolved.
* A bug in our Docker image where the certificates subcommand could not report
  if certificates maintained by Certbot had been revoked has been fixed.
* Certbot's RFC 2136 DNS plugin (for use with software like BIND) now properly
  performs DNS challenges when the domain being verified contains a CNAME
  record.

More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/43?closed=1
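As an aside, the renewal hook directories described in the 0.19.0 entry above accept any executable file. A minimal sketch of a deploy hook is shown below; the file name, the reloaded service, and the use of the RENEWED_DOMAINS/RENEWED_LINEAGE environment variables that Certbot exports for deploy hooks are illustrative assumptions, not part of this diff.

```python
#!/usr/bin/env python
"""Hypothetical /etc/letsencrypt/renewal-hooks/deploy/reload-nginx.py.

Certbot runs every executable placed in renewal-hooks/deploy after a
certificate is issued or renewed.
"""
import os
import subprocess


def main():
    domains = os.environ.get("RENEWED_DOMAINS", "")
    lineage = os.environ.get("RENEWED_LINEAGE", "")
    print("Deploy hook fired for %s (lineage: %s)" % (domains, lineage))
    # Reload the webserver so it picks up the new certificate files.
    subprocess.check_call(["systemctl", "reload", "nginx"])


if __name__ == "__main__":
    main()
```

Dropping such a file into the deploy directory and marking it executable is enough for `certbot renew` to pick it up; no flags are needed.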
## 0.18.2 - 2017-09-20

### Fixed

* An issue where Certbot's ACME module would raise an AttributeError trying to
  create self-signed certificates when used with pyOpenSSL 17.3.0 has been
  resolved. For Certbot users with this version of pyOpenSSL, this caused
  Certbot to crash when performing a TLS SNI challenge or when the Nginx plugin
  tried to create an SSL server block.

More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/46?closed=1

## 0.18.1 - 2017-09-08

### Fixed

* If certbot-auto was running as an unprivileged user and it upgraded from
  0.17.0 to 0.18.0, it would crash with a permissions error and would need to
  be run again to successfully complete the upgrade. This has been fixed and
  certbot-auto should upgrade cleanly to 0.18.1.
* Certbot usually uses "certbot-auto" or "letsencrypt-auto" in error messages
  and the User-Agent string instead of "certbot" when you are using one of
  these wrapper scripts. Proper detection of this was broken with Certbot's new
  installation path in /opt in 0.18.0 but this problem has been resolved.

More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/45?closed=1

## 0.18.0 - 2017-09-06

### Added

* The Nginx plugin now configures Nginx to use 2048-bit Diffie-Hellman
  parameters. Java 6 clients do not support Diffie-Hellman parameters larger
  than 1024 bits, so if you need to support these clients you will need to
  manually modify your Nginx configuration after using the Nginx installer.

### Changed

* certbot-auto now installs Certbot in directories under `/opt/eff.org`. If you
  had an existing installation from certbot-auto, a symlink is created to the
  new directory. You can configure certbot-auto to use a different path by
  setting the environment variable VENV_PATH.
* The Nginx plugin can now be selected in Certbot's interactive output.
* Output verbosity of renewal failures when running with `--quiet` has been
  reduced.
* The default revocation reason shown in Certbot help output now is a human
  readable string instead of a numerical code.
* Plugin selection is now included in normal terminal output.

### Fixed

* A newer version of ConfigArgParse is now installed when using certbot-auto
  causing values set to false in a Certbot INI configuration file to be handled
  intuitively. Setting a boolean command line flag to false is equivalent to
  not including it in the configuration file at all.
* New naming conventions preventing certbot-auto from installing OS
  dependencies on Fedora 26 have been resolved.

More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/milestone/42?closed=1
## 0.17.0 - 2017-08-02

### Added

* Support in our nginx plugin for modifying SSL server blocks that do
  not contain certificate or key directives.
* A `--max-log-backups` flag to allow users to configure or even completely
  disable Certbot's built in log rotation.
* A `--user-agent-comment` flag to allow people who build tools around Certbot
  to differentiate their user agent string by adding a comment to its default
  value.

### Changed

* Due to some awesome work by the
  [cryptography project](https://github.com/pyca/cryptography), compilation can
  now be avoided on most systems when using certbot-auto. This eliminates many
  problems people have had in the past such as running out of memory, having
  invalid headers/libraries, and changes to the OS packages on their system
  after compilation breaking Certbot.
* The `--renew-hook` flag has been hidden in favor of `--deploy-hook`. This new
  flag works exactly the same way except it is always run when a certificate is
  issued rather than just when it is renewed.
* We have started printing deprecation warnings in certbot-auto for
  experimentally supported systems with OS packages available.
* A certificate lineage's name is included in error messages during renewal.

### Fixed

* Encoding errors that could occur when parsing error messages from the ACME
  server containing Unicode have been resolved.
* certbot-auto no longer prints misleading messages about there being a newer
  pip version available when installation fails.
* Certbot's ACME library now properly extracts domains from critical SAN
  extensions.

More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/issues?q=is%3Aissue+milestone%3A0.17.0+is%3Aclosed

## 0.16.0 - 2017-07-05

### Added

* A plugin for performing DNS challenges using dynamic DNS updates as defined
  in RFC 2136. This plugin is packaged separately from Certbot and is available
  at https://pypi.python.org/pypi/certbot-dns-rfc2136. It supports Python 2.6,
  2.7, and 3.3+. At this time, there isn't a good way to install this plugin
  when using certbot-auto, but this should change in the near future.
* Plugins for performing DNS challenges for the providers
  [DNS Made Easy](https://pypi.python.org/pypi/certbot-dns-dnsmadeeasy) and
  [LuaDNS](https://pypi.python.org/pypi/certbot-dns-luadns). These plugins are
  packaged separately from Certbot and support Python 2.7 and 3.3+. Currently,
  there isn't a good way to install these plugins when using certbot-auto,
  but that should change soon.
* Support for performing TLS-SNI-01 challenges when using the manual plugin.
* Automatic detection of Arch Linux in the Apache plugin providing better
  default settings for the plugin.

### Changed

* The text of the interactive question about whether a redirect from HTTP to
  HTTPS should be added by Certbot has been rewritten to better explain the
  choices to the user.
* Simplified HTTP challenge instructions in the manual plugin.

### Fixed

* Problems performing a dry run when using the Nginx plugin have been fixed.
* Resolved an issue where certbot-dns-digitalocean's test suite would sometimes
  fail when run using Python 3.
* On some systems, previous versions of certbot-auto would error out with a
  message about a missing hash for setuptools. This has been fixed.
* A bug where Certbot would sometimes not print a space at the end of an
  interactive prompt has been resolved.
* Nonfatal tracebacks are no longer shown in rare cases where Certbot
  encounters an exception trying to close its TCP connection with the ACME
  server.

More details about these changes can be found on our GitHub repo:
https://github.com/certbot/certbot/issues?q=is%3Aissue+milestone%3A0.16.0+is%3Aclosed

## 0.15.0 - 2017-06-08

### Added
@@ -12,6 +12,7 @@ COPY certbot src/certbot
RUN apk add --no-cache --virtual .certbot-deps \
        libffi \
        libssl1.0 \
        openssl \
        ca-certificates \
        binutils
RUN apk add --no-cache --virtual .build-deps \
@@ -6,3 +6,4 @@ include linter_plugin.py
recursive-include docs *
recursive-include examples *
recursive-include certbot/tests/testdata *
include certbot/ssl-dhparams.pem
@@ -15,6 +15,9 @@ protocol) that can automate the tasks of obtaining certificates and
configuring webservers to use them. This client runs on Unix-based operating
systems.

To see the changes made to Certbot between versions please refer to our
`changelog <https://github.com/certbot/certbot/blob/master/CHANGELOG.md>`_.

Until May 2016, Certbot was named simply ``letsencrypt`` or ``letsencrypt-auto``,
depending on install method. Instructions on the Internet, and some pieces of the
software, may still refer to this older name.
@@ -519,7 +519,12 @@ class ClientNetwork(object):  # pylint: disable=too-many-instance-attributes
self._default_timeout = timeout

def __del__(self):
self.session.close()
# Try to close the session, but don't show exceptions to the
# user if the call to close() fails. See #4840.
try:
self.session.close()
except Exception:  # pylint: disable=broad-except
pass

def _wrap_in_jws(self, obj, nonce):
"""Wrap `JSONDeSerializable` object in JWS.
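The `__del__` change above wraps `session.close()` so that failures during interpreter teardown are not surfaced to the user (see #4840). A minimal standalone sketch of the same defensive pattern, with a hypothetical class name:

```python
import requests


class Network(object):  # hypothetical stand-in for acme.client.ClientNetwork
    def __init__(self, session):
        self.session = session

    def __del__(self):
        # Destructors may run during interpreter shutdown, when modules and
        # attributes can already be partially torn down; swallow any error
        # from close() instead of printing a traceback to the user.
        try:
            self.session.close()
        except Exception:  # broad on purpose, mirroring the diff
            pass


net = Network(requests.Session())
del net  # close() is attempted; any exception it raises is suppressed
```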
@@ -600,12 +600,19 @@ class ClientNetworkTest(unittest.TestCase):
mock.ANY, mock.ANY, verify=mock.ANY, headers=mock.ANY,
timeout=45)

def test_del(self):
def test_del(self, close_exception=None):
sess = mock.MagicMock()

if close_exception is not None:
sess.close.side_effect = close_exception

self.net.session = sess
del self.net
sess.close.assert_called_once_with()

def test_del_error(self):
self.test_del(ReferenceError)

@mock.patch('acme.client.requests')
def test_requests_error_passthrough(self, mock_requests):
mock_requests.exceptions = requests.exceptions
@@ -2,6 +2,7 @@
import binascii
import contextlib
import logging
import os
import re
import socket
import sys

@@ -218,7 +219,7 @@ def _pyopenssl_cert_or_req_san(cert_or_req):
text = func(OpenSSL.crypto.FILETYPE_TEXT, cert_or_req).decode("utf-8")
# WARNING: this function does not support multiple SANs extensions.
# Multiple X509v3 extensions of the same type is disallowed by RFC 5280.
match = re.search(r"X509v3 Subject Alternative Name:\s*(.*)", text)
match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
# WARNING: this function assumes that no SAN can include
# parts_separator, hence the split!
sans_parts = [] if match is None else match.group(1).split(parts_separator)
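A quick illustration (not part of the diff) of why the relaxed pattern above matters: when the SAN extension is marked critical, OpenSSL's text output includes the word "critical" after the colon, which the old expression captured instead of the names. The sample text below is illustrative only.

```python
import re

# Illustrative excerpt of OpenSSL FILETYPE_TEXT output for a certificate
# whose Subject Alternative Name extension is marked critical.
text = ("X509v3 Subject Alternative Name: critical\n"
        "    DNS:chicago-cubs.venafi.example, DNS:cubs.venafi.example")

old = re.search(r"X509v3 Subject Alternative Name:\s*(.*)", text)
new = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)

print(old.group(1))  # 'critical' -- the old pattern grabs the marker, not the names
print(new.group(1))  # 'DNS:chicago-cubs.venafi.example, DNS:cubs.venafi.example'
```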
@@ -243,7 +244,7 @@ def gen_ss_cert(key, domains, not_before=None,
"""
assert domains, "Must provide one or more hostnames for the cert."
cert = OpenSSL.crypto.X509()
cert.set_serial_number(int(binascii.hexlify(OpenSSL.rand.bytes(16)), 16))
cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
cert.set_version(2)

extensions = [
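For context (not part of the diff): both the removed and the added line build a random 128-bit serial number; the new version simply sources the bytes from `os.urandom` rather than the `OpenSSL.rand` module, which is what the pyOpenSSL 17.3.0 AttributeError mentioned in the 0.18.2 changelog entry above is about. A tiny standalone sketch of the technique:

```python
import binascii
import os

# 16 random bytes -> hex string -> integer: a uniformly random 128-bit serial.
serial = int(binascii.hexlify(os.urandom(16)), 16)
assert 0 <= serial < 2 ** 128
print(serial)
```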
@@ -131,6 +131,11 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
self.assertEqual(self._call_csr('csr-idnsans.pem'),
self._get_idn_names())

def test_critical_san(self):
self.assertEqual(self._call_cert('critical-san.pem'),
['chicago-cubs.venafi.example', 'cubs.venafi.example'])


class RandomSnTest(unittest.TestCase):
"""Test for random certificate serial numbers."""
@@ -1,5 +1,6 @@
"""ACME protocol messages."""
import collections
import six

from acme import challenges
from acme import errors

@@ -36,9 +37,13 @@ ERROR_TYPE_DESCRIPTIONS.update(dict(  # add errors with old prefix, deprecate me

def is_acme_error(err):
"""Check if argument is an ACME error."""
return (ERROR_PREFIX in str(err)) or (OLD_ERROR_PREFIX in str(err))
if isinstance(err, Error) and (err.typ is not None):
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
else:
return False


@six.python_2_unicode_compatible
class Error(jose.JSONObjectWithFields, errors.Error):
"""ACME error.

@@ -92,10 +97,10 @@ class Error(jose.JSONObjectWithFields, errors.Error):
return code

def __str__(self):
return ' :: '.join(
part for part in
return b' :: '.join(
part.encode('ascii', 'backslashreplace') for part in
(self.typ, self.description, self.detail, self.title)
if part is not None)
if part is not None).decode()


class _Constant(jose.JSONDeSerializable, collections.Hashable):  # type: ignore
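The new `__str__` above joins the error parts as ASCII bytes with backslashreplace before decoding, so non-ASCII detail text (such as the Arabic string used in the test further down) can no longer trigger encoding errors. A small illustration of the technique, outside the diff, with example part values:

```python
# -*- coding: utf-8 -*-
parts = (u'urn:ietf:params:acme:error:malformed',
         u'The request message was malformed',
         u'\u0639\u062f\u0627\u0644\u0629',  # non-ASCII detail
         None)

# Mirror of the joining logic in Error.__str__: encode each part with
# backslashreplace so unencodable characters become escape sequences.
text = b' :: '.join(
    part.encode('ascii', 'backslashreplace') for part in parts
    if part is not None).decode()

print(text)
# urn:ietf:params:acme:error:malformed :: The request message was malformed :: \u0639\u062f\u0627\u0644\u0629
```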
@@ -26,6 +26,7 @@ class ErrorTest(unittest.TestCase):
'type': ERROR_PREFIX + 'malformed',
}
self.error_custom = Error(typ='custom', detail='bar')
self.empty_error = Error()
self.jobj_custom = {'type': 'custom', 'detail': 'bar'}

def test_default_typ(self):

@@ -45,12 +46,6 @@ class ErrorTest(unittest.TestCase):
'The request message was malformed', self.error.description)
self.assertTrue(self.error_custom.description is None)

def test_str(self):
self.assertEqual(
'urn:ietf:params:acme:error:malformed :: The request message was '
'malformed :: foo :: title', str(self.error))
self.assertEqual('custom :: bar', str(self.error_custom))

def test_code(self):
from acme.messages import Error
self.assertEqual('malformed', self.error.code)

@@ -60,8 +55,16 @@ class ErrorTest(unittest.TestCase):
def test_is_acme_error(self):
from acme.messages import is_acme_error
self.assertTrue(is_acme_error(self.error))
self.assertTrue(is_acme_error(str(self.error)))
self.assertFalse(is_acme_error(self.error_custom))
self.assertFalse(is_acme_error(self.empty_error))
self.assertFalse(is_acme_error("must pet all the {dogs|rabbits}"))

def test_unicode_error(self):
from acme.messages import Error, ERROR_PREFIX, is_acme_error
arabic_error = Error(
detail=u'\u0639\u062f\u0627\u0644\u0629', typ=ERROR_PREFIX + 'malformed',
title='title')
self.assertTrue(is_acme_error(arabic_error))

def test_with_code(self):
from acme.messages import Error, is_acme_error
acme/acme/testdata/critical-san.pem (new file, 28 lines, vendored)
@@ -0,0 +1,28 @@
-----BEGIN CERTIFICATE-----
MIIErTCCA5WgAwIBAgIKETb7VQAAAAAdGTANBgkqhkiG9w0BAQsFADCBkTELMAkG
A1UEBhMCVVMxDTALBgNVBAgTBFV0YWgxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5
MRUwEwYDVQQKEwxWZW5hZmksIEluYy4xHzAdBgNVBAsTFkRlbW9uc3RyYXRpb24g
U2VydmljZXMxIjAgBgNVBAMTGVZlbmFmaSBFeGFtcGxlIElzc3VpbmcgQ0EwHhcN
MTcwNzEwMjMxNjA1WhcNMTcwODA5MjMxNjA1WjAAMIIBIjANBgkqhkiG9w0BAQEF
AAOCAQ8AMIIBCgKCAQEA7CU5qRIzCs9hCRiSUvLZ8r81l4zIYbx1V1vZz6x1cS4M
0keNfFJ1wB+zuvx80KaMYkWPYlg4Rsm9Ok3ZapakXDlaWtrfg78lxtHuPw1o7AYV
EXDwwPkNugLMJfYw5hWYSr8PCLcOJoY00YQ0fJ44L+kVsUyGjN4UTRRZmOh/yNVU
0W12dTCz4X7BAW01OuY6SxxwewnW3sBEep+APfr2jd/oIx7fgZmVB8aRCDPj4AFl
XINWIwxmptOwnKPbwLN/vhCvJRUkO6rA8lpYwQkedFf6fHhqi2Sq/NCEOg4RvMCF
fKbMpncOXxz+f4/i43SVLrPz/UyhjNbKGJZ+zFrQowIDAQABo4IBlTCCAZEwPgYD
VR0RAQH/BDQwMoIbY2hpY2Fnby1jdWJzLnZlbmFmaS5leGFtcGxlghNjdWJzLnZl
bmFmaS5leGFtcGxlMB0GA1UdDgQWBBTgKZXVSFNyPHHtO/phtIALPcCF5DAfBgNV
HSMEGDAWgBT/JJ6Wei/pzf+9DRHuv6Wgdk2HsjBSBgNVHR8ESzBJMEegRaBDhkFo
dHRwOi8vcGtpLnZlbmFmaS5leGFtcGxlL2NybC9WZW5hZmklMjBFeGFtcGxlJTIw
SXNzdWluZyUyMENBLmNybDA6BggrBgEFBQcBAQQuMCwwKgYIKwYBBQUHMAGGHmh0
dHA6Ly9wa2kudmVuYWZpLmV4YW1wbGUvb2NzcDAOBgNVHQ8BAf8EBAMCBaAwPQYJ
KwYBBAGCNxUHBDAwLgYmKwYBBAGCNxUIhIDLGYTvsSSEnZ8ehvD5UofP4hMEgobv
DIGy4mcCAWQCAQIwEwYDVR0lBAwwCgYIKwYBBQUHAwEwGwYJKwYBBAGCNxUKBA4w
DDAKBggrBgEFBQcDATANBgkqhkiG9w0BAQsFAAOCAQEA3YW4t1AzxEn384OqdU6L
ny8XkMhWpRM0W0Z9ZC3gRZKbVUu49nG/KB5hbVn/de33zdX9HOZJKc0vXzkGZQUs
OUCCsKX4VKzV5naGXOuGRbvV4CJh5P0kPlDzyb5t312S49nJdcdBf0Y/uL5Qzhst
bXy8qNfFNG3SIKKRAUpqE9OVIl+F+JBwexa+v/4dFtUOqMipfXxB3TaxnDqvU1dS
yO34ZTvIMGXJIZ5nn/d/LNc3N3vBg2SHkMpladqw0Hr7mL0bFOe0b+lJgkDP06Be
n08fikhz1j2AW4/ZHa9w4DUz7J21+RtHMhh+Vd1On0EAeZ563svDe7Z+yrg6zOVv
KA==
-----END CERTIFICATE-----
@@ -4,7 +4,7 @@ from setuptools import setup
from setuptools import find_packages

version = '0.16.0.dev0'
version = '0.20.0.dev0'

# Please update tox.ini when modifying dependency version requirements
install_requires = [

@@ -16,15 +16,11 @@ install_requires = [
'PyOpenSSL>=0.13',
'pyrfc3339',
'pytz',
# requests>=2.10 is required to fix
# https://github.com/shazow/urllib3/issues/556. This requirement can be
# relaxed to 'requests[security]>=2.4.1', however, less useful errors
# will be raised for some network/SSL errors.
'requests[security]>=2.10',
'requests[security]>=2.4.1',  # security extras added in 2.4.1
# For pkg_resources. >=1.0 so pip resolves it to a version cryptography
# will tolerate; see #2599:
'setuptools>=1.0',
'six',
'six>=1.9.0',  # needed for python_2_unicode_compatible
]

# env markers cause problems with older pip and setuptools
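The `six>=1.9.0` pin above is tied to the `@six.python_2_unicode_compatible` decorator applied to the Error class in acme/messages.py earlier in this diff, per the inline comment. A small illustration of what the decorator buys, with a simplified hypothetical class and assuming six is installed:

```python
import six


@six.python_2_unicode_compatible  # on Python 2, maps __str__ to __unicode__
class Error(Exception):  # hypothetical, simplified stand-in for acme.messages.Error
    def __init__(self, detail):
        super(Error, self).__init__(detail)
        self.detail = detail

    def __str__(self):
        return u'error :: {0}'.format(self.detail)


# Under Python 3 the decorator is a no-op; under Python 2 it also installs a
# UTF-8-encoding __str__, so printing a non-ASCII detail does not crash.
print(str(Error(u'\u0639\u062f\u0627\u0644\u0629')))
```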
@@ -3,7 +3,6 @@ import logging

from certbot import errors
from certbot import reverter
from certbot.plugins import common

from certbot_apache import constants

@@ -11,7 +10,7 @@ from certbot_apache import constants
logger = logging.getLogger(__name__)

class AugeasConfigurator(common.Plugin):
class AugeasConfigurator(common.Installer):
"""Base Augeas Configurator class.

:ivar config: Configuration.

@@ -33,11 +32,6 @@ class AugeasConfigurator(common.Plugin):

self.save_notes = ""

# See if any temporary changes need to be recovered
# This needs to occur before VirtualHost objects are setup...
# because this will change the underlying configuration and potential
# vhosts
self.reverter = reverter.Reverter(self.config)

def init_augeas(self):
""" Initialize the actual Augeas instance """

@@ -50,6 +44,10 @@ class AugeasConfigurator(common.Plugin):
flags=(augeas.Augeas.NONE |
augeas.Augeas.NO_MODL_AUTOLOAD |
augeas.Augeas.ENABLE_SPAN))
# See if any temporary changes need to be recovered
# This needs to occur before VirtualHost objects are setup...
# because this will change the underlying configuration and potential
# vhosts
self.recovery_routine()

def check_parsing_errors(self, lens):

@@ -78,26 +76,26 @@ class AugeasConfigurator(common.Plugin):
self.aug.get(path + "/message")))
raise errors.PluginError(msg)

# TODO: Cleanup this function
def save(self, title=None, temporary=False):
"""Saves all changes to the configuration files.
def ensure_augeas_state(self):
"""Makes sure that all Augeas dom changes are written to files to avoid
loss of configuration directives when doing additional augeas parsing,
causing a possible augeas.load() resulting dom reset
"""

This function first checks for save errors, if none are found,
all configuration changes made will be saved. According to the
function parameters. If an exception is raised, a new checkpoint
was not created.
if self.unsaved_files():
self.save_notes += "(autosave)"
self.save()

:param str title: The title of the save. If a title is given, the
configuration will be saved as a new checkpoint and put in a
timestamped directory.

:param bool temporary: Indicates whether the changes made will
be quickly reversed in the future (ie. challenges)
def unsaved_files(self):
"""Lists files that have modified Augeas DOM but the changes have not
been written to the filesystem yet, used by `self.save()` and
ApacheConfigurator to check the file state.

:raises .errors.PluginError: If there was an error in Augeas, in
an attempt to save the configuration, or an error creating a
checkpoint

:returns: `set` of unsaved files
"""
save_state = self.aug.get("/augeas/save")
self.aug.set("/augeas/save", "noop")

@@ -113,30 +111,41 @@ class AugeasConfigurator(common.Plugin):
raise errors.PluginError(
"Error saving files, check logs for more info.")

# Return the original save method
self.aug.set("/augeas/save", save_state)

# Retrieve list of modified files
# Note: Noop saves can cause the file to be listed twice, I used a
# set to remove this possibility. This is a known augeas 0.10 error.
save_paths = self.aug.match("/augeas/events/saved")

# If the augeas tree didn't change, no files were saved and a backup
# should not be created
save_files = set()
if save_paths:
for path in save_paths:
save_files.add(self.aug.get(path)[6:])
return save_files

try:
# Create Checkpoint
if temporary:
self.reverter.add_to_temp_checkpoint(
save_files, self.save_notes)
else:
self.reverter.add_to_checkpoint(save_files,
self.save_notes)
except errors.ReverterError as err:
raise errors.PluginError(str(err))
def save(self, title=None, temporary=False):
"""Saves all changes to the configuration files.

This function first checks for save errors, if none are found,
all configuration changes made will be saved. According to the
function parameters. If an exception is raised, a new checkpoint
was not created.

:param str title: The title of the save. If a title is given, the
configuration will be saved as a new checkpoint and put in a
timestamped directory.

:param bool temporary: Indicates whether the changes made will
be quickly reversed in the future (ie. challenges)

"""
save_files = self.unsaved_files()
if save_files:
self.add_to_checkpoint(save_files,
self.save_notes, temporary=temporary)

self.aug.set("/augeas/save", save_state)
self.save_notes = ""
self.aug.save()

@@ -147,10 +156,7 @@ class AugeasConfigurator(common.Plugin):
self.aug.remove("/files/"+sf)
self.aug.load()
if title and not temporary:
try:
self.reverter.finalize_checkpoint(title)
except errors.ReverterError as err:
raise errors.PluginError(str(err))
self.finalize_checkpoint(title)

def _log_save_errors(self, ex_errs):
"""Log errors due to bad Augeas save.

@@ -175,10 +181,7 @@ class AugeasConfigurator(common.Plugin):
:raises .errors.PluginError: If unable to recover the configuration

"""
try:
self.reverter.recovery_routine()
except errors.ReverterError as err:
raise errors.PluginError(str(err))
super(AugeasConfigurator, self).recovery_routine()
# Need to reload configuration after these changes take effect
self.aug.load()

@@ -188,10 +191,7 @@ class AugeasConfigurator(common.Plugin):
:raises .errors.PluginError: If unable to revert the challenge config.

"""
try:
self.reverter.revert_temporary_config()
except errors.ReverterError as err:
raise errors.PluginError(str(err))
self.revert_temporary_config()
self.aug.load()

def rollback_checkpoints(self, rollback=1):

@@ -203,20 +203,5 @@ class AugeasConfigurator(common.Plugin):
the function is unable to correctly revert the configuration

"""
try:
self.reverter.rollback_checkpoints(rollback)
except errors.ReverterError as err:
raise errors.PluginError(str(err))
super(AugeasConfigurator, self).rollback_checkpoints(rollback)
self.aug.load()

def view_config_changes(self):
"""Show all of the configuration changes that have taken place.

:raises .errors.PluginError: If there is a problem while processing
the checkpoints directories.

"""
try:
self.reverter.view_config_changes()
except errors.ReverterError as err:
raise errors.PluginError(str(err))
@@ -1,6 +1,5 @@
"""Apache Configuration based off of Augeas Configurator."""
# pylint: disable=too-many-lines
import filecmp
import fnmatch
import logging
import os

@@ -96,7 +95,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
help="SSL vhost configuration extension.")
add("server-root", default=constants.os_constant("server_root"),
help="Apache server root directory.")
add("vhost-root", default=constants.os_constant("vhost_root"),
add("vhost-root", default=None,
help="Apache server VirtualHost configuration root")
add("logs-root", default=constants.os_constant("logs_root"),
help="Apache server logs directory")

@@ -134,6 +133,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
self.parser = None
self.version = version
self.vhosts = None
self.vhostroot = None
self._enhance_func = {"redirect": self._enable_redirect,
"ensure-http-header": self._set_http_header,
"staple-ocsp": self._enable_ocsp_stapling}

@@ -190,9 +190,15 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
"version 1.2.0 or higher, please make sure you have you have "
"those installed.")

# Parse vhost-root if defined on cli
if not self.conf("vhost-root"):
self.vhostroot = constants.os_constant("vhost_root")
else:
self.vhostroot = os.path.abspath(self.conf("vhost-root"))

self.parser = parser.ApacheParser(
self.aug, self.conf("server-root"), self.conf("vhost-root"),
self.version)
self.version, configurator=self)
# Check for errors in parsing files with Augeas
self.check_parsing_errors("httpd.aug")

@@ -242,13 +248,18 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
a lack of directives

"""
# Choose vhost before (possible) enabling of mod_ssl, to keep the
# vhost choice namespace similar with the pre-validation one.
vhost = self.choose_vhost(domain)
self._clean_vhost(vhost)

# This is done first so that ssl module is enabled and cert_path,
# cert_key... can all be parsed appropriately
self.prepare_server_https("443")

# Add directives and remove duplicates
self._add_dummy_ssl_directives(vhost.path)
self._clean_vhost(vhost)

path = {"cert_path": self.parser.find_dir("SSLCertificateFile",
None, vhost.path),
"cert_key": self.parser.find_dir("SSLCertificateKeyFile",

@@ -290,6 +301,10 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
self.aug.set(path["cert_path"][-1], fullchain_path)
self.aug.set(path["cert_key"][-1], key_path)

# Enable the new vhost if needed
if not vhost.enabled:
self.enable_site(vhost)

# Save notes about the transaction that took place
self.save_notes += ("Changed vhost at %s with addresses of %s\n"
"\tSSLCertificateFile %s\n"

@@ -300,11 +315,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
if chain_path is not None:
self.save_notes += "\tSSLCertificateChainFile %s\n" % chain_path

# Make sure vhost is enabled if distro with enabled / available
if self.conf("handle-sites"):
if not vhost.enabled:
self.enable_site(vhost)

def choose_vhost(self, target_name, temp=False):
"""Chooses a virtual host based on the given domain name.

@@ -347,9 +357,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
vhost = display_ops.select_vhost(target_name, self.vhosts)
if vhost is None:
logger.error(
"No vhost exists with servername or alias of: %s "
"(or it's in a file with multiple vhosts, which Certbot "
"can't parse yet). "
"No vhost exists with servername or alias of %s. "
"No vhost was selected. Please specify ServerName or ServerAlias "
"in the Apache config, or split vhosts into separate files.",
target_name)

@@ -581,17 +589,14 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
if filename is None:
return None

if self.conf("handle-sites"):
is_enabled = self.is_site_enabled(filename)
else:
is_enabled = True

macro = False
if "/macro/" in path.lower():
macro = True

vhost_enabled = self.parser.parsed_in_original(filename)

vhost = obj.VirtualHost(filename, path, addrs, is_ssl,
is_enabled, modmacro=macro)
vhost_enabled, modmacro=macro)
self._add_servernames(vhost)
return vhost

@@ -646,7 +651,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
elif internal_path not in internal_paths[realpath]:
internal_paths[realpath].add(internal_path)
vhs.append(new_vhost)

return vhs

def is_name_vhost(self, target_addr):

@@ -857,14 +861,22 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
vh_p = self._get_new_vh_path(orig_matches, new_matches)

if not vh_p:
raise errors.PluginError(
"Could not reverse map the HTTPS VirtualHost to the original")
# The vhost was not found on the currently parsed paths
# Make Augeas aware of the new vhost
self.parser.parse_file(ssl_fp)
# Try to search again
new_matches = self.aug.match(
"/files%s//* [label()=~regexp('%s')]" %
(self._escape(ssl_fp),
parser.case_i("VirtualHost")))
vh_p = self._get_new_vh_path(orig_matches, new_matches)
if not vh_p:
raise errors.PluginError(
"Could not reverse map the HTTPS VirtualHost to the original")

# Update Addresses
self._update_ssl_vhosts_addrs(vh_p)
# Add directives
self._add_dummy_ssl_directives(vh_p)
self.save()

# Log actions and create save notes
logger.info("Created an SSL vhost at %s", ssl_fp)

@@ -875,6 +887,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
# Create the Vhost object
ssl_vhost = self._create_vhost(vh_p)
ssl_vhost.ancestor = nonssl_vhost

self.vhosts.append(ssl_vhost)

# NOTE: Searches through Augeas seem to ruin changes to directives
@@ -903,11 +916,29 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
return None

def _get_ssl_vhost_path(self, non_ssl_vh_fp):
# Get filepath of new ssl_vhost
if non_ssl_vh_fp.endswith(".conf"):
return non_ssl_vh_fp[:-(len(".conf"))] + self.conf("le_vhost_ext")
""" Get a file path for SSL vhost, uses user defined path as priority,
but if the value is invalid or not defined, will fall back to non-ssl
vhost filepath.

:param str non_ssl_vh_fp: Filepath of non-SSL vhost

:returns: Filepath for SSL vhost
:rtype: str
"""

if self.conf("vhost-root") and os.path.exists(self.conf("vhost-root")):
# Defined by user on CLI

fp = os.path.join(os.path.realpath(self.vhostroot),
os.path.basename(non_ssl_vh_fp))
else:
return non_ssl_vh_fp + self.conf("le_vhost_ext")
# Use non-ssl filepath
fp = os.path.realpath(non_ssl_vh_fp)

if fp.endswith(".conf"):
return fp[:-(len(".conf"))] + self.conf("le_vhost_ext")
else:
return fp + self.conf("le_vhost_ext")

def _sift_rewrite_rule(self, line):
"""Decides whether a line should be copied to a SSL vhost.

@@ -972,6 +1003,10 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
# The content does not include the closing tag, so add it
new_file.write("</VirtualHost>\n")
new_file.write("</IfModule>\n")
# Add new file to augeas paths if we're supposed to handle
# activation (it's not included as default)
if not self.parser.parsed_in_current(ssl_fp):
self.parser.parse_file(ssl_fp)
except IOError:
logger.fatal("Error writing/reading to file in make_vhost_ssl")
raise errors.PluginError("Unable to write/read in make_vhost_ssl")

@@ -1260,13 +1295,13 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
.. note:: This function saves the configuration

:param ssl_vhost: Destination of traffic, an ssl enabled vhost
:type ssl_vhost: :class:`~letsencrypt_apache.obj.VirtualHost`
:type ssl_vhost: :class:`~certbot_apache.obj.VirtualHost`

:param unused_options: Not currently used
:type unused_options: Not Available

:returns: Success, general_vhost (HTTP vhost)
:rtype: (bool, :class:`~letsencrypt_apache.obj.VirtualHost`)
:rtype: (bool, :class:`~certbot_apache.obj.VirtualHost`)

"""
min_apache_ver = (2, 3, 3)

@@ -1612,7 +1647,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
if len(ssl_vhost.name) < (255 - (len(redirect_filename) + 1)):
redirect_filename = "le-redirect-%s.conf" % ssl_vhost.name

redirect_filepath = os.path.join(self.conf("vhost-root"),
redirect_filepath = os.path.join(self.vhostroot,
redirect_filename)

# Register the new file that will be created

@@ -1623,6 +1658,11 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
# Write out file
with open(redirect_filepath, "w") as redirect_file:
redirect_file.write(text)

# Add new include to configuration if it doesn't exist yet
if not self.parser.parsed_in_current(redirect_filepath):
self.parser.parse_file(redirect_filepath)

logger.info("Created redirect file: %s", redirect_filename)

return redirect_filepath

@@ -1662,32 +1702,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):

return redirects

def is_site_enabled(self, avail_fp):
"""Checks to see if the given site is enabled.

.. todo:: fix hardcoded sites-enabled, check os.path.samefile

:param str avail_fp: Complete file path of available site

:returns: Success
:rtype: bool

"""

enabled_dir = os.path.join(self.parser.root, "sites-enabled")
if not os.path.isdir(enabled_dir):
error_msg = ("Directory '{0}' does not exist. Please ensure "
"that the values for --apache-handle-sites and "
"--apache-server-root are correct for your "
"environment.".format(enabled_dir))
raise errors.ConfigurationError(error_msg)
for entry in os.listdir(enabled_dir):
try:
if filecmp.cmp(avail_fp, os.path.join(enabled_dir, entry)):
return True
except OSError:
pass
return False

def enable_site(self, vhost):
"""Enables an available site, Apache reload required.

@@ -1707,21 +1721,40 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
supported.

"""
if self.is_site_enabled(vhost.filep):
if vhost.enabled:
return

if "/sites-available/" in vhost.filep:
enabled_path = ("%s/sites-enabled/%s" %
(self.parser.root, os.path.basename(vhost.filep)))
self.reverter.register_file_creation(False, enabled_path)
# Handle non-debian systems
if not self.conf("handle-sites"):
if not self.parser.parsed_in_original(vhost.filep):
# Add direct include to root conf
self.parser.add_include(self.parser.loc["default"], vhost.filep)
vhost.enabled = True
return

enabled_path = ("%s/sites-enabled/%s" %
(self.parser.root, os.path.basename(vhost.filep)))
self.reverter.register_file_creation(False, enabled_path)
try:
os.symlink(vhost.filep, enabled_path)
vhost.enabled = True
logger.info("Enabling available site: %s", vhost.filep)
self.save_notes += "Enabled site %s\n" % vhost.filep
else:
raise errors.NotSupportedError(
"Unsupported filesystem layout. "
"sites-available/enabled expected.")
except OSError as err:
if os.path.islink(enabled_path) and os.path.realpath(
enabled_path) == vhost.filep:
# Already in shape
vhost.enabled = True
return
else:
logger.warning(
"Could not symlink %s to %s, got error: %s", enabled_path,
vhost.filep, err.strerror)
errstring = ("Encountered error while trying to enable a " +
"newly created VirtualHost located at {0} by " +
"linking to it from {1}")
raise errors.NotSupportedError(errstring.format(vhost.filep,
enabled_path))
vhost.enabled = True
logger.info("Enabling available site: %s", vhost.filep)
self.save_notes += "Enabled site %s\n" % vhost.filep

def enable_mod(self, mod_name, temp=False):
"""Enables module in Apache.

@@ -1991,5 +2024,5 @@ def install_ssl_options_conf(options_ssl, options_ssl_digest):
# XXX if we ever try to enforce a local privilege boundary (eg, running
# certbot for unprivileged users via setuid), this function will need
# to be modified.
return common.install_ssl_options_conf(options_ssl, options_ssl_digest,
return common.install_version_controlled_file(options_ssl, options_ssl_digest,
constants.os_constant("MOD_SSL_CONF_SRC"), constants.ALL_SSL_OPTIONS_HASHES)
@@ -85,10 +85,12 @@ def _vhost_menu(domain, vhosts):
"vhosts are not yet supported)".format(domain, os.linesep),
choices, force_interactive=True)
except errors.MissingCommandlineFlag:
msg = ("Encountered vhost ambiguity but unable to ask for user guidance in "
"non-interactive mode. Currently Certbot needs each vhost to be "
"in its own conf file, and may need vhosts to be explicitly "
"labelled with ServerName or ServerAlias directives.")
msg = (
"Encountered vhost ambiguity when trying to find a vhost for "
"{0} but was unable to ask for user "
"guidance in non-interactive mode. Certbot may need "
"vhosts to be explicitly labelled with ServerName or "
"ServerAlias directives.".format(domain))
logger.warning(msg)
raise errors.MissingCommandlineFlag(msg)
@@ -1,4 +1,5 @@
"""ApacheParser is a member object of the ApacheConfigurator class."""
import copy
import fnmatch
import logging
import os

@@ -30,9 +31,15 @@ class ApacheParser(object):
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
fnmatch_chars = set(["*", "?", "\\", "[", "]"])

def __init__(self, aug, root, vhostroot, version=(2, 4)):
def __init__(self, aug, root, vhostroot=None, version=(2, 4),
configurator=None):
# Note: Order is important here.

# Needed for calling save() with reverter functionality that resides in
# AugeasConfigurator superclass of ApacheConfigurator. This resolves
# issues with aug.load() after adding new files / defines to parse tree
self.configurator = configurator

# This uses the binary, so it can be done first.
# https://httpd.apache.org/docs/2.4/mod/core.html#define
# https://httpd.apache.org/docs/2.4/mod/core.html#ifdefine

@@ -46,9 +53,7 @@ class ApacheParser(object):
# Find configuration root and make sure augeas can parse it.
self.root = os.path.abspath(root)
self.loc = {"root": self._find_config_root()}
self._parse_file(self.loc["root"])

self.vhostroot = os.path.abspath(vhostroot)
self.parse_file(self.loc["root"])

# This problem has been fixed in Augeas 1.0
self.standardize_excl()

@@ -62,15 +67,42 @@ class ApacheParser(object):
# Set up rest of locations
self.loc.update(self._set_locations())

# Must also attempt to parse virtual host root
self._parse_file(self.vhostroot + "/" +
constants.os_constant("vhost_files"))
self.existing_paths = copy.deepcopy(self.parser_paths)

# Must also attempt to parse additional virtual host root
if vhostroot:
self.parse_file(os.path.abspath(vhostroot) + "/" +
constants.os_constant("vhost_files"))

# check to see if there were unparsed define statements
if version < (2, 4):
if self.find_dir("Define", exclude=False):
raise errors.PluginError("Error parsing runtime variables")

def add_include(self, main_config, inc_path):
"""Add Include for a new configuration file if one does not exist

:param str main_config: file path to main Apache config file
:param str inc_path: path of file to include

"""
if len(self.find_dir(case_i("Include"), inc_path)) == 0:
logger.debug("Adding Include %s to %s",
inc_path, get_aug_path(main_config))
self.add_dir(
get_aug_path(main_config),
"Include", inc_path)

# Add new path to parser paths
new_dir = os.path.dirname(inc_path)
new_file = os.path.basename(inc_path)
if new_dir in self.existing_paths.keys():
# Add to existing path
self.existing_paths[new_dir].append(new_file)
else:
# Create a new path
self.existing_paths[new_dir] = [new_file]

def init_modules(self):
"""Iterates on the configuration until no new modules are loaded.

@@ -91,9 +123,14 @@ class ApacheParser(object):

for match_name, match_filename in six.moves.zip(
iterator, iterator):
self.modules.add(self.get_arg(match_name))
self.modules.add(
os.path.basename(self.get_arg(match_filename))[:-2] + "c")
mod_name = self.get_arg(match_name)
mod_filename = self.get_arg(match_filename)
if mod_name and mod_filename:
self.modules.add(mod_name)
self.modules.add(os.path.basename(mod_filename)[:-2] + "c")
else:
logger.debug("Could not read LoadModule directive from " +
"Augeas path: {0}".format(match_name[6:]))

def update_runtime_variables(self):
""""
@@ -339,7 +376,10 @@ class ApacheParser(object):

# Note: normal argument may be a quoted variable
# e.g. strip now, not later
value = value.strip("'\"")
if not value:
return None
else:
value = value.strip("'\"")

variables = ApacheParser.arg_var_interpreter.findall(value)

@@ -428,9 +468,9 @@ class ApacheParser(object):

# Attempts to add a transform to the file if one does not already exist
if os.path.isdir(arg):
self._parse_file(os.path.join(arg, "*"))
self.parse_file(os.path.join(arg, "*"))
else:
self._parse_file(arg)
self.parse_file(arg)

# Argument represents an fnmatch regular expression, convert it
# Split up the path and convert each into an Augeas accepted regex

@@ -470,7 +510,7 @@ class ApacheParser(object):
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
return fnmatch.translate(clean_fn_match)[4:-3]

def _parse_file(self, filepath):
def parse_file(self, filepath):
"""Parse file with Augeas

Checks to see if file_path is parsed by Augeas

@@ -480,6 +520,10 @@ class ApacheParser(object):

"""
use_new, remove_old = self._check_path_actions(filepath)
# Ensure that we have the latest Augeas DOM state on disk before
# calling aug.load() which reloads the state from disk
if self.configurator:
self.configurator.ensure_augeas_state()
# Test if augeas included file for Httpd.lens
# Note: This works for augeas globs, ie. *.conf
if use_new:

@@ -494,6 +538,39 @@ class ApacheParser(object):
self._add_httpd_transform(filepath)
self.aug.load()

def parsed_in_current(self, filep):
"""Checks if the file path is parsed by current Augeas parser config
ie. returns True if the file is found on a path that's found in live
Augeas configuration.

:param str filep: Path to match

:returns: True if file is parsed in existing configuration tree
:rtype: bool
"""
return self._parsed_by_parser_paths(filep, self.parser_paths)

def parsed_in_original(self, filep):
"""Checks if the file path is parsed by existing Apache config.
ie. returns True if the file is found on a path that matches Include or
IncludeOptional statement in the Apache configuration.

:param str filep: Path to match

:returns: True if file is parsed in existing configuration tree
:rtype: bool
"""
return self._parsed_by_parser_paths(filep, self.existing_paths)

def _parsed_by_parser_paths(self, filep, paths):
"""Helper function that searches through provided paths and returns
True if file path is found in the set"""
for directory in paths.keys():
for filename in paths[directory]:
if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
return True
return False

def _check_path_actions(self, filepath):
"""Determine actions to take with a new augeas path

@@ -622,7 +699,6 @@ class ApacheParser(object):

for name in location:
if os.path.isfile(os.path.join(self.root, name)):
return os.path.join(self.root, name)

raise errors.NoInstallationError("Could not find configuration root")
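As an aside (not part of the diff), the parsed_in_current/parsed_in_original helpers added above reduce to an fnmatch test of a concrete path against the glob patterns the parser knows about, which is why a wildcard include such as sites-enabled/*.conf matches individual vhost files. The paths in the sketch below are illustrative only.

```python
import fnmatch
import os

# Illustrative parser paths: directory -> filename patterns known to the parser.
paths = {
    "/etc/apache2/sites-enabled": ["*.conf"],
    "/etc/apache2": ["apache2.conf"],
}


def parsed_by_paths(filep, paths):
    # Same matching rule as ApacheParser._parsed_by_parser_paths above.
    for directory, filenames in paths.items():
        for filename in filenames:
            if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
                return True
    return False


print(parsed_by_paths("/etc/apache2/sites-enabled/example.conf", paths))  # True
print(parsed_by_paths("/etc/apache2/conf-available/other.conf", paths))   # False
```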
@@ -26,6 +26,7 @@ function Setup() {
ErrorLog /tmp/error.log
CustomLog /tmp/requests.log combined
</VirtualHost>" | sudo tee $EA/sites-available/throwaway-example.conf >/dev/null
sudo ln -sf $EA/sites-available/throwaway-example.conf $EA/sites-enabled/throwaway-example.conf
else
TMP="/tmp/`basename \"$APPEND_APACHECONF\"`.$$"
sudo cp -a "$APPEND_APACHECONF" "$TMP"

@@ -37,6 +38,7 @@ function Cleanup() {
if [ "$APPEND_APACHECONF" = "" ] ; then
sudo rm /etc/apache2/sites-{enabled,available}/"$f"
sudo rm $EA/sites-available/throwaway-example.conf
sudo rm $EA/sites-enabled/throwaway-example.conf
else
sudo mv "$TMP" "$APPEND_APACHECONF"
fi
@@ -31,7 +31,7 @@ class AugeasConfiguratorTest(util.ApacheTest):

def test_bad_parse(self):
# pylint: disable=protected-access
self.config.parser._parse_file(os.path.join(
self.config.parser.parse_file(os.path.join(
self.config.parser.root, "conf-available", "bad_conf_file.conf"))
self.assertRaises(
errors.PluginError, self.config.check_parsing_errors, "httpd.aug")
@@ -8,6 +8,7 @@ import unittest
import mock
# six is used in mock.patch()
import six  # pylint: disable=unused-import
import tempfile

from acme import challenges

@@ -34,9 +35,20 @@ class MultipleVhostsTest(util.ApacheTest):
def setUp(self):  # pylint: disable=arguments-differ
super(MultipleVhostsTest, self).setUp()

self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
self.config = self.mock_deploy_cert(self.config)
from certbot_apache.constants import os_constant
orig_os_constant = os_constant
def mock_os_constant(key, vhost_path=self.vhost_path):
"""Mock default vhost path"""
if key == "vhost_root":
return vhost_path
else:
return orig_os_constant(key)

with mock.patch("certbot_apache.constants.os_constant") as mock_c:
mock_c.side_effect = mock_os_constant
self.config = util.get_apache_configurator(
self.config_path, None, self.config_dir, self.work_dir)
self.config = self.mock_deploy_cert(self.config)
self.vh_truth = util.get_vh_truth(
self.temp_dir, "debian_apache_2_4/multiple_vhosts")

@@ -121,19 +133,20 @@ class MultipleVhostsTest(util.ApacheTest):

@certbot_util.patch_get_utility()
def test_get_all_names(self, mock_getutility):
mock_getutility.notification = mock.MagicMock(return_value=True)
mock_utility = mock_getutility()
mock_utility.notification = mock.MagicMock(return_value=True)
names = self.config.get_all_names()
self.assertEqual(names, set(
["certbot.demo", "ocspvhost.com", "encryption-example.demo"]
["certbot.demo", "ocspvhost.com", "encryption-example.demo",
"nonsym.link", "vhost.in.rootconf"]
))

@certbot_util.patch_get_utility()
@mock.patch("certbot_apache.configurator.socket.gethostbyaddr")
def test_get_all_names_addrs(self, mock_gethost, mock_getutility):
mock_gethost.side_effect = [("google.com", "", ""), socket.error]
notification = mock.Mock()
notification.notification = mock.Mock(return_value=True)
mock_getutility.return_value = notification
mock_utility = mock_getutility()
mock_utility.notification.return_value = True
vhost = obj.VirtualHost(
"fp", "ap",
set([obj.Addr(("8.8.8.8", "443")),

@@ -145,7 +158,7 @@ class MultipleVhostsTest(util.ApacheTest):

names = self.config.get_all_names()
# Names get filtered, only 5 are returned
self.assertEqual(len(names), 5)
self.assertEqual(len(names), 7)
self.assertTrue("zombo.com" in names)
self.assertTrue("google.com" in names)
self.assertTrue("certbot.demo" in names)

@@ -159,7 +172,7 @@ class MultipleVhostsTest(util.ApacheTest):
def test_get_aug_internal_path(self):
from certbot_apache.configurator import get_internal_aug_path
internal_paths = [
"VirtualHost", "IfModule/VirtualHost", "VirtualHost", "VirtualHost",
"Virtualhost", "IfModule/VirtualHost", "VirtualHost", "VirtualHost",
"Macro/VirtualHost", "IfModule/VirtualHost", "VirtualHost",
"IfModule/VirtualHost"]

@@ -185,14 +198,9 @@ class MultipleVhostsTest(util.ApacheTest):
self.vh_truth[2].get_names(), set(["*.le.co", "ip-172-30-0-17"]))

def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found.

.. note:: If test fails, only finding 1 Vhost... it is likely that
it is a problem with is_enabled. If finding only 3, likely is_ssl

"""
"""Make sure all vhosts are being properly found."""
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 8)
self.assertEqual(len(vhs), 10)
found = 0

for vhost in vhs:

@@ -203,7 +211,7 @@ class MultipleVhostsTest(util.ApacheTest):
else:
raise Exception("Missed: %s" % vhost)  # pragma: no cover

self.assertEqual(found, 8)
self.assertEqual(found, 10)
|
||||
|
||||
# Handle case of non-debian layout get_virtual_hosts
|
||||
with mock.patch(
|
||||
@@ -211,7 +219,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
) as mock_conf:
|
||||
mock_conf.return_value = False
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 8)
|
||||
self.assertEqual(len(vhs), 10)
|
||||
|
||||
@mock.patch("certbot_apache.display_ops.select_vhost")
|
||||
def test_choose_vhost_none_avail(self, mock_select):
|
||||
@@ -226,8 +234,10 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.vh_truth[1], self.config.choose_vhost("none.com"))
|
||||
|
||||
@mock.patch("certbot_apache.display_ops.select_vhost")
|
||||
def test_choose_vhost_select_vhost_non_ssl(self, mock_select):
|
||||
@mock.patch("certbot_apache.obj.VirtualHost.conflicts")
|
||||
def test_choose_vhost_select_vhost_non_ssl(self, mock_conf, mock_select):
|
||||
mock_select.return_value = self.vh_truth[0]
|
||||
mock_conf.return_value = False
|
||||
chosen_vhost = self.config.choose_vhost("none.com")
|
||||
self.vh_truth[0].aliases.add("none.com")
|
||||
self.assertEqual(
|
||||
@@ -237,6 +247,15 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.assertFalse(self.vh_truth[0].ssl)
|
||||
self.assertTrue(chosen_vhost.ssl)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator._find_best_vhost")
|
||||
@mock.patch("certbot_apache.parser.ApacheParser.add_dir")
|
||||
def test_choose_vhost_and_servername_addition(self, mock_add, mock_find):
|
||||
ret_vh = self.vh_truth[8]
|
||||
ret_vh.enabled = False
|
||||
mock_find.return_value = self.vh_truth[8]
|
||||
self.config.choose_vhost("whatever.com")
|
||||
self.assertTrue(mock_add.called)
|
||||
|
||||
@mock.patch("certbot_apache.display_ops.select_vhost")
|
||||
def test_choose_vhost_select_vhost_with_temp(self, mock_select):
|
||||
mock_select.return_value = self.vh_truth[0]
|
||||
@@ -288,9 +307,9 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# Assume only the two default vhosts.
|
||||
self.config.vhosts = [
|
||||
vh for vh in self.config.vhosts
|
||||
if vh.name not in ["certbot.demo",
|
||||
if vh.name not in ["certbot.demo", "nonsym.link",
|
||||
"encryption-example.demo",
|
||||
"ocspvhost.com"]
|
||||
"ocspvhost.com", "vhost.in.rootconf"]
|
||||
and "*.blue.purple.com" not in vh.aliases
|
||||
]
|
||||
self.assertEqual(
|
||||
@@ -299,26 +318,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
def test_non_default_vhosts(self):
|
||||
# pylint: disable=protected-access
|
||||
self.assertEqual(len(self.config._non_default_vhosts()), 6)
|
||||
|
||||
def test_is_site_enabled(self):
|
||||
"""Test if site is enabled.
|
||||
|
||||
.. note:: This test currently fails for hard links
|
||||
(which may happen if you move dirs incorrectly)
|
||||
.. warning:: This test does not work when running using the
|
||||
unittest.main() function. It incorrectly copies symlinks.
|
||||
|
||||
"""
|
||||
self.assertTrue(self.config.is_site_enabled(self.vh_truth[0].filep))
|
||||
self.assertFalse(self.config.is_site_enabled(self.vh_truth[1].filep))
|
||||
self.assertTrue(self.config.is_site_enabled(self.vh_truth[2].filep))
|
||||
self.assertTrue(self.config.is_site_enabled(self.vh_truth[3].filep))
|
||||
with mock.patch("os.path.isdir") as mock_isdir:
|
||||
mock_isdir.return_value = False
|
||||
self.assertRaises(errors.ConfigurationError,
|
||||
self.config.is_site_enabled,
|
||||
"irrelevant")
|
||||
self.assertEqual(len(self.config._non_default_vhosts()), 8)
|
||||
|
||||
@mock.patch("certbot.util.run_script")
|
||||
@mock.patch("certbot.util.exe_exists")
|
||||
@@ -345,21 +345,59 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.assertRaises(
|
||||
errors.MisconfigurationError, self.config.enable_mod, "ssl")
|
||||
|
||||
def test_enable_site(self):
|
||||
# Default 443 vhost
|
||||
self.assertFalse(self.vh_truth[1].enabled)
|
||||
self.config.enable_site(self.vh_truth[1])
|
||||
def test_enable_site_already_enabled(self):
|
||||
self.assertTrue(self.vh_truth[1].enabled)
|
||||
|
||||
# Go again to make sure nothing fails
|
||||
self.config.enable_site(self.vh_truth[1])
|
||||
|
||||
def test_enable_site_failure(self):
|
||||
self.config.parser.root = "/tmp/nonexistent"
|
||||
self.assertRaises(
|
||||
errors.NotSupportedError,
|
||||
self.config.enable_site,
|
||||
obj.VirtualHost("asdf", "afsaf", set(), False, False))
|
||||
|
||||
def test_enable_site_nondebian(self):
|
||||
mock_c = "certbot_apache.configurator.ApacheConfigurator.conf"
|
||||
def conf_side_effect(arg):
|
||||
""" Mock function for ApacheConfigurator.conf """
|
||||
confvars = {"handle-sites": False}
|
||||
if arg in confvars:
|
||||
return confvars[arg]
|
||||
inc_path = "/path/to/whereever"
|
||||
vhost = self.vh_truth[0]
|
||||
with mock.patch(mock_c) as mock_conf:
|
||||
mock_conf.side_effect = conf_side_effect
|
||||
vhost.enabled = False
|
||||
vhost.filep = inc_path
|
||||
self.assertFalse(self.config.parser.find_dir("Include", inc_path))
|
||||
self.assertFalse(
|
||||
os.path.dirname(inc_path) in self.config.parser.existing_paths)
|
||||
self.config.enable_site(vhost)
|
||||
self.assertTrue(self.config.parser.find_dir("Include", inc_path))
|
||||
self.assertTrue(
|
||||
os.path.dirname(inc_path) in self.config.parser.existing_paths)
|
||||
self.assertTrue(
|
||||
os.path.basename(inc_path) in self.config.parser.existing_paths[
|
||||
os.path.dirname(inc_path)])
|
||||
|
||||
def test_deploy_cert_enable_new_vhost(self):
|
||||
# Create
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
|
||||
self.config.parser.modules.add("ssl_module")
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
self.assertFalse(ssl_vhost.enabled)
|
||||
self.config.deploy_cert(
|
||||
"encryption-example.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.assertTrue(ssl_vhost.enabled)
|
||||
# Make sure that we don't error out if symlink already exists
|
||||
ssl_vhost.enabled = False
|
||||
self.assertFalse(ssl_vhost.enabled)
|
||||
self.config.deploy_cert(
|
||||
"encryption-example.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.assertTrue(ssl_vhost.enabled)
|
||||
|
||||
def test_deploy_cert_newssl(self):
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir,
|
||||
@@ -388,12 +426,14 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# Verify one directive was found in the correct file
|
||||
self.assertEqual(len(loc_cert), 1)
|
||||
self.assertEqual(configurator.get_file_path(loc_cert[0]),
|
||||
self.vh_truth[1].filep)
|
||||
self.assertEqual(
|
||||
configurator.get_file_path(loc_cert[0]),
|
||||
self.vh_truth[1].filep)
|
||||
|
||||
self.assertEqual(len(loc_key), 1)
|
||||
self.assertEqual(configurator.get_file_path(loc_key[0]),
|
||||
self.vh_truth[1].filep)
|
||||
self.assertEqual(
|
||||
configurator.get_file_path(loc_key[0]),
|
||||
self.vh_truth[1].filep)
|
||||
|
||||
def test_deploy_cert_newssl_no_fullchain(self):
|
||||
self.config = util.get_apache_configurator(
|
||||
@@ -427,10 +467,75 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"random.demo", "example/cert.pem",
|
||||
"example/key.pem"))
|
||||
|
||||
def test_deploy_cert_not_parsed_path(self):
|
||||
# Make sure that we add include to root config for vhosts when
|
||||
# handle-sites is false
|
||||
self.config.parser.modules.add("ssl_module")
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
tmp_path = os.path.realpath(tempfile.mkdtemp("vhostroot"))
|
||||
os.chmod(tmp_path, 0o755)
|
||||
mock_p = "certbot_apache.configurator.ApacheConfigurator._get_ssl_vhost_path"
|
||||
mock_a = "certbot_apache.parser.ApacheParser.add_include"
|
||||
mock_c = "certbot_apache.configurator.ApacheConfigurator.conf"
|
||||
orig_conf = self.config.conf
|
||||
def conf_side_effect(arg):
|
||||
""" Mock function for ApacheConfigurator.conf """
|
||||
confvars = {"handle-sites": False}
|
||||
if arg in confvars:
|
||||
return confvars[arg]
|
||||
else:
|
||||
return orig_conf(arg)
|
||||
|
||||
with mock.patch(mock_c) as mock_conf:
|
||||
mock_conf.side_effect = conf_side_effect
|
||||
with mock.patch(mock_p) as mock_path:
|
||||
mock_path.return_value = os.path.join(tmp_path, "whatever.conf")
|
||||
with mock.patch(mock_a) as mock_add:
|
||||
self.config.deploy_cert(
|
||||
"encryption-example.demo",
|
||||
"example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem")
|
||||
# Test that we actually called add_include
|
||||
self.assertTrue(mock_add.called)
|
||||
shutil.rmtree(tmp_path)
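The test above (like test_enable_site_nondebian earlier) mocks ApacheConfigurator.conf with a side_effect that overrides selected keys and defers everything else to the original method. A generic sketch of that pattern, with illustrative names only:

    import mock  # same mock package the tests already import

    def make_conf_override(original_conf, overrides):
        # Override selected config keys, fall back to the real conf() otherwise.
        def conf_side_effect(key):
            if key in overrides:
                return overrides[key]
            return original_conf(key)
        return conf_side_effect

    # e.g. force the non-Debian code path in a test:
    # with mock.patch(mock_c, side_effect=make_conf_override(orig_conf,
    #                                                        {"handle-sites": False})):
    #     ...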
|
||||
|
||||
|
||||
def test_deploy_cert(self):
|
||||
self.config.parser.modules.add("ssl_module")
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
|
||||
# Patch _add_dummy_ssl_directives to make sure we write them correctly
|
||||
# pylint: disable=protected-access
|
||||
orig_add_dummy = self.config._add_dummy_ssl_directives
|
||||
def mock_add_dummy_ssl(vhostpath):
|
||||
"""Mock method for _add_dummy_ssl_directives"""
|
||||
def find_args(path, directive):
|
||||
"""Return list of arguments in requested directive at path"""
|
||||
f_args = []
|
||||
dirs = self.config.parser.find_dir(directive, None,
|
||||
path)
|
||||
for d in dirs:
|
||||
f_args.append(self.config.parser.get_arg(d))
|
||||
return f_args
|
||||
# Verify that the dummy directives do not exist
|
||||
self.assertFalse(
|
||||
"insert_cert_file_path" in find_args(vhostpath,
|
||||
"SSLCertificateFile"))
|
||||
self.assertFalse(
|
||||
"insert_key_file_path" in find_args(vhostpath,
|
||||
"SSLCertificateKeyFile"))
|
||||
orig_add_dummy(vhostpath)
|
||||
# Verify that the dummy directives exist
|
||||
self.assertTrue(
|
||||
"insert_cert_file_path" in find_args(vhostpath,
|
||||
"SSLCertificateFile"))
|
||||
self.assertTrue(
|
||||
"insert_key_file_path" in find_args(vhostpath,
|
||||
"SSLCertificateKeyFile"))
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.config._add_dummy_ssl_directives = mock_add_dummy_ssl
|
||||
|
||||
# Get the default 443 vhost
|
||||
self.config.assoc["random.demo"] = self.vh_truth[1]
|
||||
self.config.deploy_cert(
|
||||
@@ -452,16 +557,19 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# Verify one directive was found in the correct file
|
||||
self.assertEqual(len(loc_cert), 1)
|
||||
self.assertEqual(configurator.get_file_path(loc_cert[0]),
|
||||
self.vh_truth[1].filep)
|
||||
self.assertEqual(
|
||||
configurator.get_file_path(loc_cert[0]),
|
||||
self.vh_truth[1].filep)
|
||||
|
||||
self.assertEqual(len(loc_key), 1)
|
||||
self.assertEqual(configurator.get_file_path(loc_key[0]),
|
||||
self.vh_truth[1].filep)
|
||||
self.assertEqual(
|
||||
configurator.get_file_path(loc_key[0]),
|
||||
self.vh_truth[1].filep)
|
||||
|
||||
self.assertEqual(len(loc_chain), 1)
|
||||
self.assertEqual(configurator.get_file_path(loc_chain[0]),
|
||||
self.vh_truth[1].filep)
|
||||
self.assertEqual(
|
||||
configurator.get_file_path(loc_chain[0]),
|
||||
self.vh_truth[1].filep)
|
||||
|
||||
# One more time for chain directive setting
|
||||
self.config.deploy_cert(
|
||||
@@ -614,6 +722,30 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
mock_span.return_value = return_value
|
||||
self.test_make_vhost_ssl()
|
||||
|
||||
def test_make_vhost_ssl_nonsymlink(self):
|
||||
ssl_vhost_slink = self.config.make_vhost_ssl(self.vh_truth[8])
|
||||
self.assertTrue(ssl_vhost_slink.ssl)
|
||||
self.assertTrue(ssl_vhost_slink.enabled)
|
||||
self.assertEqual(ssl_vhost_slink.name, "nonsym.link")
|
||||
|
||||
def test_make_vhost_ssl_nonexistent_vhost_path(self):
|
||||
def conf_side_effect(arg):
|
||||
""" Mock function for ApacheConfigurator.conf """
|
||||
confvars = {
|
||||
"vhost-root": "/tmp/nonexistent",
|
||||
"le_vhost_ext": "-le-ssl.conf",
|
||||
"handle-sites": True}
|
||||
return confvars[arg]
|
||||
|
||||
with mock.patch(
|
||||
"certbot_apache.configurator.ApacheConfigurator.conf"
|
||||
) as mock_conf:
|
||||
mock_conf.side_effect = conf_side_effect
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[1])
|
||||
self.assertEqual(os.path.dirname(ssl_vhost.filep),
|
||||
os.path.dirname(os.path.realpath(
|
||||
self.vh_truth[1].filep)))
|
||||
|
||||
def test_make_vhost_ssl(self):
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
|
||||
|
||||
@@ -623,22 +755,17 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"encryption-example-le-ssl.conf"))
|
||||
|
||||
self.assertEqual(ssl_vhost.path,
|
||||
"/files" + ssl_vhost.filep + "/IfModule/VirtualHost")
|
||||
"/files" + ssl_vhost.filep + "/IfModule/Virtualhost")
|
||||
self.assertEqual(len(ssl_vhost.addrs), 1)
|
||||
self.assertEqual(set([obj.Addr.fromstring("*:443")]), ssl_vhost.addrs)
|
||||
self.assertEqual(ssl_vhost.name, "encryption-example.demo")
|
||||
self.assertTrue(ssl_vhost.ssl)
|
||||
self.assertFalse(ssl_vhost.enabled)
|
||||
|
||||
self.assertTrue(self.config.parser.find_dir(
|
||||
"SSLCertificateFile", None, ssl_vhost.path, False))
|
||||
self.assertTrue(self.config.parser.find_dir(
|
||||
"SSLCertificateKeyFile", None, ssl_vhost.path, False))
|
||||
|
||||
self.assertEqual(self.config.is_name_vhost(self.vh_truth[0]),
|
||||
self.config.is_name_vhost(ssl_vhost))
|
||||
|
||||
self.assertEqual(len(self.config.vhosts), 9)
|
||||
self.assertEqual(len(self.config.vhosts), 11)
|
||||
|
||||
def test_clean_vhost_ssl(self):
|
||||
# pylint: disable=protected-access
|
||||
@@ -688,17 +815,17 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
DIRECTIVES = ["Foo", "Bar"]
|
||||
for directive in DIRECTIVES:
|
||||
for _ in range(10):
|
||||
self.config.parser.add_dir(self.vh_truth[1].path,
|
||||
self.config.parser.add_dir(self.vh_truth[2].path,
|
||||
directive, ["baz"])
|
||||
self.config.save()
|
||||
|
||||
self.config._remove_directives(self.vh_truth[1].path, DIRECTIVES)
|
||||
self.config._remove_directives(self.vh_truth[2].path, DIRECTIVES)
|
||||
self.config.save()
|
||||
|
||||
for directive in DIRECTIVES:
|
||||
self.assertEqual(
|
||||
len(self.config.parser.find_dir(
|
||||
directive, None, self.vh_truth[1].path, False)), 0)
|
||||
directive, None, self.vh_truth[2].path, False)), 0)
|
||||
|
||||
def test_make_vhost_ssl_bad_write(self):
|
||||
mock_open = mock.mock_open()
|
||||
@@ -717,10 +844,10 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
def test_add_name_vhost_if_necessary(self):
|
||||
# pylint: disable=protected-access
|
||||
self.config.save = mock.Mock()
|
||||
self.config.add_name_vhost = mock.Mock()
|
||||
self.config.version = (2, 2)
|
||||
self.config._add_name_vhost_if_necessary(self.vh_truth[0])
|
||||
self.assertTrue(self.config.save.called)
|
||||
self.assertTrue(self.config.add_name_vhost.called)
|
||||
|
||||
new_addrs = set()
|
||||
for addr in self.vh_truth[0].addrs:
|
||||
@@ -728,7 +855,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
self.vh_truth[0].addrs = new_addrs
|
||||
self.config._add_name_vhost_if_necessary(self.vh_truth[0])
|
||||
self.assertEqual(self.config.save.call_count, 2)
|
||||
self.assertEqual(self.config.add_name_vhost.call_count, 2)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.tls_sni_01.ApacheTlsSni01.perform")
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
|
||||
@@ -915,7 +1042,6 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"SSLUseStapling", "on", ssl_vhost.path)
|
||||
|
||||
self.assertEqual(len(ssl_use_stapling_aug_path), 1)
|
||||
|
||||
ssl_vhost_aug_path = parser.get_aug_path(ssl_vhost.filep)
|
||||
stapling_cache_aug_path = self.config.parser.find_dir('SSLStaplingCache',
|
||||
"shmcb:/var/run/apache2/stapling_cache(128000)",
|
||||
@@ -1177,7 +1303,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.config._enable_redirect(self.vh_truth[1], "")
|
||||
self.assertEqual(len(self.config.vhosts), 9)
|
||||
self.assertEqual(len(self.config.vhosts), 11)
|
||||
|
||||
def test_create_own_redirect_for_old_apache_version(self):
|
||||
self.config.parser.modules.add("rewrite_module")
|
||||
@@ -1188,7 +1314,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.config._enable_redirect(self.vh_truth[1], "")
|
||||
self.assertEqual(len(self.config.vhosts), 9)
|
||||
self.assertEqual(len(self.config.vhosts), 11)
|
||||
|
||||
def test_sift_rewrite_rule(self):
|
||||
# pylint: disable=protected-access
|
||||
@@ -1285,13 +1411,17 @@ class AugeasVhostsTest(util.ApacheTest):
|
||||
for name in names:
|
||||
self.assertFalse(name in self.config.choose_vhost(name).aliases)
|
||||
|
||||
def test_choose_vhost_without_matching_wildcard(self):
|
||||
@mock.patch("certbot_apache.obj.VirtualHost.conflicts")
|
||||
def test_choose_vhost_without_matching_wildcard(self, mock_conflicts):
|
||||
mock_conflicts.return_value = False
|
||||
mock_path = "certbot_apache.display_ops.select_vhost"
|
||||
with mock.patch(mock_path, lambda _, vhosts: vhosts[0]):
|
||||
for name in ("a.example.net", "other.example.net"):
|
||||
self.assertTrue(name in self.config.choose_vhost(name).aliases)
|
||||
|
||||
def test_choose_vhost_wildcard_not_found(self):
|
||||
@mock.patch("certbot_apache.obj.VirtualHost.conflicts")
|
||||
def test_choose_vhost_wildcard_not_found(self, mock_conflicts):
|
||||
mock_conflicts.return_value = False
|
||||
mock_path = "certbot_apache.display_ops.select_vhost"
|
||||
names = (
|
||||
"abc.example.net", "not.there.tld", "aa.wildcard.tld"
|
||||
@@ -1358,10 +1488,6 @@ class MultiVhostsTest(util.ApacheTest):
|
||||
self.assertTrue(ssl_vhost.ssl)
|
||||
self.assertFalse(ssl_vhost.enabled)
|
||||
|
||||
self.assertTrue(self.config.parser.find_dir(
|
||||
"SSLCertificateFile", None, ssl_vhost.path, False))
|
||||
self.assertTrue(self.config.parser.find_dir(
|
||||
"SSLCertificateKeyFile", None, ssl_vhost.path, False))
|
||||
|
||||
self.assertEqual(self.config.is_name_vhost(self.vh_truth[1]),
|
||||
self.config.is_name_vhost(ssl_vhost))
|
||||
@@ -1497,7 +1623,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
|
||||
with mock.patch("certbot.plugins.common.logger") as mock_logger:
|
||||
self._call()
|
||||
self.assertEqual(mock_logger.warning.call_args[0][0],
|
||||
"%s has been manually modified; updated ssl configuration options "
|
||||
"%s has been manually modified; updated file "
|
||||
"saved to %s. We recommend updating %s for security purposes.")
|
||||
self.assertEqual(crypto_util.sha256sum(constants.os_constant("MOD_SSL_CONF_SRC")),
|
||||
self._current_ssl_options_hash())
|
||||
|
||||
@@ -38,7 +38,7 @@ class BasicParserTest(util.ParserTest):
|
||||
file_path = os.path.join(
|
||||
self.config_path, "not-parsed-by-default", "certbot.conf")
|
||||
|
||||
self.parser._parse_file(file_path) # pylint: disable=protected-access
|
||||
self.parser.parse_file(file_path) # pylint: disable=protected-access
|
||||
|
||||
# search for the httpd incl
|
||||
matches = self.parser.aug.match(
|
||||
@@ -52,7 +52,7 @@ class BasicParserTest(util.ParserTest):
|
||||
test2 = self.parser.find_dir("documentroot")
|
||||
|
||||
self.assertEqual(len(test), 1)
|
||||
self.assertEqual(len(test2), 4)
|
||||
self.assertEqual(len(test2), 7)
|
||||
|
||||
def test_add_dir(self):
|
||||
aug_default = "/files" + self.parser.loc["default"]
|
||||
@@ -66,6 +66,10 @@ class BasicParserTest(util.ParserTest):
|
||||
for i, match in enumerate(matches):
|
||||
self.assertEqual(self.parser.aug.get(match), str(i + 1))
|
||||
|
||||
def test_empty_arg(self):
|
||||
self.assertEquals(None,
|
||||
self.parser.get_arg("/files/whatever/nonexistent"))
|
||||
|
||||
def test_add_dir_to_ifmodssl(self):
|
||||
"""test add_dir_to_ifmodssl.
|
||||
|
||||
@@ -114,6 +118,16 @@ class BasicParserTest(util.ParserTest):
|
||||
self.assertEqual(results["default"], results["listen"])
|
||||
self.assertEqual(results["default"], results["name"])
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser.find_dir")
|
||||
@mock.patch("certbot_apache.parser.ApacheParser.get_arg")
|
||||
def test_init_modules_bad_syntax(self, mock_arg, mock_find):
|
||||
mock_find.return_value = ["1", "2", "3", "4", "5", "6", "7", "8"]
|
||||
mock_arg.return_value = None
|
||||
with mock.patch("certbot_apache.parser.logger") as mock_logger:
|
||||
self.parser.init_modules()
|
||||
# Make sure that we got a None return value and logged the file
|
||||
self.assertTrue(mock_logger.debug.called)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
|
||||
def test_update_runtime_variables(self, mock_cfg):
|
||||
mock_cfg.return_value = (
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
../sites-available/another_wildcard.conf
|
||||
@@ -0,0 +1 @@
|
||||
../sites-available/old,default.conf
|
||||
@@ -0,0 +1 @@
|
||||
../sites-available/wildcard.conf
|
||||
@@ -193,4 +193,15 @@ IncludeOptional conf-enabled/*.conf
|
||||
# Include the virtual host configurations:
|
||||
IncludeOptional sites-enabled/*.conf
|
||||
|
||||
<VirtualHost *:80>
|
||||
|
||||
ServerName vhost.in.rootconf
|
||||
ServerAdmin webmaster@localhost
|
||||
DocumentRoot /var/www/html
|
||||
|
||||
ErrorLog ${APACHE_LOG_DIR}/error.log
|
||||
CustomLog ${APACHE_LOG_DIR}/access.log combined
|
||||
|
||||
</VirtualHost>
|
||||
|
||||
# vim: syntax=apache ts=4 sw=4 sts=4 sr noet
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
<VirtualHost *:80>
|
||||
<Virtualhost *:80>
|
||||
ServerName encryption-example.demo
|
||||
ServerAdmin webmaster@localhost
|
||||
|
||||
@@ -39,4 +39,4 @@
|
||||
Allow from 127.0.0.0/255.0.0.0 ::1/128
|
||||
</Directory>
|
||||
|
||||
</VirtualHost>
|
||||
</Virtualhost>
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
../sites-available/default-ssl-port-only.conf
|
||||
@@ -0,0 +1 @@
|
||||
../sites-available/default-ssl.conf
|
||||
@@ -0,0 +1,9 @@
|
||||
<VirtualHost *:80>
|
||||
ServerName nonsym.link
|
||||
ServerAdmin webmaster@localhost
|
||||
|
||||
DocumentRoot /var/www-certbot-reworld/static/
|
||||
|
||||
ErrorLog ${APACHE_LOG_DIR}/error.log
|
||||
CustomLog ${APACHE_LOG_DIR}/access.log combined
|
||||
</VirtualHost>
|
||||
@@ -0,0 +1 @@
|
||||
../sites-available/wildcard.conf
|
||||
@@ -45,6 +45,9 @@ class ApacheTest(unittest.TestCase): # pylint: disable=too-few-public-methods
|
||||
return
|
||||
|
||||
for vhost_basename in os.listdir(sites_enabled):
|
||||
# Keep the one non-symlink test vhost in place
|
||||
if vhost_basename == "non-symlink.conf":
|
||||
continue
|
||||
vhost = os.path.join(sites_enabled, vhost_basename)
|
||||
if not os.path.islink(vhost): # pragma: no cover
|
||||
os.remove(vhost)
|
||||
@@ -115,18 +118,20 @@ def get_vh_truth(temp_dir, config_name):
|
||||
"""Return the ground truth for the specified directory."""
|
||||
if config_name == "debian_apache_2_4/multiple_vhosts":
|
||||
prefix = os.path.join(
|
||||
temp_dir, config_name, "apache2/sites-available")
|
||||
temp_dir, config_name, "apache2/sites-enabled")
|
||||
|
||||
aug_pre = "/files" + prefix
|
||||
vh_truth = [
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "encryption-example.conf"),
|
||||
os.path.join(aug_pre, "encryption-example.conf/VirtualHost"),
|
||||
os.path.join(aug_pre, "encryption-example.conf/Virtualhost"),
|
||||
set([obj.Addr.fromstring("*:80")]),
|
||||
False, True, "encryption-example.demo"),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "default-ssl.conf"),
|
||||
os.path.join(aug_pre, "default-ssl.conf/IfModule/VirtualHost"),
|
||||
set([obj.Addr.fromstring("_default_:443")]), True, False),
|
||||
os.path.join(aug_pre,
|
||||
"default-ssl.conf/IfModule/VirtualHost"),
|
||||
set([obj.Addr.fromstring("_default_:443")]), True, True),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "000-default.conf"),
|
||||
os.path.join(aug_pre, "000-default.conf/VirtualHost"),
|
||||
@@ -148,17 +153,34 @@ def get_vh_truth(temp_dir, config_name):
|
||||
os.path.join(prefix, "default-ssl-port-only.conf"),
|
||||
os.path.join(aug_pre, ("default-ssl-port-only.conf/"
|
||||
"IfModule/VirtualHost")),
|
||||
set([obj.Addr.fromstring("_default_:443")]), True, False),
|
||||
set([obj.Addr.fromstring("_default_:443")]), True, True),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "wildcard.conf"),
|
||||
os.path.join(aug_pre, "wildcard.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("*:80")]), False, False,
|
||||
set([obj.Addr.fromstring("*:80")]), False, True,
|
||||
"ip-172-30-0-17", aliases=["*.blue.purple.com"]),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "ocsp-ssl.conf"),
|
||||
os.path.join(aug_pre, "ocsp-ssl.conf/IfModule/VirtualHost"),
|
||||
set([obj.Addr.fromstring("10.2.3.4:443")]), True, True,
|
||||
"ocspvhost.com")]
|
||||
"ocspvhost.com"),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "non-symlink.conf"),
|
||||
os.path.join(aug_pre, "non-symlink.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("*:80")]), False, True,
|
||||
"nonsym.link"),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "default-ssl-port-only.conf"),
|
||||
os.path.join(aug_pre,
|
||||
"default-ssl-port-only.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("*:80")]), True, True, ""),
|
||||
obj.VirtualHost(
|
||||
os.path.join(temp_dir, config_name,
|
||||
"apache2/apache2.conf"),
|
||||
"/files" + os.path.join(temp_dir, config_name,
|
||||
"apache2/apache2.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("*:80")]), False, True,
|
||||
"vhost.in.rootconf")]
|
||||
return vh_truth
|
||||
if config_name == "debian_apache_2_4/multi_vhosts":
|
||||
prefix = os.path.join(
|
||||
|
||||
@@ -7,7 +7,6 @@ from certbot.plugins import common
|
||||
from certbot.errors import PluginError, MissingCommandlineFlag
|
||||
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import parser
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -105,7 +104,8 @@ class ApacheTlsSni01(common.TLSSNI01):
|
||||
|
||||
config_text += "</IfModule>\n"
|
||||
|
||||
self._conf_include_check(self.configurator.parser.loc["default"])
|
||||
self.configurator.parser.add_include(
|
||||
self.configurator.parser.loc["default"], self.challenge_conf)
|
||||
self.configurator.reverter.register_file_creation(
|
||||
True, self.challenge_conf)
|
||||
|
||||
@@ -126,9 +126,8 @@ class ApacheTlsSni01(common.TLSSNI01):
|
||||
vhost = self.configurator.choose_vhost(achall.domain, temp=True)
|
||||
except (PluginError, MissingCommandlineFlag):
|
||||
# We couldn't find the virtualhost for this domain, possibly
|
||||
# because it's a new vhost that's not configured yet (GH #677),
|
||||
# or perhaps because there were multiple <VirtualHost> sections
|
||||
# in the config file (GH #1042). See also GH #2600.
|
||||
# because it's a new vhost that's not configured yet
|
||||
# (GH #677). See also GH #2600.
|
||||
logger.warning("Falling back to default vhost %s...", default_addr)
|
||||
addrs.add(default_addr)
|
||||
return addrs
|
||||
@@ -143,24 +142,6 @@ class ApacheTlsSni01(common.TLSSNI01):
|
||||
|
||||
return addrs
|
||||
|
||||
def _conf_include_check(self, main_config):
|
||||
"""Add TLS-SNI-01 challenge conf file into configuration.
|
||||
|
||||
Adds TLS-SNI-01 challenge include file if it does not already exist
|
||||
within mainConfig
|
||||
|
||||
:param str main_config: file path to main user apache config file
|
||||
|
||||
"""
|
||||
if len(self.configurator.parser.find_dir(
|
||||
parser.case_i("Include"), self.challenge_conf)) == 0:
|
||||
# print "Including challenge virtual host(s)"
|
||||
logger.debug("Adding Include %s to %s",
|
||||
self.challenge_conf, parser.get_aug_path(main_config))
|
||||
self.configurator.parser.add_dir(
|
||||
parser.get_aug_path(main_config),
|
||||
"Include", self.challenge_conf)
|
||||
|
||||
def _get_config_text(self, achall, ip_addrs):
|
||||
"""Chocolate virtual server configuration text
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
certbot-auto
@@ -23,12 +23,15 @@ fi
|
||||
if [ -z "$XDG_DATA_HOME" ]; then
|
||||
XDG_DATA_HOME=~/.local/share
|
||||
fi
|
||||
VENV_NAME="letsencrypt"
|
||||
if [ -z "$VENV_PATH" ]; then
|
||||
VENV_PATH="$XDG_DATA_HOME/$VENV_NAME"
|
||||
# We export these values so they are preserved properly if this script is
|
||||
# rerun with sudo/su where $HOME/$XDG_DATA_HOME may have a different value.
|
||||
export OLD_VENV_PATH="$XDG_DATA_HOME/letsencrypt"
|
||||
export VENV_PATH="/opt/eff.org/certbot/venv"
|
||||
fi
|
||||
VENV_BIN="$VENV_PATH/bin"
|
||||
LE_AUTO_VERSION="0.15.0"
|
||||
BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
|
||||
LE_AUTO_VERSION="0.19.0"
|
||||
BASENAME=$(basename $0)
|
||||
USAGE="Usage: $BASENAME [OPTIONS]
|
||||
A self-updating wrapper script for the Certbot ACME client. When run, updates
|
||||
@@ -49,6 +52,7 @@ Help for certbot itself cannot be provided until it is installed.
|
||||
implies --non-interactive
|
||||
|
||||
All arguments are accepted and forwarded to the Certbot client when run."
|
||||
export CERTBOT_AUTO="$0"
|
||||
|
||||
for arg in "$@" ; do
|
||||
case "$arg" in
|
||||
@@ -77,7 +81,7 @@ for arg in "$@" ; do
|
||||
h)
|
||||
HELP=1;;
|
||||
n)
|
||||
ASSUME_YES=1;;
|
||||
NONINTERACTIVE=1;;
|
||||
q)
|
||||
QUIET=1;;
|
||||
v)
|
||||
@@ -93,8 +97,8 @@ if [ $BASENAME = "letsencrypt-auto" ]; then
|
||||
HELP=0
|
||||
fi
|
||||
|
||||
# Set ASSUME_YES to 1 if QUIET (i.e. --quiet implies --non-interactive)
|
||||
if [ "$QUIET" = 1 ]; then
|
||||
# Set ASSUME_YES to 1 if QUIET or NONINTERACTIVE
|
||||
if [ "$QUIET" = 1 -o "$NONINTERACTIVE" = 1 ]; then
|
||||
ASSUME_YES=1
|
||||
fi
|
||||
|
||||
@@ -119,16 +123,18 @@ else
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# certbot-auto needs root access to bootstrap OS dependencies, and
|
||||
# certbot itself needs root access for almost all modes of operation
|
||||
# The "normal" case is that sudo is used for the steps that need root, but
|
||||
# this script *can* be run as root (not recommended), or fall back to using
|
||||
# `su`. Auto-detection can be overridden by explicitly setting the
|
||||
# environment variable LE_AUTO_SUDO to 'sudo', 'sudo_su' or '' as used below.
|
||||
# Certbot itself needs root access for almost all modes of operation.
|
||||
# certbot-auto needs root access to bootstrap OS dependencies and install
|
||||
# Certbot at a protected path so it can be safely run as root. To accomplish
|
||||
# this, this script will attempt to run itself as root if it doesn't have the
|
||||
# necessary privileges by using `sudo` or falling back to `su` if it is not
|
||||
# available. The mechanism used to obtain root access can be set explicitly by
|
||||
# setting the environment variable LE_AUTO_SUDO to 'sudo', 'su', 'su_sudo',
|
||||
# 'SuSudo', or '' as used below.
|
||||
|
||||
# Because the parameter to `su -c` has to be a single string,
# we need to properly escape it.
|
||||
su_sudo() {
|
||||
SuSudo() {
|
||||
args=""
|
||||
# This `while` loop iterates over all parameters given to this function.
|
||||
# For each parameter, all `'` will be replaced by `'"'"'`, and the escaped string
|
||||
@@ -147,37 +153,57 @@ su_sudo() {
|
||||
su root -c "$args"
|
||||
}
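The escaping that the loop above performs by hand (turning each ' into '"'"') is what lets a whole argument list survive being passed as the single string `su -c` expects. A rough Python equivalent, for illustration only:

    import shlex

    def su_command(argv):
        # `su -c` takes one command string, so quote each argument and join them,
        # which is the same job SuSudo() does with the '"'"' idiom.
        return ["su", "root", "-c", " ".join(shlex.quote(arg) for arg in argv)]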
|
||||
|
||||
SUDO_ENV=""
|
||||
export CERTBOT_AUTO="$0"
|
||||
if [ -n "${LE_AUTO_SUDO+x}" ]; then
|
||||
case "$LE_AUTO_SUDO" in
|
||||
su_sudo|su)
|
||||
SUDO=su_sudo
|
||||
;;
|
||||
sudo)
|
||||
SUDO=sudo
|
||||
SUDO_ENV="CERTBOT_AUTO=$0"
|
||||
;;
|
||||
'') ;; # Nothing to do for plain root method.
|
||||
*)
|
||||
error "Error: unknown root authorization mechanism '$LE_AUTO_SUDO'."
|
||||
exit 1
|
||||
esac
|
||||
say "Using preset root authorization mechanism '$LE_AUTO_SUDO'."
|
||||
else
|
||||
if test "`id -u`" -ne "0" ; then
|
||||
if $EXISTS sudo 1>/dev/null 2>&1; then
|
||||
SUDO=sudo
|
||||
SUDO_ENV="CERTBOT_AUTO=$0"
|
||||
else
|
||||
say \"sudo\" is not available, will use \"su\" for installation steps...
|
||||
SUDO=su_sudo
|
||||
fi
|
||||
# Sets the environment variable SUDO to be the name of the program or function
# to call to get root access. If this script already has root privileges, SUDO
# is set to an empty string. The value in SUDO should be run with the command
# to be called with root privileges as arguments.
|
||||
SetRootAuthMechanism() {
|
||||
SUDO=""
|
||||
if [ -n "${LE_AUTO_SUDO+x}" ]; then
|
||||
case "$LE_AUTO_SUDO" in
|
||||
SuSudo|su_sudo|su)
|
||||
SUDO=SuSudo
|
||||
;;
|
||||
sudo)
|
||||
SUDO="sudo -E"
|
||||
;;
|
||||
'') ;; # Nothing to do for plain root method.
|
||||
*)
|
||||
error "Error: unknown root authorization mechanism '$LE_AUTO_SUDO'."
|
||||
exit 1
|
||||
esac
|
||||
say "Using preset root authorization mechanism '$LE_AUTO_SUDO'."
|
||||
else
|
||||
SUDO=
|
||||
if test "`id -u`" -ne "0" ; then
|
||||
if $EXISTS sudo 1>/dev/null 2>&1; then
|
||||
SUDO="sudo -E"
|
||||
else
|
||||
say \"sudo\" is not available, will use \"su\" for installation steps...
|
||||
SUDO=SuSudo
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
if [ "$1" = "--cb-auto-has-root" ]; then
|
||||
shift 1
|
||||
else
|
||||
SetRootAuthMechanism
|
||||
if [ -n "$SUDO" ]; then
|
||||
echo "Requesting to rerun $0 with root privileges..."
|
||||
$SUDO "$0" --cb-auto-has-root "$@"
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
|
||||
# Runs this script again with the given arguments. --cb-auto-has-root is added
|
||||
# to the command line arguments to ensure we don't try to acquire root a
|
||||
# second time. After the script is rerun, we exit the current script.
|
||||
RerunWithArgs() {
|
||||
"$0" --cb-auto-has-root "$@"
|
||||
exit 0
|
||||
}
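The --cb-auto-has-root flag acts as a sentinel: the script re-executes itself once with root privileges and the flag, and the flagged invocation skips escalation. A minimal sketch of that pattern in Python (the flag name and commands here are illustrative, not certbot code):

    import os
    import sys

    SENTINEL = "--has-root"  # hypothetical flag, analogous to --cb-auto-has-root

    def ensure_root():
        # Re-exec once under sudo; the privileged child sees the sentinel and returns.
        if SENTINEL in sys.argv:
            sys.argv.remove(SENTINEL)
            return
        if os.geteuid() != 0:
            os.execvp("sudo", ["sudo", sys.executable, sys.argv[0], SENTINEL] + sys.argv[1:])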
|
||||
|
||||
BootstrapMessage() {
|
||||
# Arguments: Platform name
|
||||
say "Bootstrapping dependencies for $1... (you can skip this with --no-bootstrap)"
|
||||
@@ -200,6 +226,25 @@ ExperimentalBootstrap() {
|
||||
fi
|
||||
}
|
||||
|
||||
DeprecationBootstrap() {
|
||||
# Arguments: Platform name, bootstrap function name
|
||||
if [ "$DEBUG" = 1 ]; then
|
||||
if [ "$2" != "" ]; then
|
||||
BootstrapMessage $1
|
||||
$2
|
||||
fi
|
||||
else
|
||||
error "WARNING: certbot-auto support for this $1 is DEPRECATED!"
|
||||
error "Please visit certbot.eff.org to learn how to download a version of"
|
||||
error "Certbot that is packaged for your system. While an existing version"
|
||||
error "of certbot-auto may work currently, we have stopped supporting updating"
|
||||
error "system packages for your system. Please switch to a packaged version"
|
||||
error "as soon as possible."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
DeterminePythonVersion() {
|
||||
for LE_PYTHON in "$LE_PYTHON" python2.7 python27 python2 python; do
|
||||
# Break (while keeping the LE_PYTHON value) if found.
|
||||
@@ -219,6 +264,10 @@ DeterminePythonVersion() {
|
||||
fi
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapDebCommon below, this version
|
||||
# number must be increased.
|
||||
BOOTSTRAP_DEB_COMMON_VERSION=1
|
||||
|
||||
BootstrapDebCommon() {
|
||||
# Current version tested with:
|
||||
#
|
||||
@@ -242,7 +291,7 @@ BootstrapDebCommon() {
|
||||
QUIET_FLAG='-qq'
|
||||
fi
|
||||
|
||||
$SUDO apt-get $QUIET_FLAG update || error apt-get update hit problems but continuing anyway...
|
||||
apt-get $QUIET_FLAG update || error apt-get update hit problems but continuing anyway...
|
||||
|
||||
# virtualenv binary can be found in different packages depending on
|
||||
# distro version (#346)
|
||||
@@ -292,13 +341,13 @@ BootstrapDebCommon() {
|
||||
esac
|
||||
fi
|
||||
if [ "$add_backports" = 1 ]; then
|
||||
$SUDO sh -c "echo $BACKPORT_SOURCELINE >> /etc/apt/sources.list.d/$BACKPORT_NAME.list"
|
||||
$SUDO apt-get $QUIET_FLAG update
|
||||
sh -c "echo $BACKPORT_SOURCELINE >> /etc/apt/sources.list.d/$BACKPORT_NAME.list"
|
||||
apt-get $QUIET_FLAG update
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
if [ "$add_backports" != 0 ]; then
|
||||
$SUDO apt-get install $QUIET_FLAG $YES_FLAG --no-install-recommends -t "$BACKPORT_NAME" $augeas_pkg
|
||||
apt-get install $QUIET_FLAG $YES_FLAG --no-install-recommends -t "$BACKPORT_NAME" $augeas_pkg
|
||||
augeas_pkg=
|
||||
fi
|
||||
}
|
||||
@@ -317,7 +366,7 @@ BootstrapDebCommon() {
|
||||
# XXX add a case for ubuntu PPAs
|
||||
fi
|
||||
|
||||
$SUDO apt-get install $QUIET_FLAG $YES_FLAG --no-install-recommends \
|
||||
apt-get install $QUIET_FLAG $YES_FLAG --no-install-recommends \
|
||||
python \
|
||||
python-dev \
|
||||
$virtualenv \
|
||||
@@ -335,6 +384,10 @@ BootstrapDebCommon() {
|
||||
fi
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapRpmCommon below, this version
|
||||
# number must be increased.
|
||||
BOOTSTRAP_RPM_COMMON_VERSION=1
|
||||
|
||||
BootstrapRpmCommon() {
|
||||
# Tested with:
|
||||
# - Fedora 20, 21, 22, 23 (x64)
|
||||
@@ -361,9 +414,9 @@ BootstrapRpmCommon() {
|
||||
QUIET_FLAG='--quiet'
|
||||
fi
|
||||
|
||||
if ! $SUDO $tool list *virtualenv >/dev/null 2>&1; then
|
||||
if ! $tool list *virtualenv >/dev/null 2>&1; then
|
||||
echo "To use Certbot, packages from the EPEL repository need to be installed."
|
||||
if ! $SUDO $tool list epel-release >/dev/null 2>&1; then
|
||||
if ! $tool list epel-release >/dev/null 2>&1; then
|
||||
error "Enable the EPEL repository and try running Certbot again."
|
||||
exit 1
|
||||
fi
|
||||
@@ -375,7 +428,7 @@ BootstrapRpmCommon() {
|
||||
/bin/echo -e "\e[0K\rEnabling the EPEL repository in 1 seconds..."
|
||||
sleep 1s
|
||||
fi
|
||||
if ! $SUDO $tool install $yes_flag $QUIET_FLAG epel-release; then
|
||||
if ! $tool install $yes_flag $QUIET_FLAG epel-release; then
|
||||
error "Could not enable EPEL. Aborting bootstrap!"
|
||||
exit 1
|
||||
fi
|
||||
@@ -391,9 +444,8 @@ BootstrapRpmCommon() {
|
||||
ca-certificates
|
||||
"
|
||||
|
||||
# Some distros and older versions of current distros use a "python27"
|
||||
# instead of "python" naming convention. Try both conventions.
|
||||
if $SUDO $tool list python >/dev/null 2>&1; then
|
||||
# Most RPM distros use the "python" or "python-" naming convention. Let's try that first.
|
||||
if $tool list python >/dev/null 2>&1; then
|
||||
pkgs="$pkgs
|
||||
python
|
||||
python-devel
|
||||
@@ -401,6 +453,20 @@ BootstrapRpmCommon() {
|
||||
python-tools
|
||||
python-pip
|
||||
"
|
||||
# Fedora 26 starts to use the prefix python2 for python2-based packages.
# This elif branch should, in theory, cover any Fedora release from 26 onwards:
|
||||
elif $tool list python2 >/dev/null 2>&1; then
|
||||
pkgs="$pkgs
|
||||
python2
|
||||
python2-libs
|
||||
python2-setuptools
|
||||
python2-devel
|
||||
python2-virtualenv
|
||||
python2-tools
|
||||
python2-pip
|
||||
"
|
||||
# Some distros and older versions of current distros use a "python27"
|
||||
# instead of the "python" or "python-" naming convention.
|
||||
else
|
||||
pkgs="$pkgs
|
||||
python27
|
||||
@@ -411,18 +477,22 @@ BootstrapRpmCommon() {
|
||||
"
|
||||
fi
|
||||
|
||||
if $SUDO $tool list installed "httpd" >/dev/null 2>&1; then
|
||||
if $tool list installed "httpd" >/dev/null 2>&1; then
|
||||
pkgs="$pkgs
|
||||
mod_ssl
|
||||
"
|
||||
fi
|
||||
|
||||
if ! $SUDO $tool install $yes_flag $QUIET_FLAG $pkgs; then
|
||||
if ! $tool install $yes_flag $QUIET_FLAG $pkgs; then
|
||||
error "Could not install OS dependencies. Aborting bootstrap!"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapSuseCommon below, this version
|
||||
# number must be increased.
|
||||
BOOTSTRAP_SUSE_COMMON_VERSION=1
|
||||
|
||||
BootstrapSuseCommon() {
|
||||
# SLE12 don't have python-virtualenv
|
||||
|
||||
@@ -435,7 +505,7 @@ BootstrapSuseCommon() {
|
||||
QUIET_FLAG='-qq'
|
||||
fi
|
||||
|
||||
$SUDO zypper $QUIET_FLAG $zypper_flags in $install_flags \
|
||||
zypper $QUIET_FLAG $zypper_flags in $install_flags \
|
||||
python \
|
||||
python-devel \
|
||||
python-virtualenv \
|
||||
@@ -446,6 +516,10 @@ BootstrapSuseCommon() {
|
||||
ca-certificates
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapArchCommon below, this version
|
||||
# number must be increased.
|
||||
BOOTSTRAP_ARCH_COMMON_VERSION=1
|
||||
|
||||
BootstrapArchCommon() {
|
||||
# Tested with:
|
||||
# - ArchLinux (x86_64)
|
||||
@@ -466,21 +540,25 @@ BootstrapArchCommon() {
|
||||
"
|
||||
|
||||
# pacman -T exits with 127 if there are missing dependencies
|
||||
missing=$($SUDO pacman -T $deps) || true
|
||||
missing=$(pacman -T $deps) || true
|
||||
|
||||
if [ "$ASSUME_YES" = 1 ]; then
|
||||
noconfirm="--noconfirm"
|
||||
fi
|
||||
|
||||
if [ "$missing" ]; then
|
||||
if [ "$QUIET" = 1]; then
|
||||
$SUDO pacman -S --needed $missing $noconfirm > /dev/null
|
||||
if [ "$QUIET" = 1 ]; then
|
||||
pacman -S --needed $missing $noconfirm > /dev/null
|
||||
else
|
||||
$SUDO pacman -S --needed $missing $noconfirm
|
||||
pacman -S --needed $missing $noconfirm
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapGentooCommon below, this version
|
||||
# number must be increased.
|
||||
BOOTSTRAP_GENTOO_COMMON_VERSION=1
|
||||
|
||||
BootstrapGentooCommon() {
|
||||
PACKAGES="
|
||||
dev-lang/python:2.7
|
||||
@@ -498,29 +576,37 @@ BootstrapGentooCommon() {
|
||||
|
||||
case "$PACKAGE_MANAGER" in
|
||||
(paludis)
|
||||
$SUDO cave resolve --preserve-world --keep-targets if-possible $PACKAGES -x
|
||||
cave resolve --preserve-world --keep-targets if-possible $PACKAGES -x
|
||||
;;
|
||||
(pkgcore)
|
||||
$SUDO pmerge --noreplace --oneshot $ASK_OPTION $PACKAGES
|
||||
pmerge --noreplace --oneshot $ASK_OPTION $PACKAGES
|
||||
;;
|
||||
(portage|*)
|
||||
$SUDO emerge --noreplace --oneshot $ASK_OPTION $PACKAGES
|
||||
emerge --noreplace --oneshot $ASK_OPTION $PACKAGES
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapFreeBsd below, this version number
|
||||
# must be increased.
|
||||
BOOTSTRAP_FREEBSD_VERSION=1
|
||||
|
||||
BootstrapFreeBsd() {
|
||||
if [ "$QUIET" = 1 ]; then
|
||||
QUIET_FLAG="--quiet"
|
||||
fi
|
||||
|
||||
$SUDO pkg install -Ay $QUIET_FLAG \
|
||||
pkg install -Ay $QUIET_FLAG \
|
||||
python \
|
||||
py27-virtualenv \
|
||||
augeas \
|
||||
libffi
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapMac below, this version number must
|
||||
# be increased.
|
||||
BOOTSTRAP_MAC_VERSION=1
|
||||
|
||||
BootstrapMac() {
|
||||
if hash brew 2>/dev/null; then
|
||||
say "Using Homebrew to install dependencies..."
|
||||
@@ -529,7 +615,7 @@ BootstrapMac() {
|
||||
elif hash port 2>/dev/null; then
|
||||
say "Using MacPorts to install dependencies..."
|
||||
pkgman=port
|
||||
pkgcmd="$SUDO port install"
|
||||
pkgcmd="port install"
|
||||
else
|
||||
say "No Homebrew/MacPorts; installing Homebrew..."
|
||||
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
|
||||
@@ -549,8 +635,8 @@ BootstrapMac() {
|
||||
# Workaround for _dlopen not finding augeas on macOS
|
||||
if [ "$pkgman" = "port" ] && ! [ -e "/usr/local/lib/libaugeas.dylib" ] && [ -e "/opt/local/lib/libaugeas.dylib" ]; then
|
||||
say "Applying augeas workaround"
|
||||
$SUDO mkdir -p /usr/local/lib/
|
||||
$SUDO ln -s /opt/local/lib/libaugeas.dylib /usr/local/lib/
|
||||
mkdir -p /usr/local/lib/
|
||||
ln -s /opt/local/lib/libaugeas.dylib /usr/local/lib/
|
||||
fi
|
||||
|
||||
if ! hash pip 2>/dev/null; then
|
||||
@@ -566,17 +652,25 @@ BootstrapMac() {
|
||||
fi
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapSmartOS below, this version number
|
||||
# must be increased.
|
||||
BOOTSTRAP_SMARTOS_VERSION=1
|
||||
|
||||
BootstrapSmartOS() {
|
||||
pkgin update
|
||||
pkgin -y install 'gcc49' 'py27-augeas' 'py27-virtualenv'
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapMageiaCommon below, this version
|
||||
# number must be increased.
|
||||
BOOTSTRAP_MAGEIA_COMMON_VERSION=1
|
||||
|
||||
BootstrapMageiaCommon() {
|
||||
if [ "$QUIET" = 1 ]; then
|
||||
QUIET_FLAG='--quiet'
|
||||
fi
|
||||
|
||||
if ! $SUDO urpmi --force $QUIET_FLAG \
|
||||
if ! urpmi --force $QUIET_FLAG \
|
||||
python \
|
||||
libpython-devel \
|
||||
python-virtualenv
|
||||
@@ -585,7 +679,7 @@ BootstrapMageiaCommon() {
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! $SUDO urpmi --force $QUIET_FLAG \
|
||||
if ! urpmi --force $QUIET_FLAG \
|
||||
git \
|
||||
gcc \
|
||||
python-augeas \
|
||||
@@ -599,23 +693,41 @@ BootstrapMageiaCommon() {
|
||||
}
|
||||
|
||||
|
||||
# Install required OS packages:
|
||||
Bootstrap() {
|
||||
if [ "$NO_BOOTSTRAP" = 1 ]; then
|
||||
return
|
||||
elif [ -f /etc/debian_version ]; then
|
||||
# Set Bootstrap to the function that installs OS dependencies on this system
|
||||
# and BOOTSTRAP_VERSION to the unique identifier for the current version of
|
||||
# that function. If Bootstrap is set to a function that doesn't install any
|
||||
# packages (either because --no-bootstrap was included on the command line or
|
||||
# we don't know how to bootstrap on this system), BOOTSTRAP_VERSION is not set.
|
||||
if [ "$NO_BOOTSTRAP" = 1 ]; then
|
||||
Bootstrap() {
|
||||
:
|
||||
}
|
||||
elif [ -f /etc/debian_version ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "Debian-based OSes"
|
||||
BootstrapDebCommon
|
||||
elif [ -f /etc/mageia-release ]; then
|
||||
# Mageia has both /etc/mageia-release and /etc/redhat-release
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapDebCommon $BOOTSTRAP_DEB_COMMON_VERSION"
|
||||
elif [ -f /etc/mageia-release ]; then
|
||||
# Mageia has both /etc/mageia-release and /etc/redhat-release
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Mageia" BootstrapMageiaCommon
|
||||
elif [ -f /etc/redhat-release ]; then
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapMageiaCommon $BOOTSTRAP_MAGEIA_COMMON_VERSION"
|
||||
elif [ -f /etc/redhat-release ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes"
|
||||
BootstrapRpmCommon
|
||||
elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "openSUSE-based OSes"
|
||||
BootstrapSuseCommon
|
||||
elif [ -f /etc/arch-release ]; then
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapSuseCommon $BOOTSTRAP_SUSE_COMMON_VERSION"
|
||||
elif [ -f /etc/arch-release ]; then
|
||||
Bootstrap() {
|
||||
if [ "$DEBUG" = 1 ]; then
|
||||
BootstrapMessage "Archlinux"
|
||||
BootstrapArchCommon
|
||||
@@ -627,25 +739,76 @@ Bootstrap() {
|
||||
error "--debug flag."
|
||||
exit 1
|
||||
fi
|
||||
elif [ -f /etc/manjaro-release ]; then
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
|
||||
elif [ -f /etc/manjaro-release ]; then
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Manjaro Linux" BootstrapArchCommon
|
||||
elif [ -f /etc/gentoo-release ]; then
|
||||
ExperimentalBootstrap "Gentoo" BootstrapGentooCommon
|
||||
elif uname | grep -iq FreeBSD ; then
|
||||
ExperimentalBootstrap "FreeBSD" BootstrapFreeBsd
|
||||
elif uname | grep -iq Darwin ; then
|
||||
ExperimentalBootstrap "macOS" BootstrapMac
|
||||
elif [ -f /etc/issue ] && grep -iq "Amazon Linux" /etc/issue ; then
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapArchCommon $BOOTSTRAP_ARCH_COMMON_VERSION"
|
||||
elif [ -f /etc/gentoo-release ]; then
|
||||
Bootstrap() {
|
||||
DeprecationBootstrap "Gentoo" BootstrapGentooCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapGentooCommon $BOOTSTRAP_GENTOO_COMMON_VERSION"
|
||||
elif uname | grep -iq FreeBSD ; then
|
||||
Bootstrap() {
|
||||
DeprecationBootstrap "FreeBSD" BootstrapFreeBsd
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapFreeBsd $BOOTSTRAP_FREEBSD_VERSION"
|
||||
elif uname | grep -iq Darwin ; then
|
||||
Bootstrap() {
|
||||
DeprecationBootstrap "macOS" BootstrapMac
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapMac $BOOTSTRAP_MAC_VERSION"
|
||||
elif [ -f /etc/issue ] && grep -iq "Amazon Linux" /etc/issue ; then
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Amazon Linux" BootstrapRpmCommon
|
||||
elif [ -f /etc/product ] && grep -q "Joyent Instance" /etc/product ; then
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
elif [ -f /etc/product ] && grep -q "Joyent Instance" /etc/product ; then
|
||||
Bootstrap() {
|
||||
ExperimentalBootstrap "Joyent SmartOS Zone" BootstrapSmartOS
|
||||
else
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapSmartOS $BOOTSTRAP_SMARTOS_VERSION"
|
||||
else
|
||||
Bootstrap() {
|
||||
error "Sorry, I don't know how to bootstrap Certbot on your operating system!"
|
||||
error
|
||||
error "You will need to install OS dependencies, configure virtualenv, and run pip install manually."
|
||||
error "Please see https://letsencrypt.readthedocs.org/en/latest/contributing.html#prerequisites"
|
||||
error "for more info."
|
||||
exit 1
|
||||
}
|
||||
fi
|
||||
|
||||
# Sets PREV_BOOTSTRAP_VERSION to the identifier for the bootstrap script used
|
||||
# to install OS dependencies on this system. PREV_BOOTSTRAP_VERSION isn't set
|
||||
# if it is unknown how OS dependencies were installed on this system.
|
||||
SetPrevBootstrapVersion() {
|
||||
if [ -f $BOOTSTRAP_VERSION_PATH ]; then
|
||||
PREV_BOOTSTRAP_VERSION=$(cat "$BOOTSTRAP_VERSION_PATH")
|
||||
# The list below only contains bootstrap version strings that existed before
|
||||
# we started writing them to disk.
|
||||
#
|
||||
# DO NOT MODIFY THIS LIST UNLESS YOU KNOW WHAT YOU'RE DOING!
|
||||
elif grep -Fqx "$BOOTSTRAP_VERSION" << "UNLIKELY_EOF"
|
||||
BootstrapDebCommon 1
|
||||
BootstrapMageiaCommon 1
|
||||
BootstrapRpmCommon 1
|
||||
BootstrapSuseCommon 1
|
||||
BootstrapArchCommon 1
|
||||
BootstrapGentooCommon 1
|
||||
BootstrapFreeBsd 1
|
||||
BootstrapMac 1
|
||||
BootstrapSmartOS 1
|
||||
UNLIKELY_EOF
|
||||
then
|
||||
# If there's no bootstrap version saved to disk, but the currently selected
|
||||
# bootstrap script is from before we started saving the version number,
|
||||
# return the currently selected version to prevent us from rebootstrapping
|
||||
# unnecessarily.
|
||||
PREV_BOOTSTRAP_VERSION="$BOOTSTRAP_VERSION"
|
||||
fi
|
||||
}
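Taken together, the pieces above implement a simple contract: record which bootstrap function and version installed the OS dependencies, and re-bootstrap only when that identifier changes. A condensed sketch of that decision (the legacy-list special case handled by SetPrevBootstrapVersion is omitted; the identifier format, e.g. "BootstrapDebCommon 1", follows the script):

    import os

    def needs_rebootstrap(bootstrap_version, version_path):
        # No-op bootstrap selected (e.g. --no-bootstrap): nothing to compare.
        if not bootstrap_version:
            return False
        # No record of how dependencies were installed: bootstrap again to be safe.
        if not os.path.isfile(version_path):
            return True
        with open(version_path) as verfile:
            return verfile.read().strip() != bootstrap_version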
|
||||
|
||||
@@ -659,18 +822,38 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
# Phase 2: Create venv, install LE, and run.
|
||||
|
||||
shift 1 # the --le-auto-phase2 arg
|
||||
if [ -f "$VENV_BIN/letsencrypt" ]; then
|
||||
# --version output ran through grep due to python-cryptography DeprecationWarnings
|
||||
# grep for both certbot and letsencrypt until certbot and shim packages have been released
|
||||
INSTALLED_VERSION=$("$VENV_BIN/letsencrypt" --version 2>&1 | grep "^certbot\|^letsencrypt" | cut -d " " -f 2)
|
||||
if [ -z "$INSTALLED_VERSION" ]; then
|
||||
error "Error: couldn't get currently installed version for $VENV_BIN/letsencrypt: " 1>&2
|
||||
"$VENV_BIN/letsencrypt" --version
|
||||
exit 1
|
||||
SetPrevBootstrapVersion
|
||||
|
||||
INSTALLED_VERSION="none"
|
||||
if [ -d "$VENV_PATH" ]; then
|
||||
# If the selected Bootstrap function isn't a noop and it differs from the
|
||||
# previously used version
|
||||
if [ -n "$BOOTSTRAP_VERSION" -a "$BOOTSTRAP_VERSION" != "$PREV_BOOTSTRAP_VERSION" ]; then
|
||||
# if non-interactive mode or stdin and stdout are connected to a terminal
|
||||
if [ \( "$NONINTERACTIVE" = 1 \) -o \( \( -t 0 \) -a \( -t 1 \) \) ]; then
|
||||
rm -rf "$VENV_PATH"
|
||||
RerunWithArgs "$@"
|
||||
else
|
||||
error "Skipping upgrade because new OS dependencies may need to be installed."
|
||||
error
|
||||
error "To upgrade to a newer version, please run this script again manually so you can"
|
||||
error "approve changes or with --non-interactive on the command line to automatically"
|
||||
error "install any required packages."
|
||||
# Set INSTALLED_VERSION to be the same so we don't update the venv
|
||||
INSTALLED_VERSION="$LE_AUTO_VERSION"
|
||||
fi
|
||||
elif [ -f "$VENV_BIN/letsencrypt" ]; then
|
||||
# --version output ran through grep due to python-cryptography DeprecationWarnings
|
||||
# grep for both certbot and letsencrypt until certbot and shim packages have been released
|
||||
INSTALLED_VERSION=$("$VENV_BIN/letsencrypt" --version 2>&1 | grep "^certbot\|^letsencrypt" | cut -d " " -f 2)
|
||||
if [ -z "$INSTALLED_VERSION" ]; then
|
||||
error "Error: couldn't get currently installed version for $VENV_BIN/letsencrypt: " 1>&2
|
||||
"$VENV_BIN/letsencrypt" --version
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
else
|
||||
INSTALLED_VERSION="none"
|
||||
fi
|
||||
|
||||
if [ "$LE_AUTO_VERSION" != "$INSTALLED_VERSION" ]; then
|
||||
say "Creating virtual environment..."
|
||||
DeterminePythonVersion
|
||||
@@ -681,6 +864,12 @@ if [ "$1" = "--le-auto-phase2" ]; then
virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH" > /dev/null
fi

if [ -n "$BOOTSTRAP_VERSION" ]; then
echo "$BOOTSTRAP_VERSION" > "$BOOTSTRAP_VERSION_PATH"
elif [ -n "$PREV_BOOTSTRAP_VERSION" ]; then
echo "$PREV_BOOTSTRAP_VERSION" > "$BOOTSTRAP_VERSION_PATH"
fi

say "Installing Python packages..."
TEMP_DIR=$(TempDir)
trap 'rm -rf "$TEMP_DIR"' EXIT
@@ -710,54 +899,78 @@ pycparser==2.14 \
|
||||
asn1crypto==0.22.0 \
|
||||
--hash=sha256:d232509fefcfcdb9a331f37e9c9dc20441019ad927c7d2176cf18ed5da0ba097 \
|
||||
--hash=sha256:cbbadd640d3165ab24b06ef25d1dca09a3441611ac15f6a6b452474fdf0aed1a
|
||||
cffi==1.4.2 \
|
||||
--hash=sha256:53c1c9ddb30431513eb7f3cdef0a3e06b0f1252188aaa7744af0f5a4cd45dbaf \
|
||||
--hash=sha256:a568f49dfca12a8d9f370187257efc58a38109e1eee714d928561d7a018a64f8 \
|
||||
--hash=sha256:809c6ca8cfbcaeebfbd432b4576001b40d38ff2463773cb57577d75e1a020bc3 \
|
||||
--hash=sha256:86cdca2cd9cba41422230390df17dfeaa9f344a911e3975c8be9da57b35548e9 \
|
||||
--hash=sha256:24b13db84aec385ca23c7b8ded83ef8bb4177bc181d14758f9f975be5d020d86 \
|
||||
--hash=sha256:969aeffd7c0e097f6be1efd682c156ae226591a0793a94b6c2d5e4293f4c8d4e \
|
||||
--hash=sha256:000f358d4b0fa249feaab9c1ce7d5b2fe7e02e7bdf6806c26418505fc685e268 \
|
||||
--hash=sha256:a9d86f460bbd8358a2d513ad779e3f3fc878e3b93a00b5002faebf616ffe6b9c \
|
||||
--hash=sha256:3127b3ab33eb23ccac071f9a0802748e5cf7c5cbcd02482bb063e35b41dbb0b0 \
|
||||
--hash=sha256:e2b2d42236469a40224d39e7b6c60575f388b2f423f354c7ee90a5b7f58c8065 \
|
||||
--hash=sha256:8c2dccafee89b1b424b0bec6ad2dd9622c949d2024e929f5da1ed801eac75f1d \
|
||||
--hash=sha256:a4de7a4d11aed488bab4fb14f4988587a829bece5a20433f780d6e33b08083cb \
|
||||
--hash=sha256:5ca8fe30425265a49274e4b0213a1bc98f4b13449ae5e96f984771e5d83e58c1 \
|
||||
--hash=sha256:a4fd38802f59e714eba81a024f62db710b27dbe27a7ea12e911537327aa84d30 \
|
||||
--hash=sha256:86cd6912bbc83e9405d4a73cd7f4b4ee8353652d2dbc7c820106ed5b4d1bab3a \
|
||||
--hash=sha256:8f1d177d364ea35900415ae24ca3e471be3d5334ed0419294068c49f45913998
|
||||
ConfigArgParse==0.10.0 \
|
||||
--hash=sha256:3b50a83dd58149dfcee98cb6565265d10b53e9c0a2bca7eeef7fb5f5524890a7
|
||||
cffi==1.10.0 \
|
||||
--hash=sha256:446699c10f3c390633d0722bc19edbc7ac4b94761918a4a4f7908a24e86ebbd0 \
|
||||
--hash=sha256:562326fc7f55a59ef3fef5e82908fe938cdc4bbda32d734c424c7cd9ed73e93a \
|
||||
--hash=sha256:7f732ad4a30db0b39400c3f7011249f7d0701007d511bf09604729aea222871f \
|
||||
--hash=sha256:94fb8410c6c4fc48e7ea759d3d1d9ca561171a88d00faddd4aa0306f698ad6a0 \
|
||||
--hash=sha256:587a5043df4b00a2130e09fed42da02a4ed3c688bd9bf07a3ac89d2271f4fb07 \
|
||||
--hash=sha256:ec08b88bef627ec1cea210e1608c85d3cf44893bcde74e41b7f7dbdfd2c1bad6 \
|
||||
--hash=sha256:a41406f6d62abcdf3eef9fd998d8dcff04fd2a7746644143045feeebd76352d1 \
|
||||
--hash=sha256:b560916546b2f209d74b82bdbc3223cee9a165b0242fa00a06dfc48a2054864a \
|
||||
--hash=sha256:e74896774e437f4715c57edeb5cf3d3a40d7727f541c2c12156617b5a15d1829 \
|
||||
--hash=sha256:9a31c18ba4881a116e448c52f3f5d3e14401cf7a9c43cc88f06f2a7f5428da0e \
|
||||
--hash=sha256:80796ea68e11624a0279d3b802f88a7fe7214122b97a15a6c97189934a2cc776 \
|
||||
--hash=sha256:f4019826a2dec066c909a1f483ef0dcf9325d6740cc0bd15308942b28b0930f7 \
|
||||
--hash=sha256:7248506981eeba23888b4140a69a53c4c0c0a386abcdca61ed8dd790a73e64b9 \
|
||||
--hash=sha256:a8955265d146e86fe2ce116394be4eaf0cb40314a79b19f11c4fa574cd639572 \
|
||||
--hash=sha256:c49187260043bd4c1d6a52186f9774f17d9b1da0a406798ebf4bfc12da166ade \
|
||||
--hash=sha256:c1d8b3d8dcb5c23ac1a8bf56422036f3f305a3c5a8bc8c354256579a1e2aa2c1 \
|
||||
--hash=sha256:9e389615bcecb8c782a87939d752340bb0a3a097e90bae54d7f0915bc12f45bd \
|
||||
--hash=sha256:d09ff358f75a874f69fa7d1c2b4acecf4282a950293fcfcf89aa606da8a9a500 \
|
||||
--hash=sha256:b69b4557aae7de18b7c174a917fe19873529d927ac592762d9771661875bbd40 \
|
||||
--hash=sha256:5de52b081a2775e76b971de9d997d85c4457fc0a09079e12d66849548ae60981 \
|
||||
--hash=sha256:e7d88fecb7b6250a1fd432e6dc64890342c372fce13dbfe4bb6f16348ad00c14 \
|
||||
--hash=sha256:1426e67e855ef7f5030c9184f4f1a9f4bfa020c31c962cd41fd129ec5aef4a6a \
|
||||
--hash=sha256:267dd2c66a5760c5f4d47e2ebcf8eeac7ef01e1ae6ae7a6d0d241a290068bc38 \
|
||||
--hash=sha256:e553eb489511cacf19eda6e52bc9e151316f0d721724997dda2c4d3079b778db \
|
||||
--hash=sha256:98b89b2c57f97ce2db7aeba60db173c84871d73b40e41a11ea95de1500ddc57e \
|
||||
--hash=sha256:e2b7e090188833bc58b2ae03fb864c22688654ebd2096bcf38bc860c4f38a3d8 \
|
||||
--hash=sha256:afa7d8b8d38ad40db8713ee053d41b36d87d6ae5ec5ad36f9210b548a18dc214 \
|
||||
--hash=sha256:4fc9c2ff7924b3a1fa326e1799e5dd58cac585d7fb25fe53ccaa1333b0453d65 \
|
||||
--hash=sha256:937db39a1ec5af3003b16357b2042bba67c88d43bc11aaa203fa8a5924524209 \
|
||||
--hash=sha256:ab22285797631df3b513b2cd3ecdc51cd8e3d36788e3991d93d0759d6883b027 \
|
||||
--hash=sha256:96e599b924ef009aa867f725b3249ee51d76489f484d3a45b4bd219c5ec6ed59 \
|
||||
--hash=sha256:bea842a0512be6a8007e585790bccd5d530520fc025ce63b03e139be373b0063 \
|
||||
--hash=sha256:e7175287f7fe7b1cc203bb958b17db40abd732690c1e18e700f10e0843a58598 \
|
||||
--hash=sha256:285ab352552f52f1398c912556d4d36d4ea9b8450e5c65d03809bf9886755533 \
|
||||
--hash=sha256:5576644b859197da7bbd8f8c7c2fb5dcc6cd505cadb42992d5f104c013f8a214 \
|
||||
--hash=sha256:b3b02911eb1f6ada203b0763ba924234629b51586f72a21faacc638269f4ced5
|
||||
ConfigArgParse==0.12.0 \
|
||||
--hash=sha256:28cd7d67669651f2a4518367838c49539457504584a139709b2b8f6c208ef339
|
||||
configobj==5.0.6 \
|
||||
--hash=sha256:a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902
|
||||
cryptography==1.8.2 \
|
||||
--hash=sha256:e572527dc4eae300d4ac58c3a49fd0fe1a0178baf341f536d22c45455c958410 \
|
||||
--hash=sha256:15448bcfc4ef0f58c8e049f06cb10c296d75456ced02466dff3c82cc9c85f0a6 \
|
||||
--hash=sha256:771171a4b7677ee791f74928030fb59ca83a1710d32eaec8395c5170fc520741 \
|
||||
--hash=sha256:06e47faef940bc53ca23f5c6a29f5d4ebc47f0c7632f356da8ce4cc3ae99e908 \
|
||||
--hash=sha256:730a4f2c028b33b3e6b1a3caa7a3048a1e1e6ff2fe9043acdb21b31a2e711742 \
|
||||
--hash=sha256:1bf2299033c5a517014ffd5ec2e267f6a220d9095e75dd002dc33a898a7857cc \
|
||||
--hash=sha256:5e7249bc0360d834b89631e83c1a7bbb28098b59fab9816e5e19efdef7b71a1c \
|
||||
--hash=sha256:3971bdb5054257c922d95839d10ad347dcaa7137efeed34ce33ee660ea52b7e2 \
|
||||
--hash=sha256:a8b431f82972cec974766a484eba02d7bbf6a5c042c13c25f1a23d4a3a31bfb4 \
|
||||
--hash=sha256:a9281f0292131747f94219793438d78823bb72fbcafd1b415e99af1d8c42e11c \
|
||||
--hash=sha256:502237c0ed9ca77212cf1673627fd2c361ee989cdde2ac54a0cd3f17cbc79e5a \
|
||||
--hash=sha256:c63625ec36d1615fff69b068a95ea038d5f8df961901a097dfedc7e7410794d5 \
|
||||
--hash=sha256:bda0a32d99ee1f86fcd46bbb10f43216f101df3349187ea8999967cddbfede86 \
|
||||
--hash=sha256:a4a69088671eb31aa292a5996d9dd7a4ccb585b6fc7eb7b9e47051e1169fc479 \
|
||||
--hash=sha256:0f7127b0034d5112b190de6bf46fadc41940983a91836acfdaa16c44f44beb75 \
|
||||
--hash=sha256:9049c073f86155dfcd93434d63db80f753cd2e5bebf1d6172b112de215369b07 \
|
||||
--hash=sha256:264dd80150f24a6fffb3ce5be32705e6a27160df055b3582925c2ed170f4f430 \
|
||||
--hash=sha256:a05f899c311f6810ae4981edcd23d1587be207de554cf0530f8facbe836483cb \
|
||||
--hash=sha256:91970de4b3dbf0b9b36745e9f346265d225d906188dec3d02c2179fbdb49b167 \
|
||||
--hash=sha256:908cd59ae3c177c28e7a3eb519dade45748ba9af9959796c699f4f1b56caea8d \
|
||||
--hash=sha256:f04fd7c4b7b4c0b97186f31a315fe88d20087a7148ff06a9c0348b35e39531f8 \
|
||||
--hash=sha256:757dd412a49ea396b52b051c364bf8f9262dfa6665270f68208a0d6ed5999f1b \
|
||||
--hash=sha256:7d6ab4507cf52328b27c57107491b2699a5e25097213a2d201fab0157cb5dd09 \
|
||||
--hash=sha256:e3183550d129b7103914cad049a3b2358d9405c6e4baf3a7c92a82004f5e4814 \
|
||||
--hash=sha256:9d1f63e2f4530a919ef87b4f1b3d814389604486f8b8c090aefccace4d1f98f8 \
|
||||
--hash=sha256:8e88ebac371a388024dab3ccf393bf3c1790d21bc3c299d5a6f9f83fb823beda
|
||||
cryptography==2.0.2 \
|
||||
--hash=sha256:187ae17358436d2c760f28c2aeb02fefa3f37647a9c5b6f7f7c3e83cd1c5a972 \
|
||||
--hash=sha256:19e43a13bbf52028dd1e810c803f2ad8880d0692d772f98d42e1eaf34bdee3d6 \
|
||||
--hash=sha256:da9291502cbc87dc0284a20c56876e4d2e68deac61cc43df4aec934e44ca97b1 \
|
||||
--hash=sha256:0954f8813095f581669330e0a2d5e726c33ac7f450c1458fac58bab54595e516 \
|
||||
--hash=sha256:d68b0cc40a8432ed3fc84876c519de704d6001800ec22b136e75ae841910c45b \
|
||||
--hash=sha256:2f8ad9580ab4da645cfea52a91d2da99a49a1e76616d8be68441a986fad652b0 \
|
||||
--hash=sha256:cc00b4511294f5f6b65c4e77a1a9c62f52490a63d2c120f3872176b40a82351e \
|
||||
--hash=sha256:cf896020f6a9f095a547b3d672c8db1ef2ed71fca11250731fa1d4a4cb8b1590 \
|
||||
--hash=sha256:e0fdb8322206fa02aa38f71519ff75dce2eb481b7e1110e2936795cb376bb6ee \
|
||||
--hash=sha256:277538466657ca5d6637f80be100242f9831d75138b788d718edd3aab34621f8 \
|
||||
--hash=sha256:2c77eb0560f54ce654ab82d6b2a64327a71ee969b29022bf9746ca311c9f5069 \
|
||||
--hash=sha256:755a7853b679e79d0a799351c092a9b0271f95ff54c8dd8823d8b527a2926a86 \
|
||||
--hash=sha256:77197a2d525e761cdd4c771180b4bd0d80703654c6385e4311cbbbe2beb56fa1 \
|
||||
--hash=sha256:eb8bb79d0ab00c931c8333b745f06fec481a51c52d70acd4ee95d6093ba5c386 \
|
||||
--hash=sha256:131f61de82ef28f3e20beb4bfc24f9692d28cecfd704e20e6c7f070f7793013a \
|
||||
--hash=sha256:ac35435974b2e27cd4520f29c191d7da36f4189aa3264e52c4c6c6d089ab6142 \
|
||||
--hash=sha256:04b6ea99daa2a8460728794213d76d45ad58ea247dc7e7ff148d7dd726e87863 \
|
||||
--hash=sha256:2b9442f8b4c3d575f6cc3db0e856034e0f5a9d55ecd636f52d8c496795b26952 \
|
||||
--hash=sha256:b3d3b3ecba1fe1bdb6f180770a137f877c8f07571f7b2934bb269475bcf0e5e8 \
|
||||
--hash=sha256:670a58c0d75cb0e78e73dd003bd96d4440bbb1f2bc041dcf7b81767ca4fb0ce9 \
|
||||
--hash=sha256:5af84d23bdb86b5e90aca263df1424b43f1748480bfcde3ac2a3cbe622612468 \
|
||||
--hash=sha256:ba22e8eefabdd7aca37d0c0c00d2274000d2cebb5cce9e5a710cb55bf8797b31 \
|
||||
--hash=sha256:b798b22fa7e92b439547323b8b719d217f1e1b7677585cfeeedf3b55c70bb7fb \
|
||||
--hash=sha256:59cff28af8cce96cb7e94a459726e1d88f6f5fa75097f9dcbebd99118d64ea4c \
|
||||
--hash=sha256:fe859e445abc9ba9e97950ddafb904e23234c4ecb76b0fae6c86e80592ce464a \
|
||||
--hash=sha256:655f3c474067f1e277430f23cc0549f0b1dc99b82aec6e53f80b9b2db7f76f11 \
|
||||
--hash=sha256:0ebc2be053c9a03a2f3e20a466e87bf12a51586b3c79bd2a22171b073a805346 \
|
||||
--hash=sha256:01e6e60654df64cca53733cda39446d67100c819c181d403afb120e0d2a71e1b \
|
||||
--hash=sha256:d46f4e5d455cb5563685c52ef212696f0a6cc1ea627603218eabbd8a095291d8 \
|
||||
--hash=sha256:3780b2663ee7ebb37cb83263326e3cd7f8b2ea439c448539d4b87de12c8d06ab
|
||||
enum34==1.1.2 \
|
||||
--hash=sha256:2475d7fcddf5951e92ff546972758802de5260bf409319a9f1934e6bbc8b1dc7 \
|
||||
--hash=sha256:35907defb0f992b75ab7788f65fedc1cf20ffa22688e0e6f6f12afc06b3ea501
|
||||
@@ -864,18 +1077,18 @@ letsencrypt==0.7.0 \
|
||||
--hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
|
||||
--hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9
|
||||
|
||||
certbot==0.15.0 \
|
||||
--hash=sha256:f052a1ee9d0e71b73d893c26ee3aa343f6f3abe7de8471437779d541f8bf7824 \
|
||||
--hash=sha256:b8c4043b2b8df39660d4ce4a2a6eca590f98ece0e1b97eba53ab95f3bbac3beb
|
||||
acme==0.15.0 \
|
||||
--hash=sha256:d423a14a8fde089d6854ccbe1314f6a80553ef06799ac6f671b90d8399835e60 \
|
||||
--hash=sha256:9fadd63322a1eb95f58e6cda8ca2095c750e828ae470bc6e3925ef618c7cfc87
|
||||
certbot-apache==0.15.0 \
|
||||
--hash=sha256:07fa99b264e0ea489695cc2a5353f3fe9459422ad549de02c46da24ae546acd9 \
|
||||
--hash=sha256:6da1433381bd2c2ea7c395be57ca1bcdb7c1c04ce3d12b67a3fa207a3bfa41ca
|
||||
certbot-nginx==0.15.0 \
|
||||
--hash=sha256:7b0622f8a9031e24105f9b5c8d98d4ed83ae393517ed35cc2a4fa50098122922 \
|
||||
--hash=sha256:0b98dedb22492d6f88dffdfbd721b4d4c98bfe361df35bc97bf5bd3047f01234
|
||||
certbot==0.19.0 \
|
||||
--hash=sha256:3207ee5319bfc37e855c25a43148275fcfb37869eefde9087405012049734a20 \
|
||||
--hash=sha256:a7230791dff5d085738119fc22d88ad9d8a35d0b6a3d67806fe33990c7c79d53
|
||||
acme==0.19.0 \
|
||||
--hash=sha256:c612eafe234d722d97bb5d3dbc49e5522f44be29611f7577954eb893e5c2d6de \
|
||||
--hash=sha256:1fa23d64d494aaf001e6fe857c461fcfff10f75a1c2c35ec831447f641e1e822
|
||||
certbot-apache==0.19.0 \
|
||||
--hash=sha256:fadb28b33bfabc85cdb962b5b149bef58b98f0606b78581db7895fe38323f37c \
|
||||
--hash=sha256:70306ca2d5be7f542af68d46883c0ae39527cf202f17ef92cd256fb0bc3f1619
|
||||
certbot-nginx==0.19.0 \
|
||||
--hash=sha256:4909cb3db49919fb35590793cac28e1c0b6dbd29cbedf887b9106e5fcef5362c \
|
||||
--hash=sha256:cb5a224a3f277092555c25096d1678fc735306fd3a43447649ebe524c7ca79e1
|
||||
|
||||
UNLIKELY_EOF
|
||||
# -------------------------------------------------------------------------
|
||||
@@ -903,6 +1116,7 @@ anything goes wrong, it will exit with a non-zero status code.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
from __future__ import print_function
|
||||
from distutils.version import StrictVersion
|
||||
from hashlib import sha256
|
||||
from os.path import join
|
||||
from pipes import quote
|
||||
@@ -937,12 +1151,14 @@ except ImportError:
|
||||
from urllib.parse import urlparse # 3.4
|
||||
|
||||
|
||||
__version__ = 1, 1, 1
|
||||
__version__ = 1, 3, 0
|
||||
PIP_VERSION = '9.0.1'
|
||||
|
||||
|
||||
# wheel has a conditional dependency on argparse:
|
||||
maybe_argparse = (
|
||||
[('https://pypi.python.org/packages/source/a/argparse/'
|
||||
[('https://pypi.python.org/packages/18/dd/'
|
||||
'e617cfc3f6210ae183374cd9f6a26b20514bbb5a792af97949c5aacddf0f/'
|
||||
'argparse-1.4.0.tar.gz',
|
||||
'62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4')]
|
||||
if version_info < (2, 7, 0) else [])
|
||||
@@ -950,13 +1166,19 @@ maybe_argparse = (
|
||||
|
||||
PACKAGES = maybe_argparse + [
|
||||
# Pip has no dependencies, as it vendors everything:
|
||||
('https://pypi.python.org/packages/source/p/pip/pip-8.0.3.tar.gz',
|
||||
'30f98b66f3fe1069c529a491597d34a1c224a68640c82caf2ade5f88aa1405e8'),
|
||||
('https://pypi.python.org/packages/11/b6/'
|
||||
'abcb525026a4be042b486df43905d6893fb04f05aac21c32c638e939e447/'
|
||||
'pip-{0}.tar.gz'
|
||||
.format(PIP_VERSION),
|
||||
'09f243e1a7b461f654c26a725fa373211bb7ff17a9300058b205c61658ca940d'),
|
||||
# This version of setuptools has only optional dependencies:
|
||||
('https://pypi.python.org/packages/source/s/setuptools/'
|
||||
('https://pypi.python.org/packages/69/65/'
|
||||
'4c544cde88d4d876cdf5cbc5f3f15d02646477756d89547e9a7ecd6afa76/'
|
||||
'setuptools-20.2.2.tar.gz',
|
||||
'24fcfc15364a9fe09a220f37d2dcedc849795e3de3e4b393ee988e66a9cbd85a'),
|
||||
('https://pypi.python.org/packages/source/w/wheel/wheel-0.29.0.tar.gz',
|
||||
('https://pypi.python.org/packages/c9/1d/'
|
||||
'bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/'
|
||||
'wheel-0.29.0.tar.gz',
|
||||
'1ebb8ad7e26b448e9caa4773d2357849bf80ff9e313964bcaf79cbf0201a1648')
|
||||
]
|
||||
|
||||
@@ -1006,11 +1228,21 @@ def hashed_download(url, temp, digest):


def main():
pip_version = StrictVersion(check_output(['pip', '--version'])
.decode('utf-8').split()[1])
min_pip_version = StrictVersion(PIP_VERSION)
if pip_version >= min_pip_version:
return 0
has_pip_cache = pip_version >= StrictVersion('6.0')

temp = mkdtemp(prefix='pipstrap-')
try:
downloads = [hashed_download(url, temp, digest)
for url, digest in PACKAGES]
check_output('pip install --no-index --no-deps -U ' +
# Disable cache since we're not using it and it otherwise
# sometimes throws permission warnings:
('--no-cache-dir ' if has_pip_cache else '') +
' '.join(quote(d) for d in downloads),
shell=True)
except HashError as exc:
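The `main()` above leans on the `hashed_download` helper (named in the hunk header) to pin every bootstrap package to a known sha256 digest before handing the local files to `pip install --no-index --no-deps`. A minimal sketch of that idea, assuming a simple non-streaming download and reusing the script's `HashError` name purely for illustration (the real pipstrap also keeps Python 2 support via `urllib2`):

```python
from hashlib import sha256
from os.path import join
from urllib.request import urlopen


class HashError(Exception):
    """Raised when a download does not match its pinned sha256 digest."""


def hashed_download(url, temp, digest):
    """Download url into the temp dir, refusing anything whose hash differs."""
    content = urlopen(url).read()
    actual = sha256(content).hexdigest()
    if actual != digest:
        raise HashError('%s had sha256 %s, expected %s' % (url, actual, digest))
    path = join(temp, url.split('/')[-1])
    with open(path, 'wb') as archive:
        archive.write(content)
    return path
```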
@@ -1033,9 +1265,9 @@ UNLIKELY_EOF
PATH="$VENV_BIN:$PATH" "$VENV_BIN/python" "$TEMP_DIR/pipstrap.py"
set +e
if [ "$VERBOSE" = 1 ]; then
"$VENV_BIN/pip" install --no-cache-dir --require-hashes -r "$TEMP_DIR/letsencrypt-auto-requirements.txt"
"$VENV_BIN/pip" install --disable-pip-version-check --no-cache-dir --require-hashes -r "$TEMP_DIR/letsencrypt-auto-requirements.txt"
else
PIP_OUT=`"$VENV_BIN/pip" install --no-cache-dir --require-hashes -r "$TEMP_DIR/letsencrypt-auto-requirements.txt" 2>&1`
PIP_OUT=`"$VENV_BIN/pip" install --disable-pip-version-check --no-cache-dir --require-hashes -r "$TEMP_DIR/letsencrypt-auto-requirements.txt" 2>&1`
fi
PIP_STATUS=$?
set -e
@@ -1069,20 +1301,15 @@ UNLIKELY_EOF
rm -rf "$VENV_PATH"
exit 1
fi

if [ -d "$OLD_VENV_PATH" -a ! -L "$OLD_VENV_PATH" ]; then
rm -rf "$OLD_VENV_PATH"
ln -s "$VENV_PATH" "$OLD_VENV_PATH"
fi

say "Installation succeeded."
fi
if [ -n "$SUDO" ]; then
# SUDO is su wrapper or sudo
say "Requesting root privileges to run certbot..."
say " $VENV_BIN/letsencrypt" "$@"
fi
if [ -z "$SUDO_ENV" ] ; then
# SUDO is su wrapper / noop
$SUDO "$VENV_BIN/letsencrypt" "$@"
else
# sudo
$SUDO "$SUDO_ENV" "$VENV_BIN/letsencrypt" "$@"
fi
"$VENV_BIN/letsencrypt" "$@"

else
# Phase 1: Upgrade certbot-auto if necessary, then self-invoke.
@@ -1093,12 +1320,14 @@ else
|
||||
# package). Phase 2 checks the version of the locally installed certbot.
|
||||
|
||||
if [ ! -f "$VENV_BIN/letsencrypt" ]; then
|
||||
if [ "$HELP" = 1 ]; then
|
||||
echo "$USAGE"
|
||||
exit 0
|
||||
if [ -z "$OLD_VENV_PATH" -o ! -f "$OLD_VENV_PATH/bin/letsencrypt" ]; then
|
||||
if [ "$HELP" = 1 ]; then
|
||||
echo "$USAGE"
|
||||
exit 0
|
||||
fi
|
||||
# If it looks like we've never bootstrapped before, bootstrap:
|
||||
Bootstrap
|
||||
fi
|
||||
# If it looks like we've never bootstrapped before, bootstrap:
|
||||
Bootstrap
|
||||
fi
|
||||
if [ "$OS_PACKAGES_ONLY" = 1 ]; then
|
||||
say "OS packages installed."
|
||||
@@ -1132,7 +1361,8 @@ from os.path import dirname, join
|
||||
import re
|
||||
from subprocess import check_call, CalledProcessError
|
||||
from sys import argv, exit
|
||||
from urllib2 import build_opener, HTTPHandler, HTTPSHandler, HTTPError
|
||||
from urllib2 import build_opener, HTTPHandler, HTTPSHandler
|
||||
from urllib2 import HTTPError, URLError
|
||||
|
||||
PUBLIC_KEY = environ.get('LE_AUTO_PUBLIC_KEY', """-----BEGIN PUBLIC KEY-----
|
||||
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6MR8W/galdxnpGqBsYbq
|
||||
@@ -1168,7 +1398,9 @@ class HttpsGetter(object):
|
||||
|
||||
"""
|
||||
try:
|
||||
return self._opener.open(url).read()
|
||||
# socket module docs say default timeout is None: that is, no
|
||||
# timeout
|
||||
return self._opener.open(url, timeout=30).read()
|
||||
except (HTTPError, IOError) as exc:
|
||||
raise ExpectedError("Couldn't download %s." % url, exc)
|
||||
|
||||
@@ -1258,15 +1490,15 @@ UNLIKELY_EOF
say "Replacing certbot-auto..."
# Clone permissions with cp. chmod and chown don't have a --reference
# option on macOS or BSD, and stat -c on Linux is stat -f on macOS and BSD:
$SUDO cp -p "$0" "$TEMP_DIR/letsencrypt-auto.permission-clone"
$SUDO cp "$TEMP_DIR/letsencrypt-auto" "$TEMP_DIR/letsencrypt-auto.permission-clone"
cp -p "$0" "$TEMP_DIR/letsencrypt-auto.permission-clone"
cp "$TEMP_DIR/letsencrypt-auto" "$TEMP_DIR/letsencrypt-auto.permission-clone"
# Using mv rather than cp leaves the old file descriptor pointing to the
# original copy so the shell can continue to read it unmolested. mv across
# filesystems is non-atomic, doing `rm dest, cp src dest, rm src`, but the
# cp is unlikely to fail (esp. under sudo) if the rm doesn't.
$SUDO mv -f "$TEMP_DIR/letsencrypt-auto.permission-clone" "$0"
# cp is unlikely to fail if the rm doesn't.
mv -f "$TEMP_DIR/letsencrypt-auto.permission-clone" "$0"
fi # A newer version is available.
fi # Self-upgrading is allowed.

"$0" --le-auto-phase2 "$@"
RerunWithArgs --le-auto-phase2 "$@"
fi

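The comments in that hunk explain why the new copy is first staged with cloned permissions and only then renamed over `$0`: the running shell keeps its file descriptor on the old copy, and the rename is atomic as long as the staging file sits on the same filesystem. A rough Python rendering of the same copy-then-rename pattern, offered only as an illustration of the idea (paths and the function name are hypothetical, and `copystat` approximates `cp -p` without preserving ownership):

```python
import os
import shutil
import tempfile


def replace_in_place(new_script, target):
    """Stage a copy next to the target, clone its permissions, then rename over it."""
    fd, staged = tempfile.mkstemp(dir=os.path.dirname(os.path.abspath(target)))
    os.close(fd)
    shutil.copy(new_script, staged)    # new contents
    shutil.copystat(target, staged)    # clone mode bits, like `cp -p` above
    os.replace(staged, target)         # the `mv -f` step; atomic on one filesystem
```

The same caveat applies as in the shell comments: a cross-filesystem move degrades to copy-plus-delete, which is why the staging file is created next to the target.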
@@ -8,8 +8,8 @@ MAINTAINER Brad Warren <bmw@eff.org>
|
||||
|
||||
# TODO: Install non-default Python versions for tox.
|
||||
# TODO: Install Apache/Nginx for plugin development.
|
||||
COPY certbot-auto /opt/certbot/src/certbot-auto
|
||||
RUN /opt/certbot/src/certbot-auto -n --os-packages-only
|
||||
COPY letsencrypt-auto-source /opt/certbot/src/letsencrypt-auto-source
|
||||
RUN /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto --os-packages-only
|
||||
|
||||
# the above is not likely to change, so by putting it further up the
|
||||
# Dockerfile we make sure we cache as much as possible
|
||||
@@ -29,16 +29,18 @@ COPY acme /opt/certbot/src/acme/
|
||||
COPY certbot-apache /opt/certbot/src/certbot-apache/
|
||||
COPY certbot-nginx /opt/certbot/src/certbot-nginx/
|
||||
COPY certbot-compatibility-test /opt/certbot/src/certbot-compatibility-test/
|
||||
COPY tools /opt/certbot/src/tools
|
||||
|
||||
RUN virtualenv --no-site-packages -p python2 /opt/certbot/venv && \
|
||||
/opt/certbot/venv/bin/pip install -U setuptools && \
|
||||
/opt/certbot/venv/bin/pip install -U pip && \
|
||||
/opt/certbot/venv/bin/pip install \
|
||||
-e /opt/certbot/src/acme \
|
||||
-e /opt/certbot/src \
|
||||
-e /opt/certbot/src/certbot-apache \
|
||||
-e /opt/certbot/src/certbot-nginx \
|
||||
-e /opt/certbot/src/certbot-compatibility-test
|
||||
/opt/certbot/venv/bin/pip install -U pip
|
||||
ENV PATH /opt/certbot/venv/bin:$PATH
|
||||
RUN /opt/certbot/src/tools/pip_install_editable.sh \
|
||||
/opt/certbot/src/acme \
|
||||
/opt/certbot/src \
|
||||
/opt/certbot/src/certbot-apache \
|
||||
/opt/certbot/src/certbot-nginx \
|
||||
/opt/certbot/src/certbot-compatibility-test
|
||||
|
||||
# install in editable mode (-e) to save space: it's not possible to
|
||||
# "rm -rf /opt/certbot/src" (it's stays in the underlaying image);
|
||||
@@ -46,5 +48,3 @@ RUN virtualenv --no-site-packages -p python2 /opt/certbot/venv && \
|
||||
# bash" and investigate, apply patches, etc.
|
||||
|
||||
WORKDIR /opt/certbot/src/certbot-compatibility-test/certbot_compatibility_test/testdata
|
||||
|
||||
ENV PATH /opt/certbot/venv/bin:$PATH
|
||||
|
||||
@@ -79,6 +79,8 @@ def _get_names(config):
|
||||
if line.strip().startswith("server_name"):
|
||||
names = line.partition("server_name")[2].rpartition(";")[0]
|
||||
for n in names.split():
|
||||
all_names.add(n)
|
||||
# Filter out wildcards in both all_names and test_names
|
||||
if not n.startswith("*."):
|
||||
all_names.add(n)
|
||||
non_ip_names = set(n for n in all_names if not util.IP_REGEX.match(n))
|
||||
return all_names, non_ip_names
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Tests Certbot plugins against different server configurations."""
|
||||
import argparse
|
||||
import filecmp
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
@@ -64,17 +63,17 @@ def test_authenticator(plugin, config, temp_dir):
|
||||
type(achalls[i]), achalls[i].domain, config)
|
||||
success = False
|
||||
elif isinstance(responses[i], challenges.TLSSNI01Response):
|
||||
verify = functools.partial(responses[i].simple_verify, achalls[i].chall,
|
||||
achalls[i].domain,
|
||||
util.JWK.public_key(),
|
||||
host="127.0.0.1",
|
||||
port=plugin.https_port)
|
||||
if _try_until_true(verify):
|
||||
verified = responses[i].simple_verify(achalls[i].chall,
|
||||
achalls[i].domain,
|
||||
util.JWK.public_key(),
|
||||
host="127.0.0.1",
|
||||
port=plugin.https_port)
|
||||
if verified:
|
||||
logger.info(
|
||||
"tls-sni-01 verification for %s succeeded", achalls[i].domain)
|
||||
else:
|
||||
logger.error(
|
||||
"tls-sni-01 verification for %s in %s failed",
|
||||
"**** tls-sni-01 verification for %s in %s failed",
|
||||
achalls[i].domain, config)
|
||||
success = False
|
||||
|
||||
@@ -122,7 +121,7 @@ def test_installer(args, plugin, config, temp_dir):
|
||||
if names_match:
|
||||
logger.info("get_all_names test succeeded")
|
||||
else:
|
||||
logger.error("get_all_names test failed for config %s", config)
|
||||
logger.error("**** get_all_names test failed for config %s", config)
|
||||
|
||||
domains = list(plugin.get_testable_domain_names())
|
||||
success = test_deploy_cert(plugin, temp_dir, domains)
|
||||
@@ -147,7 +146,7 @@ def test_deploy_cert(plugin, temp_dir, domains):
|
||||
plugin.deploy_cert(domain, cert_path, util.KEY_PATH, cert_path, cert_path)
|
||||
plugin.save() # Needed by the Apache plugin
|
||||
except le_errors.Error as error:
|
||||
logger.error("Plugin failed to deploy certificate for %s:", domain)
|
||||
logger.error("**** Plugin failed to deploy certificate for %s:", domain)
|
||||
logger.exception(error)
|
||||
return False
|
||||
|
||||
@@ -155,11 +154,12 @@ def test_deploy_cert(plugin, temp_dir, domains):
|
||||
return False
|
||||
|
||||
success = True
|
||||
time.sleep(3)
|
||||
for domain in domains:
|
||||
verify = functools.partial(validator.Validator().certificate, cert,
|
||||
domain, "127.0.0.1", plugin.https_port)
|
||||
if not _try_until_true(verify):
|
||||
logger.error("Could not verify certificate for domain %s", domain)
|
||||
verified = validator.Validator().certificate(
|
||||
cert, domain, "127.0.0.1", plugin.https_port)
|
||||
if not verified:
|
||||
logger.error("**** Could not verify certificate for domain %s", domain)
|
||||
success = False
|
||||
|
||||
if success:
|
||||
@@ -177,16 +177,21 @@ def test_enhancements(plugin, domains):
|
||||
"enhancements")
|
||||
return False
|
||||
|
||||
for domain in domains:
|
||||
domains_and_info = [(domain, []) for domain in domains]
|
||||
|
||||
for domain, info in domains_and_info:
|
||||
try:
|
||||
previous_redirect = validator.Validator().any_redirect(
|
||||
"localhost", plugin.http_port, headers={"Host": domain})
|
||||
info.append(previous_redirect)
|
||||
plugin.enhance(domain, "redirect")
|
||||
plugin.save() # Needed by the Apache plugin
|
||||
except le_errors.PluginError as error:
|
||||
# Don't immediately fail because a redirect may already be enabled
|
||||
logger.warning("Plugin failed to enable redirect for %s:", domain)
|
||||
logger.warning("*** Plugin failed to enable redirect for %s:", domain)
|
||||
logger.warning("%s", error)
|
||||
except le_errors.Error as error:
|
||||
logger.error("An error occurred while enabling redirect for %s:",
|
||||
logger.error("*** An error occurred while enabling redirect for %s:",
|
||||
domain)
|
||||
logger.exception(error)
|
||||
|
||||
@@ -194,12 +199,14 @@ def test_enhancements(plugin, domains):
|
||||
return False
|
||||
|
||||
success = True
|
||||
for domain in domains:
|
||||
verify = functools.partial(validator.Validator().redirect, "localhost",
|
||||
plugin.http_port, headers={"Host": domain})
|
||||
if not _try_until_true(verify):
|
||||
logger.error("Improper redirect for domain %s", domain)
|
||||
success = False
|
||||
for domain, info in domains_and_info:
|
||||
previous_redirect = info[0]
|
||||
if not previous_redirect:
|
||||
verified = validator.Validator().redirect(
|
||||
"localhost", plugin.http_port, headers={"Host": domain})
|
||||
if not verified:
|
||||
logger.error("*** Improper redirect for domain %s", domain)
|
||||
success = False
|
||||
|
||||
if success:
|
||||
logger.info("Enhancements test succeeded")
|
||||
@@ -207,17 +214,6 @@ def test_enhancements(plugin, domains):
|
||||
return success
|
||||
|
||||
|
||||
def _try_until_true(func, max_tries=5, sleep_time=0.5):
|
||||
"""Calls func up to max_tries times until it returns True"""
|
||||
for _ in xrange(0, max_tries):
|
||||
if func():
|
||||
return True
|
||||
else:
|
||||
time.sleep(sleep_time)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _save_and_restart(plugin, title=None):
|
||||
"""Saves and restart the plugin, returning True if no errors occurred"""
|
||||
try:
|
||||
@@ -225,7 +221,7 @@ def _save_and_restart(plugin, title=None):
|
||||
plugin.restart()
|
||||
return True
|
||||
except le_errors.Error as error:
|
||||
logger.error("Plugin failed to save and restart server:")
|
||||
logger.error("*** Plugin failed to save and restart server:")
|
||||
logger.exception(error)
|
||||
return False
|
||||
|
||||
@@ -235,12 +231,12 @@ def test_rollback(plugin, config, backup):
|
||||
try:
|
||||
plugin.rollback_checkpoints(1337)
|
||||
except le_errors.Error as error:
|
||||
logger.error("Plugin raised an exception during rollback:")
|
||||
logger.error("*** Plugin raised an exception during rollback:")
|
||||
logger.exception(error)
|
||||
return False
|
||||
|
||||
if _dirs_are_unequal(config, backup):
|
||||
logger.error("Rollback failed for config `%s`", config)
|
||||
logger.error("*** Rollback failed for config `%s`", config)
|
||||
return False
|
||||
else:
|
||||
logger.info("Rollback succeeded")
|
||||
|
||||
Binary file not shown.
@@ -45,19 +45,12 @@ class Validator(object):
else:
response = requests.get(url, allow_redirects=False)

if response.status_code not in (301, 303):
return False

redirect_location = response.headers.get("location", "")
# We're checking that the redirect we added behaves correctly.
# It's okay for some server configuration to redirect to an
# http URL, as long as it's on some other domain.
if not redirect_location.startswith("https://"):
if not redirect_location.startswith("http://"):
return False
else:
if redirect_location[len("http://"):] == name:
return False
return False

if response.status_code != 301:
logger.error("Server did not redirect with permanent code")
@@ -65,6 +58,16 @@ class Validator(object):

return True

def any_redirect(self, name, port=80, headers=None):
"""Test whether webserver redirects."""
url = "http://{0}:{1}".format(name, port)
if headers:
response = requests.get(url, headers=headers, allow_redirects=False)
else:
response = requests.get(url, allow_redirects=False)

return response.status_code in xrange(300, 309)

def hsts(self, name):
"""Test for HTTP Strict Transport Security header"""
headers = requests.get("https://" + name).headers

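For reference, the checks in that hunk can be reproduced directly with `requests`. The standalone sketch below (placeholder host and domain, a Python 3 range test instead of `xrange`) mirrors both the loose "any 3xx" probe that `any_redirect` adds and the stricter permanent-to-https check kept in `redirect`:

```python
import requests


def any_redirect(host, port=80, domain="example.com"):
    """Loose check: does the server answer with any 3xx for this Host header?"""
    url = "http://{0}:{1}".format(host, port)
    response = requests.get(url, headers={"Host": domain}, allow_redirects=False)
    return 300 <= response.status_code <= 308


def https_redirect(host, port=80, domain="example.com"):
    """Stricter check: a 301/303 whose Location points at an https:// URL."""
    url = "http://{0}:{1}".format(host, port)
    response = requests.get(url, headers={"Host": domain}, allow_redirects=False)
    if response.status_code not in (301, 303):
        return False
    return response.headers.get("location", "").startswith("https://")
```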
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'certbot',
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
@@ -10,7 +10,7 @@ Named Arguments
======================================== =====================================
``--dns-google-credentials`` Google Cloud Platform credentials_
JSON file.
(Required)
(Required - Optional on Google Compute Engine)
``--dns-google-propagation-seconds`` The number of seconds to wait for DNS
to propagate before asking the ACME
server to verify the DNS record.
@@ -21,8 +21,8 @@ Named Arguments
Credentials
-----------

Use of this plugin requires a configuration file containing Google Cloud
Platform API credentials for an account with the following permissions:
Use of this plugin requires Google Cloud Platform API credentials
for an account with the following permissions:

* ``dns.changes.create``
* ``dns.changes.get``
@@ -33,7 +33,12 @@ Platform API credentials for an account with the following permissions:
Google provides instructions for `creating a service account <https://developers
.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount>`_ and
`information about the required permissions <https://cloud.google.com/dns/access
-control#permissions_and_roles>`_.
-control#permissions_and_roles>`_. If you're running on Google Compute Engine,
you can `assign the service account to the instance <https://cloud.google.com/
compute/docs/access/create-enable-service-accounts-for-instances>`_ which
is running certbot. A credentials file is not required in this case, as they
are automatically obtained by certbot through the `metadata service
<https://cloud.google.com/compute/docs/storing-retrieving-metadata>`_ .

.. code-block:: json
:name: credentials.json

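The automatic credential lookup described in that documentation change works because, on Google Compute Engine, the instance metadata service can be queried without any key file. The snippet below mirrors the `get_project_id` helper added later in this diff (same URL and `Metadata-Flavor: Google` header); it is an illustration, not part of the plugin's public API:

```python
import httplib2

METADATA_URL = 'http://metadata.google.internal/computeMetadata/v1/'
METADATA_HEADERS = {'Metadata-Flavor': 'Google'}


def project_id_from_metadata():
    """Fetch the current project ID from the GCE metadata service."""
    http = httplib2.Http()
    response, content = http.request(METADATA_URL + 'project/project-id',
                                     headers=METADATA_HEADERS)
    if response.status != 200:
        raise ValueError("Invalid status code: {0}".format(response))
    return content
```

Outside Google Cloud the metadata hostname does not resolve and `httplib2` raises `ServerNotFoundError`, which is exactly the signal `_setup_credentials` uses below to insist on `--dns-google-credentials`.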
@@ -2,6 +2,7 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
import httplib2
|
||||
import zope.interface
|
||||
from googleapiclient import discovery
|
||||
from googleapiclient import errors as googleapiclient_errors
|
||||
@@ -15,6 +16,8 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
ACCT_URL = 'https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount'
|
||||
PERMISSIONS_URL = 'https://cloud.google.com/dns/access-control#permissions_and_roles'
|
||||
METADATA_URL = 'http://metadata.google.internal/computeMetadata/v1/'
|
||||
METADATA_HEADERS = {'Metadata-Flavor': 'Google'}
|
||||
|
||||
|
||||
@zope.interface.implementer(interfaces.IAuthenticator)
|
||||
@@ -39,16 +42,29 @@ class Authenticator(dns_common.DNSAuthenticator):
|
||||
add('credentials',
|
||||
help=('Path to Google Cloud DNS service account JSON file. (See {0} for' +
|
||||
'information about creating a service account and {1} for information about the' +
|
||||
'required permissions.)').format(ACCT_URL, PERMISSIONS_URL))
|
||||
'required permissions.)').format(ACCT_URL, PERMISSIONS_URL),
|
||||
default=None)
|
||||
|
||||
def more_info(self): # pylint: disable=missing-docstring,no-self-use
|
||||
return 'This plugin configures a DNS TXT record to respond to a dns-01 challenge using ' + \
|
||||
'the Google Cloud DNS API.'
|
||||
|
||||
def _setup_credentials(self):
|
||||
self._configure_file('credentials', 'path to Google Cloud DNS service account JSON file')
|
||||
if self.conf('credentials') is None:
|
||||
try:
|
||||
# use project_id query to check for availability of google metadata server
|
||||
# we won't use the result but know we're not on GCP when an exception is thrown
|
||||
_GoogleClient.get_project_id()
|
||||
except (ValueError, httplib2.ServerNotFoundError):
|
||||
raise errors.PluginError('Unable to get Google Cloud Metadata and no credentials'
|
||||
' specified. Automatic credential lookup is only '
|
||||
'available on Google Cloud Platform. Please configure'
|
||||
' credentials using --dns-google-credentials <file>')
|
||||
else:
|
||||
self._configure_file('credentials',
|
||||
'path to Google Cloud DNS service account JSON file')
|
||||
|
||||
dns_common.validate_file_permissions(self.conf('credentials'))
|
||||
dns_common.validate_file_permissions(self.conf('credentials'))
|
||||
|
||||
def _perform(self, domain, validation_name, validation):
|
||||
self._get_google_client().add_txt_record(domain, validation_name, validation, self.ttl)
|
||||
@@ -65,13 +81,18 @@ class _GoogleClient(object):
|
||||
Encapsulates all communication with the Google Cloud DNS API.
|
||||
"""
|
||||
|
||||
def __init__(self, account_json):
|
||||
def __init__(self, account_json=None):
|
||||
|
||||
scopes = ['https://www.googleapis.com/auth/ndev.clouddns.readwrite']
|
||||
credentials = ServiceAccountCredentials.from_json_keyfile_name(account_json, scopes)
|
||||
if account_json is not None:
|
||||
credentials = ServiceAccountCredentials.from_json_keyfile_name(account_json, scopes)
|
||||
with open(account_json) as account:
|
||||
self.project_id = json.load(account)['project_id']
|
||||
else:
|
||||
credentials = None
|
||||
self.project_id = self.get_project_id()
|
||||
|
||||
self.dns = discovery.build('dns', 'v1', credentials=credentials, cache_discovery=False)
|
||||
with open(account_json) as account:
|
||||
self.project_id = json.load(account)['project_id']
|
||||
|
||||
def add_txt_record(self, domain, record_name, record_content, record_ttl):
|
||||
"""
|
||||
@@ -183,3 +204,24 @@ class _GoogleClient(object):
|
||||
|
||||
raise errors.PluginError('Unable to determine managed zone for {0} using zone names: {1}.'
|
||||
.format(domain, zone_dns_name_guesses))
|
||||
|
||||
@staticmethod
|
||||
def get_project_id():
|
||||
"""
|
||||
Query the google metadata service for the current project ID
|
||||
|
||||
This only works on Google Cloud Platform
|
||||
|
||||
:raises ServerNotFoundError: Not running on Google Compute or DNS not available
|
||||
:raises ValueError: Server is found, but response code is not 200
|
||||
:returns: project id
|
||||
"""
|
||||
url = '{0}project/project-id'.format(METADATA_URL)
|
||||
|
||||
# Request an access token from the metadata server.
|
||||
http = httplib2.Http()
|
||||
r, content = http.request(url, headers=METADATA_HEADERS)
|
||||
if r.status != 200:
|
||||
raise ValueError("Invalid status code: {0}".format(r))
|
||||
|
||||
return content
|
||||
|
||||
@@ -5,8 +5,10 @@ import unittest
|
||||
|
||||
import mock
|
||||
from googleapiclient.errors import Error
|
||||
from httplib2 import ServerNotFoundError
|
||||
|
||||
from certbot import errors
|
||||
from certbot.errors import PluginError
|
||||
from certbot.plugins import dns_test_common
|
||||
from certbot.plugins.dns_test_common import DOMAIN
|
||||
from certbot.tests import util as test_util
|
||||
@@ -50,6 +52,11 @@ class AuthenticatorTest(test_util.TempDirTestCase, dns_test_common.BaseAuthentic
|
||||
expected = [mock.call.del_txt_record(DOMAIN, '_acme-challenge.'+DOMAIN, mock.ANY, mock.ANY)]
|
||||
self.assertEqual(expected, self.mock_client.mock_calls)
|
||||
|
||||
@mock.patch('httplib2.Http.request', side_effect=ServerNotFoundError)
|
||||
def test_without_auth(self, unused_mock):
|
||||
self.config.google_credentials = None
|
||||
self.assertRaises(PluginError, self.auth.perform, [self.achall])
|
||||
|
||||
|
||||
class GoogleClientTest(unittest.TestCase):
|
||||
record_name = "foo"
|
||||
@@ -74,11 +81,24 @@ class GoogleClientTest(unittest.TestCase):
|
||||
|
||||
return client, mock_changes
|
||||
|
||||
@mock.patch('googleapiclient.discovery.build')
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google._GoogleClient.get_project_id')
|
||||
def test_client_without_credentials(self, get_project_id_mock, credential_mock,
|
||||
unused_discovery_mock):
|
||||
from certbot_dns_google.dns_google import _GoogleClient
|
||||
_GoogleClient(None)
|
||||
self.assertFalse(credential_mock.called)
|
||||
self.assertTrue(get_project_id_mock.called)
|
||||
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google.open',
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'))
|
||||
def test_add_txt_record(self, unused_credential_mock):
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'), create=True)
|
||||
@mock.patch('certbot_dns_google.dns_google._GoogleClient.get_project_id')
|
||||
def test_add_txt_record(self, get_project_id_mock, credential_mock):
|
||||
client, changes = self._setUp_client_with_mock([{'managedZones': [{'id': self.zone}]}])
|
||||
credential_mock.assert_called_once_with('/not/a/real/path.json', mock.ANY)
|
||||
self.assertFalse(get_project_id_mock.called)
|
||||
|
||||
client.add_txt_record(DOMAIN, self.record_name, self.record_content, self.record_ttl)
|
||||
|
||||
@@ -101,7 +121,7 @@ class GoogleClientTest(unittest.TestCase):
|
||||
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google.open',
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'))
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'), create=True)
|
||||
def test_add_txt_record_and_poll(self, unused_credential_mock):
|
||||
client, changes = self._setUp_client_with_mock([{'managedZones': [{'id': self.zone}]}])
|
||||
changes.create.return_value.execute.return_value = {'status': 'pending', 'id': self.change}
|
||||
@@ -119,7 +139,7 @@ class GoogleClientTest(unittest.TestCase):
|
||||
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google.open',
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'))
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'), create=True)
|
||||
def test_add_txt_record_error_during_zone_lookup(self, unused_credential_mock):
|
||||
client, unused_changes = self._setUp_client_with_mock(API_ERROR)
|
||||
|
||||
@@ -128,7 +148,7 @@ class GoogleClientTest(unittest.TestCase):
|
||||
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google.open',
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'))
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'), create=True)
|
||||
def test_add_txt_record_zone_not_found(self, unused_credential_mock):
|
||||
client, unused_changes = self._setUp_client_with_mock([{'managedZones': []},
|
||||
{'managedZones': []}])
|
||||
@@ -138,7 +158,7 @@ class GoogleClientTest(unittest.TestCase):
|
||||
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google.open',
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'))
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'), create=True)
|
||||
def test_add_txt_record_error_during_add(self, unused_credential_mock):
|
||||
client, changes = self._setUp_client_with_mock([{'managedZones': [{'id': self.zone}]}])
|
||||
changes.create.side_effect = API_ERROR
|
||||
@@ -148,7 +168,7 @@ class GoogleClientTest(unittest.TestCase):
|
||||
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google.open',
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'))
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'), create=True)
|
||||
def test_del_txt_record(self, unused_credential_mock):
|
||||
client, changes = self._setUp_client_with_mock([{'managedZones': [{'id': self.zone}]}])
|
||||
|
||||
@@ -173,7 +193,7 @@ class GoogleClientTest(unittest.TestCase):
|
||||
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google.open',
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'))
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'), create=True)
|
||||
def test_del_txt_record_error_during_zone_lookup(self, unused_credential_mock):
|
||||
client, unused_changes = self._setUp_client_with_mock(API_ERROR)
|
||||
|
||||
@@ -181,7 +201,7 @@ class GoogleClientTest(unittest.TestCase):
|
||||
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google.open',
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'))
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'), create=True)
|
||||
def test_del_txt_record_zone_not_found(self, unused_credential_mock):
|
||||
client, unused_changes = self._setUp_client_with_mock([{'managedZones': []},
|
||||
{'managedZones': []}])
|
||||
@@ -190,13 +210,41 @@ class GoogleClientTest(unittest.TestCase):
|
||||
|
||||
@mock.patch('oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name')
|
||||
@mock.patch('certbot_dns_google.dns_google.open',
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'))
|
||||
mock.mock_open(read_data='{"project_id": "' + PROJECT_ID + '"}'), create=True)
|
||||
def test_del_txt_record_error_during_delete(self, unused_credential_mock):
|
||||
client, changes = self._setUp_client_with_mock([{'managedZones': [{'id': self.zone}]}])
|
||||
changes.create.side_effect = API_ERROR
|
||||
|
||||
client.del_txt_record(DOMAIN, self.record_name, self.record_content, self.record_ttl)
|
||||
|
||||
def test_get_project_id(self):
|
||||
from certbot_dns_google.dns_google import _GoogleClient
|
||||
|
||||
response = DummyResponse()
|
||||
response.status = 200
|
||||
|
||||
with mock.patch('httplib2.Http.request', return_value=(response, 1234)):
|
||||
project_id = _GoogleClient.get_project_id()
|
||||
self.assertEqual(project_id, 1234)
|
||||
|
||||
failed_response = DummyResponse()
|
||||
failed_response.status = 404
|
||||
|
||||
with mock.patch('httplib2.Http.request',
|
||||
return_value=(failed_response, "some detailed http error response")):
|
||||
self.assertRaises(ValueError, _GoogleClient.get_project_id)
|
||||
|
||||
with mock.patch('httplib2.Http.request', side_effect=ServerNotFoundError):
|
||||
self.assertRaises(ServerNotFoundError, _GoogleClient.get_project_id)
|
||||
|
||||
|
||||
class DummyResponse(object):
|
||||
"""
|
||||
Dummy object to create a fake HTTPResponse (the actual one requires a socket and we only
|
||||
need the status attribute)
|
||||
"""
|
||||
def __init__(self):
|
||||
self.status = 200
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -4,19 +4,23 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
'acme=={0}'.format(version),
|
||||
'certbot=={0}'.format(version),
|
||||
'google-api-python-client',
|
||||
# 1.5 is the first version that supports oauth2client>=2.0
|
||||
'google-api-python-client>=1.5',
|
||||
'mock',
|
||||
'oauth2client',
|
||||
# for oauth2client.service_account.ServiceAccountCredentials
|
||||
'oauth2client>=2.0',
|
||||
# For pkg_resources. >=1.0 so pip resolves it to a version cryptography
|
||||
# will tolerate; see #2599:
|
||||
'setuptools>=1.0',
|
||||
'zope.interface',
|
||||
# already a dependency of google-api-python-client, but added for consistency
|
||||
'httplib2'
|
||||
]
|
||||
|
||||
docs_extras = [
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
@@ -208,8 +208,8 @@ class _RFC2136Client(object):
rcode = response.rcode()

# Authoritative Answer bit should be set
if (rcode == dns.rcode.NOERROR and len(response.answer) > 0 and
response.flags & dns.flags.AA):
if (rcode == dns.rcode.NOERROR and response.get_rrset(response.answer,
domain, dns.rdataclass.IN, dns.rdatatype.SOA) and response.flags & dns.flags.AA):
logger.debug('Received authoritative SOA response for %s', domain_name)
return True

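That change tightens the probe from "any non-empty answer with the AA bit" to "an SOA RRset for the exact queried name with the AA bit". A standalone dnspython sketch of the same authoritative-SOA check, with a placeholder server address, domain, and function name:

```python
import dns.flags
import dns.message
import dns.name
import dns.query
import dns.rcode
import dns.rdataclass
import dns.rdatatype


def has_authoritative_soa(server, domain_name, timeout=10):
    """Return True if `server` answers an SOA query for `domain_name` authoritatively."""
    domain = dns.name.from_text(domain_name)
    request = dns.message.make_query(domain, dns.rdatatype.SOA, dns.rdataclass.IN)
    response = dns.query.udp(request, server, timeout)
    rrset = response.get_rrset(response.answer, domain,
                               dns.rdataclass.IN, dns.rdatatype.SOA)
    return (response.rcode() == dns.rcode.NOERROR
            and rrset is not None
            and bool(response.flags & dns.flags.AA))
```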
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
@@ -3,7 +3,7 @@ import sys
|
||||
from distutils.core import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'acme=={0}'.format(version),
|
||||
|
||||
@@ -19,7 +19,6 @@ from certbot import crypto_util
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot import reverter
|
||||
|
||||
from certbot.plugins import common
|
||||
|
||||
@@ -63,7 +62,7 @@ TEST_REDIRECT_COMMENT_BLOCK = [
|
||||
|
||||
@zope.interface.implementer(interfaces.IAuthenticator, interfaces.IInstaller)
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class NginxConfigurator(common.Plugin):
|
||||
class NginxConfigurator(common.Installer):
|
||||
# pylint: disable=too-many-instance-attributes,too-many-public-methods
|
||||
"""Nginx configurator.
|
||||
|
||||
@@ -87,8 +86,6 @@ class NginxConfigurator(common.Plugin):
|
||||
|
||||
description = "Nginx Web Server plugin - Alpha"
|
||||
|
||||
hidden = True
|
||||
|
||||
DEFAULT_LISTEN_PORT = '80'
|
||||
|
||||
@classmethod
|
||||
@@ -120,6 +117,9 @@ class NginxConfigurator(common.Plugin):
|
||||
# Files to save
|
||||
self.save_notes = ""
|
||||
|
||||
# For creating new vhosts if no names match
|
||||
self.new_vhost = None
|
||||
|
||||
# Add number of outstanding challenges
|
||||
self._chall_out = 0
|
||||
|
||||
@@ -129,8 +129,6 @@ class NginxConfigurator(common.Plugin):
|
||||
self._enhance_func = {"redirect": self._enable_redirect,
|
||||
"staple-ocsp": self._enable_ocsp_stapling}
|
||||
|
||||
# Set up reverter
|
||||
self.reverter = reverter.Reverter(self.config)
|
||||
self.reverter.recovery_routine()
|
||||
|
||||
@property
|
||||
@@ -162,6 +160,8 @@ class NginxConfigurator(common.Plugin):
|
||||
|
||||
install_ssl_options_conf(self.mod_ssl_conf, self.updated_mod_ssl_conf_digest)
|
||||
|
||||
self.install_ssl_dhparams()
|
||||
|
||||
# Set Version
|
||||
if self.version is None:
|
||||
self.version = self.get_version()
|
||||
@@ -194,20 +194,16 @@ class NginxConfigurator(common.Plugin):
|
||||
"The nginx plugin currently requires --fullchain-path to "
|
||||
"install a cert.")
|
||||
|
||||
vhost = self.choose_vhost(domain)
|
||||
cert_directives = [['\n', 'ssl_certificate', ' ', fullchain_path],
|
||||
['\n', 'ssl_certificate_key', ' ', key_path]]
|
||||
vhost = self.choose_vhost(domain, raise_if_no_match=False)
|
||||
if vhost is None:
|
||||
vhost = self._vhost_from_duplicated_default(domain)
|
||||
cert_directives = [['\n ', 'ssl_certificate', ' ', fullchain_path],
|
||||
['\n ', 'ssl_certificate_key', ' ', key_path]]
|
||||
|
||||
try:
|
||||
self.parser.add_server_directives(vhost,
|
||||
cert_directives, replace=True)
|
||||
logger.info("Deployed Certificate to VirtualHost %s for %s",
|
||||
vhost.filep, vhost.names)
|
||||
except errors.MisconfigurationError as error:
|
||||
logger.debug(error)
|
||||
# Presumably break here so that the virtualhost is not modified
|
||||
raise errors.PluginError("Cannot find a cert or key directive in {0} for {1}. "
|
||||
"VirtualHost was not modified.".format(vhost.filep, vhost.names))
|
||||
self.parser.add_server_directives(vhost,
|
||||
cert_directives, replace=True)
|
||||
logger.info("Deployed Certificate to VirtualHost %s for %s",
|
||||
vhost.filep, vhost.names)
|
||||
|
||||
self.save_notes += ("Changed vhost at %s with addresses of %s\n" %
|
||||
(vhost.filep,
|
||||
@@ -218,7 +214,7 @@ class NginxConfigurator(common.Plugin):
|
||||
#######################
|
||||
# Vhost parsing methods
|
||||
#######################
|
||||
def choose_vhost(self, target_name):
|
||||
def choose_vhost(self, target_name, raise_if_no_match=True):
|
||||
"""Chooses a virtual host based on the given domain name.
|
||||
|
||||
.. note:: This makes the vhost SSL-enabled if it isn't already. Follows
|
||||
@@ -232,6 +228,8 @@ class NginxConfigurator(common.Plugin):
|
||||
hostname. Currently we just ignore this.
|
||||
|
||||
:param str target_name: domain name
|
||||
:param bool raise_if_no_match: True iff not finding a match is an error;
|
||||
otherwise, return None
|
||||
|
||||
:returns: ssl vhost associated with name
|
||||
:rtype: :class:`~certbot_nginx.obj.VirtualHost`
|
||||
@@ -242,9 +240,16 @@ class NginxConfigurator(common.Plugin):
|
||||
matches = self._get_ranked_matches(target_name)
|
||||
vhost = self._select_best_name_match(matches)
|
||||
if not vhost:
|
||||
# No matches. Raise a misconfiguration error.
|
||||
raise errors.MisconfigurationError(
|
||||
"Cannot find a VirtualHost matching domain %s." % (target_name))
|
||||
if raise_if_no_match:
|
||||
# No matches. Raise a misconfiguration error.
|
||||
raise errors.MisconfigurationError(
|
||||
("Cannot find a VirtualHost matching domain %s. "
|
||||
"In order for Certbot to correctly perform the challenge "
|
||||
"please add a corresponding server_name directive to your "
|
||||
"nginx configuration: "
|
||||
"https://nginx.org/en/docs/http/server_names.html") % (target_name))
|
||||
else:
|
||||
return None
|
||||
else:
|
||||
# Note: if we are enhancing with ocsp, vhost should already be ssl.
|
||||
if not vhost.ssl:
|
||||
@@ -252,6 +257,62 @@ class NginxConfigurator(common.Plugin):

return vhost


def ipv6_info(self, port):
"""Returns tuple of booleans (ipv6_active, ipv6only_present)
ipv6_active is true if any server block listens ipv6 address in any port

ipv6only_present is true if ipv6only=on option exists in any server
block ipv6 listen directive for the specified port.

:param str port: Port to check ipv6only=on directive for

:returns: Tuple containing information if IPv6 is enabled in the global
configuration, and existence of ipv6only directive for specified port
:rtype: tuple of type (bool, bool)
"""
vhosts = self.parser.get_vhosts()
ipv6_active = False
ipv6only_present = False
for vh in vhosts:
for addr in vh.addrs:
if addr.ipv6:
ipv6_active = True
if addr.ipv6only and addr.get_port() == port:
ipv6only_present = True
return (ipv6_active, ipv6only_present)

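The tuple returned by `ipv6_info` feeds the listen-directive construction in `_make_server_ssl` further down in this diff. A small illustration of that consumption, with the booleans passed in directly rather than read from a vhost object (a hypothetical helper, not part of the plugin):

```python
def build_ssl_listen_directives(port, ipv4_enabled, ipv6_enabled, ipv6only_present):
    """Sketch of how the ipv6_info() result shapes the TLS-SNI-01 listen directives."""
    directives = []
    if ipv6_enabled:
        listen = '[::]:{0} ssl'.format(port)
        if not ipv6only_present:
            # ipv6only=on may only appear once per address/port pair, so it is
            # added here only when no existing listen directive already sets it.
            listen += ' ipv6only=on'
        directives.append(['\n    ', 'listen', ' ', listen])
    if ipv4_enabled:
        directives.append(['\n    ', 'listen', ' ', '{0} ssl'.format(port)])
    return directives
```

For example, with IPv4 and IPv6 both enabled and no global `ipv6only=on`, this yields one `listen 5001 ssl` entry and one `listen [::]:5001 ssl ipv6only=on` entry, matching the blocks added in the later `_make_server_ssl` hunk.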
def _vhost_from_duplicated_default(self, domain):
|
||||
if self.new_vhost is None:
|
||||
default_vhost = self._get_default_vhost()
|
||||
self.new_vhost = self.parser.create_new_vhost_from_default(default_vhost)
|
||||
if not self.new_vhost.ssl:
|
||||
self._make_server_ssl(self.new_vhost)
|
||||
self.new_vhost.names = set()
|
||||
|
||||
self.new_vhost.names.add(domain)
|
||||
name_block = [['\n ', 'server_name', ' ', ' '.join(self.new_vhost.names)]]
|
||||
self.parser.add_server_directives(self.new_vhost, name_block, replace=True)
|
||||
return self.new_vhost
|
||||
|
||||
def _get_default_vhost(self):
|
||||
vhost_list = self.parser.get_vhosts()
|
||||
# if one has default_server set, return that one
|
||||
default_vhosts = []
|
||||
for vhost in vhost_list:
|
||||
for addr in vhost.addrs:
|
||||
if addr.default:
|
||||
default_vhosts.append(vhost)
|
||||
break
|
||||
|
||||
if len(default_vhosts) == 1:
|
||||
return default_vhosts[0]
|
||||
|
||||
# TODO: present a list of vhosts for user to choose from
|
||||
|
||||
raise errors.MisconfigurationError("Could not automatically find a matching server"
|
||||
" block. Set the `server_name` directive to use the Nginx installer.")
|
||||
|
||||
def _get_ranked_matches(self, target_name):
|
||||
"""Returns a ranked list of vhosts that match target_name.
|
||||
The ranking gives preference to SSL vhosts.
|
||||
@@ -410,9 +471,12 @@ class NginxConfigurator(common.Plugin):
|
||||
all_names.add(host)
|
||||
elif not common.private_ips_regex.match(host):
|
||||
# If it isn't a private IP, do a reverse DNS lookup
|
||||
# TODO: IPv6 support
|
||||
try:
|
||||
socket.inet_aton(host)
|
||||
if addr.ipv6:
|
||||
host = addr.get_ipv6_exploded()
|
||||
socket.inet_pton(socket.AF_INET6, host)
|
||||
else:
|
||||
socket.inet_pton(socket.AF_INET, host)
|
||||
all_names.add(socket.gethostbyaddr(host)[0])
|
||||
except (socket.error, socket.herror, socket.timeout):
|
||||
continue
|
||||
@@ -448,19 +512,43 @@ class NginxConfigurator(common.Plugin):
|
||||
:type vhost: :class:`~certbot_nginx.obj.VirtualHost`
|
||||
|
||||
"""
|
||||
ipv6info = self.ipv6_info(self.config.tls_sni_01_port)
|
||||
ipv6_block = ['']
|
||||
ipv4_block = ['']
|
||||
|
||||
# If the vhost was implicitly listening on the default Nginx port,
|
||||
# have it continue to do so.
|
||||
if len(vhost.addrs) == 0:
|
||||
listen_block = [['\n ', 'listen', ' ', self.DEFAULT_LISTEN_PORT]]
|
||||
self.parser.add_server_directives(vhost, listen_block, replace=False)
|
||||
|
||||
if vhost.ipv6_enabled():
|
||||
ipv6_block = ['\n ',
|
||||
'listen',
|
||||
' ',
|
||||
'[::]:{0} ssl'.format(self.config.tls_sni_01_port)]
|
||||
if not ipv6info[1]:
|
||||
# ipv6only=on is absent in global config
|
||||
ipv6_block.append(' ')
|
||||
ipv6_block.append('ipv6only=on')
|
||||
|
||||
if vhost.ipv4_enabled():
|
||||
ipv4_block = ['\n ',
|
||||
'listen',
|
||||
' ',
|
||||
'{0} ssl'.format(self.config.tls_sni_01_port)]
|
||||
|
||||
|
||||
snakeoil_cert, snakeoil_key = self._get_snakeoil_paths()
|
||||
|
||||
ssl_block = (
|
||||
[['\n ', 'listen', ' ', '{0} ssl'.format(self.config.tls_sni_01_port)],
|
||||
['\n ', 'ssl_certificate', ' ', snakeoil_cert],
|
||||
['\n ', 'ssl_certificate_key', ' ', snakeoil_key],
|
||||
['\n ', 'include', ' ', self.mod_ssl_conf]])
|
||||
ssl_block = ([
|
||||
ipv6_block,
|
||||
ipv4_block,
|
||||
['\n ', 'ssl_certificate', ' ', snakeoil_cert],
|
||||
['\n ', 'ssl_certificate_key', ' ', snakeoil_key],
|
||||
['\n ', 'include', ' ', self.mod_ssl_conf],
|
||||
['\n ', 'ssl_dhparam', ' ', self.ssl_dhparams],
|
||||
])
|
||||
|
||||
self.parser.add_server_directives(
|
||||
vhost, ssl_block, replace=False)
|
||||
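Each entry of ssl_block above is a flat list alternating whitespace and tokens; when the parser writes the tree back out (see the RawNginxDumper hunks later in this diff), the pieces are joined and a semicolon appended. A one-line illustration, with 5001 standing in for config.tls_sni_01_port:

directive = ['\n    ', 'listen', ' ', '[::]:5001 ssl ipv6only=on']
rendered = "".join(directive) + ";"
print(repr(rendered))  # '\n    listen [::]:5001 ssl ipv6only=on;'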
@@ -701,31 +789,13 @@ class NginxConfigurator(common.Plugin):
|
||||
|
||||
"""
|
||||
save_files = set(self.parser.parsed.keys())
|
||||
|
||||
try: # TODO: make a common base for Apache and Nginx plugins
|
||||
# Create Checkpoint
|
||||
if temporary:
|
||||
self.reverter.add_to_temp_checkpoint(
|
||||
save_files, self.save_notes)
|
||||
# how many comments does it take
|
||||
else:
|
||||
self.reverter.add_to_checkpoint(save_files,
|
||||
self.save_notes)
|
||||
# to confuse a linter?
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
|
||||
self.add_to_checkpoint(save_files, self.save_notes, temporary)
|
||||
self.save_notes = ""
|
||||
|
||||
# Change 'ext' to something else to not override existing conf files
|
||||
self.parser.filedump(ext='')
|
||||
if title and not temporary:
|
||||
try:
|
||||
self.reverter.finalize_checkpoint(title)
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
|
||||
return True
|
||||
self.finalize_checkpoint(title)
|
||||
|
||||
def recovery_routine(self):
|
||||
"""Revert all previously modified files.
|
||||
@@ -735,10 +805,7 @@ class NginxConfigurator(common.Plugin):
|
||||
:raises .errors.PluginError: If unable to recover the configuration
|
||||
|
||||
"""
|
||||
try:
|
||||
self.reverter.recovery_routine()
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
super(NginxConfigurator, self).recovery_routine()
|
||||
self.parser.load()
|
||||
|
||||
def revert_challenge_config(self):
|
||||
@@ -747,10 +814,7 @@ class NginxConfigurator(common.Plugin):
|
||||
:raises .errors.PluginError: If unable to revert the challenge config.
|
||||
|
||||
"""
|
||||
try:
|
||||
self.reverter.revert_temporary_config()
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
self.revert_temporary_config()
|
||||
self.parser.load()
|
||||
|
||||
def rollback_checkpoints(self, rollback=1):
|
||||
@@ -762,24 +826,9 @@ class NginxConfigurator(common.Plugin):
|
||||
the function is unable to correctly revert the configuration
|
||||
|
||||
"""
|
||||
try:
|
||||
self.reverter.rollback_checkpoints(rollback)
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
super(NginxConfigurator, self).rollback_checkpoints(rollback)
|
||||
self.parser.load()
|
||||
|
||||
def view_config_changes(self):
|
||||
"""Show all of the configuration changes that have taken place.
|
||||
|
||||
:raises .errors.PluginError: If there is a problem while processing
|
||||
the checkpoints directories.
|
||||
|
||||
"""
|
||||
try:
|
||||
self.reverter.view_config_changes()
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
|
||||
###########################################################################
|
||||
# Challenges Section for IAuthenticator
|
||||
###########################################################################
|
||||
@@ -868,5 +917,5 @@ def nginx_restart(nginx_ctl, nginx_conf):
|
||||
|
||||
def install_ssl_options_conf(options_ssl, options_ssl_digest):
|
||||
"""Copy Certbot's SSL options file into the system's config dir if required."""
|
||||
return common.install_ssl_options_conf(options_ssl, options_ssl_digest,
|
||||
return common.install_version_controlled_file(options_ssl, options_ssl_digest,
|
||||
constants.MOD_SSL_CONF_SRC, constants.ALL_SSL_OPTIONS_HASHES)
|
||||
|
||||
@@ -7,6 +7,7 @@ from pyparsing import (
|
||||
Literal, White, Forward, Group, Optional, OneOrMore, QuotedString, Regex, ZeroOrMore, Combine)
|
||||
from pyparsing import stringEnd
|
||||
from pyparsing import restOfLine
|
||||
import six
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -71,7 +72,7 @@ class RawNginxDumper(object):
|
||||
"""Iterates the dumped nginx content."""
|
||||
blocks = blocks or self.blocks
|
||||
for b0 in blocks:
|
||||
if isinstance(b0, str):
|
||||
if isinstance(b0, six.string_types):
|
||||
yield b0
|
||||
continue
|
||||
item = copy.deepcopy(b0)
|
||||
@@ -88,7 +89,7 @@ class RawNginxDumper(object):
|
||||
yield '}'
|
||||
else: # not a block - list of strings
|
||||
semicolon = ";"
|
||||
if isinstance(item[0], str) and item[0].strip() == '#': # comment
|
||||
if isinstance(item[0], six.string_types) and item[0].strip() == '#': # comment
|
||||
semicolon = ""
|
||||
yield "".join(item) + semicolon
|
||||
|
||||
@@ -145,7 +146,7 @@ def dump(blocks, _file):
|
||||
return _file.write(dumps(blocks))
|
||||
|
||||
|
||||
spacey = lambda x: (isinstance(x, str) and x.isspace()) or x == ''
|
||||
spacey = lambda x: (isinstance(x, six.string_types) and x.isspace()) or x == ''
|
||||
|
||||
class UnspacedList(list):
|
||||
"""Wrap a list [of lists], making any whitespace entries magically invisible"""
|
||||
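The str-to-six.string_types substitutions in this file are Python 2/3 compatibility fixes: under Python 2, pyparsing can hand back unicode objects, which are not instances of str. A quick illustration of the difference (sketch, not taken from the codebase):

import six

token = u"server_name"                      # may be unicode on Python 2
print(isinstance(token, str))               # False on Python 2, True on Python 3
print(isinstance(token, six.string_types))  # True on both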
@@ -189,13 +190,15 @@ class UnspacedList(list):
|
||||
item, spaced_item = self._coerce(x)
|
||||
slicepos = self._spaced_position(i) if i < len(self) else len(self.spaced)
|
||||
self.spaced.insert(slicepos, spaced_item)
|
||||
list.insert(self, i, item)
|
||||
if not spacey(item):
|
||||
list.insert(self, i, item)
|
||||
self.dirty = True
|
||||
|
||||
def append(self, x):
|
||||
item, spaced_item = self._coerce(x)
|
||||
self.spaced.append(spaced_item)
|
||||
list.append(self, item)
|
||||
if not spacey(item):
|
||||
list.append(self, item)
|
||||
self.dirty = True
|
||||
|
||||
def extend(self, x):
|
||||
@@ -226,7 +229,8 @@ class UnspacedList(list):
|
||||
raise NotImplementedError("Slice operations on UnspacedLists not yet implemented")
|
||||
item, spaced_item = self._coerce(value)
|
||||
self.spaced.__setitem__(self._spaced_position(i), spaced_item)
|
||||
list.__setitem__(self, i, item)
|
||||
if not spacey(item):
|
||||
list.__setitem__(self, i, item)
|
||||
self.dirty = True
|
||||
|
||||
def __delitem__(self, i):
|
||||
@@ -235,8 +239,8 @@ class UnspacedList(list):
|
||||
self.dirty = True
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
l = UnspacedList(self[:])
|
||||
l.spaced = copy.deepcopy(self.spaced, memo=memo)
|
||||
new_spaced = copy.deepcopy(self.spaced, memo=memo)
|
||||
l = UnspacedList(new_spaced)
|
||||
l.dirty = self.dirty
|
||||
return l
|
||||
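The insert/append/__setitem__ changes above all follow one pattern: every entry still goes into self.spaced, but it is mirrored into the unspaced view only when it is not pure whitespace. A toy model of that invariant (simplified; the real class also tracks dirty state and spaced positions):

def spacey(x):
    return (isinstance(x, str) and x.isspace()) or x == ''

class TwoViewList(list):
    """Toy model: keep every entry in .spaced, only real tokens in the list itself."""
    def __init__(self, entries):
        super(TwoViewList, self).__init__()
        self.spaced = []
        for entry in entries:
            self.add(entry)

    def add(self, entry):
        self.spaced.append(entry)
        if not spacey(entry):
            list.append(self, entry)

tokens = TwoViewList(['\n    ', 'listen', ' ', '80'])
print(list(tokens))   # ['listen', '80']
print(tokens.spaced)  # ['\n    ', 'listen', ' ', '80']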
|
||||
|
||||
@@ -34,10 +34,13 @@ class Addr(common.Addr):
|
||||
UNSPECIFIED_IPV4_ADDRESSES = ('', '*', '0.0.0.0')
|
||||
CANONICAL_UNSPECIFIED_ADDRESS = UNSPECIFIED_IPV4_ADDRESSES[0]
|
||||
|
||||
def __init__(self, host, port, ssl, default):
|
||||
def __init__(self, host, port, ssl, default, ipv6, ipv6only):
|
||||
# pylint: disable=too-many-arguments
|
||||
super(Addr, self).__init__((host, port))
|
||||
self.ssl = ssl
|
||||
self.default = default
|
||||
self.ipv6 = ipv6
|
||||
self.ipv6only = ipv6only
|
||||
self.unspecified_address = host in self.UNSPECIFIED_IPV4_ADDRESSES
|
||||
|
||||
@classmethod
|
||||
@@ -46,6 +49,8 @@ class Addr(common.Addr):
|
||||
parts = str_addr.split(' ')
|
||||
ssl = False
|
||||
default = False
|
||||
ipv6 = False
|
||||
ipv6only = False
|
||||
host = ''
|
||||
port = ''
|
||||
|
||||
@@ -56,15 +61,25 @@ class Addr(common.Addr):
|
||||
if addr.startswith('unix:'):
|
||||
return None
|
||||
|
||||
tup = addr.partition(':')
|
||||
if re.match(r'^\d+$', tup[0]):
|
||||
# This is a bare port, not a hostname. E.g. listen 80
|
||||
host = ''
|
||||
port = tup[0]
|
||||
# IPv6 check
|
||||
ipv6_match = re.match(r'\[.*\]', addr)
|
||||
if ipv6_match:
|
||||
ipv6 = True
|
||||
# IPv6 handling
|
||||
host = ipv6_match.group()
|
||||
# The rest of the addr string will be the port, if any
|
||||
port = addr[ipv6_match.end()+1:]
|
||||
else:
|
||||
# This is a host-port tuple. E.g. listen 127.0.0.1:*
|
||||
host = tup[0]
|
||||
port = tup[2]
|
||||
# IPv4 handling
|
||||
tup = addr.partition(':')
|
||||
if re.match(r'^\d+$', tup[0]):
|
||||
# This is a bare port, not a hostname. E.g. listen 80
|
||||
host = ''
|
||||
port = tup[0]
|
||||
else:
|
||||
# This is a host-port tuple. E.g. listen 127.0.0.1:*
|
||||
host = tup[0]
|
||||
port = tup[2]
|
||||
|
||||
# The rest of the parts are options; we only care about ssl and default
|
||||
while len(parts) > 0:
|
||||
@@ -73,8 +88,10 @@ class Addr(common.Addr):
|
||||
ssl = True
|
||||
elif nextpart == 'default_server':
|
||||
default = True
|
||||
elif nextpart == "ipv6only=on":
|
||||
ipv6only = True
|
||||
|
||||
return cls(host, port, ssl, default)
|
||||
return cls(host, port, ssl, default, ipv6, ipv6only)
|
||||
|
||||
def to_string(self, include_default=True):
|
||||
"""Return string representation of Addr"""
|
||||
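To make the new bracketed-address handling concrete, here is a standalone re-implementation of just this parsing branch, exercised on the kinds of listen arguments that appear in the new test data. It mirrors the diff rather than calling the real Addr class:

import re

def parse_listen(addr):
    """Sketch of the IPv6 branch added to Addr.fromstring."""
    ipv6 = False
    ipv6_match = re.match(r'\[.*\]', addr)
    if ipv6_match:
        ipv6 = True
        host = ipv6_match.group()
        # Whatever follows the closing bracket and ':' is the port, if any
        port = addr[ipv6_match.end() + 1:]
    else:
        host, _, port = addr.partition(':')
        if re.match(r'^\d+$', host):
            # A bare port, e.g. "listen 80"
            host, port = '', host
    return host, port, ipv6

print(parse_listen('[::]:443'))   # ('[::]', '443', True)
print(parse_listen('80'))         # ('', '80', False)
print(parse_listen('127.0.0.1'))  # ('127.0.0.1', '', False)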
@@ -114,8 +131,6 @@ class Addr(common.Addr):
|
||||
self.tup[1]), self.ipv6) == \
|
||||
common.Addr((other.CANONICAL_UNSPECIFIED_ADDRESS,
|
||||
other.tup[1]), other.ipv6)
|
||||
# Nginx plugin currently doesn't support IPv6 but this will
|
||||
# future-proof it
|
||||
return super(Addr, self).__eq__(other)
|
||||
|
||||
def __eq__(self, other):
|
||||
@@ -195,10 +210,24 @@ class VirtualHost(object): # pylint: disable=too-few-public-methods
|
||||
return True
|
||||
return False
|
||||
|
||||
def ipv6_enabled(self):
|
||||
"""Return true if one or more of the listen directives in vhost supports
|
||||
IPv6"""
|
||||
for a in self.addrs:
|
||||
if a.ipv6:
|
||||
return True
|
||||
|
||||
def ipv4_enabled(self):
|
||||
"""Return true if one or more of the listen directives in vhost are IPv4
|
||||
only"""
|
||||
for a in self.addrs:
|
||||
if not a.ipv6:
|
||||
return True
|
||||
|
||||
def _find_directive(directives, directive_name):
|
||||
"""Find a directive of type directive_name in directives
|
||||
"""
|
||||
if not directives or isinstance(directives, str) or len(directives) == 0:
|
||||
if not directives or isinstance(directives, six.string_types) or len(directives) == 0:
|
||||
return None
|
||||
|
||||
if directives[0] == directive_name:
|
||||
|
||||
@@ -6,6 +6,8 @@ import os
|
||||
import pyparsing
|
||||
import re
|
||||
|
||||
import six
|
||||
|
||||
from certbot import errors
|
||||
|
||||
from certbot_nginx import obj
|
||||
@@ -278,8 +280,8 @@ class NginxParser(object):
|
||||
|
||||
This method modifies vhost to be fully consistent with the new directives.
|
||||
|
||||
..note :: If replace is True, this raises a misconfiguration error
|
||||
if the directive does not already exist.
|
||||
..note :: If replace is True and the directive already exists, the first
|
||||
instance will be replaced. Otherwise, the directive is added.
|
||||
..note :: If replace is False nothing gets added if an identical
|
||||
block exists already.
|
||||
|
||||
@@ -312,6 +314,32 @@ class NginxParser(object):
|
||||
except errors.MisconfigurationError as err:
|
||||
raise errors.MisconfigurationError("Problem in %s: %s" % (filename, str(err)))
|
||||
|
||||
def create_new_vhost_from_default(self, vhost_template):
|
||||
"""Duplicate the default vhost in the configuration files.
|
||||
|
||||
:param :class:`~certbot_nginx.obj.VirtualHost` vhost_template: The vhost
|
||||
whose information we copy
|
||||
|
||||
:returns: A vhost object for the newly created vhost
|
||||
:rtype: :class:`~certbot_nginx.obj.VirtualHost`
|
||||
"""
|
||||
# TODO: https://github.com/certbot/certbot/issues/5185
|
||||
# put it in the same file as the template, at the same level
|
||||
enclosing_block = self.parsed[vhost_template.filep]
|
||||
for index in vhost_template.path[:-1]:
|
||||
enclosing_block = enclosing_block[index]
|
||||
new_location = vhost_template.path[-1] + 1
|
||||
raw_in_parsed = copy.deepcopy(enclosing_block[vhost_template.path[-1]])
|
||||
enclosing_block.insert(new_location, raw_in_parsed)
|
||||
new_vhost = copy.deepcopy(vhost_template)
|
||||
new_vhost.path[-1] = new_location
|
||||
for addr in new_vhost.addrs:
|
||||
addr.default = False
|
||||
for directive in enclosing_block[new_vhost.path[-1]][1]:
|
||||
if len(directive) > 0 and directive[0] == 'listen' and 'default_server' in directive:
|
||||
del directive[directive.index('default_server')]
|
||||
return new_vhost
|
||||
|
||||
def _parse_ssl_options(ssl_options):
|
||||
if ssl_options is not None:
|
||||
try:
|
||||
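The path arithmetic in create_new_vhost_from_default is easier to follow on a toy parse tree: the template block is deep-copied into its enclosing list right after the original, and the copy's path gets the new index. A simplified illustration with made-up data:

import copy

# Toy "parsed" tree: a file is a list of blocks, each block is [header, body].
parsed = [
    [['server'], [['listen', '80'], ['server_name', 'default']]],
    [['server'], [['listen', '8080'], ['server_name', 'other']]],
]

template_path = [0]                   # path of the default vhost inside 'parsed'
enclosing_block = parsed              # template_path[:-1] is empty, so the file itself
new_location = template_path[-1] + 1  # the copy goes right after the template

enclosing_block.insert(new_location,
                       copy.deepcopy(enclosing_block[template_path[-1]]))

new_path = list(template_path)
new_path[-1] = new_location
print(new_path)     # [1]
print(len(parsed))  # 3 -- original, copy, then the block that used to be at index 1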
@@ -444,7 +472,7 @@ def _is_include_directive(entry):
|
||||
"""
|
||||
return (isinstance(entry, list) and
|
||||
len(entry) == 2 and entry[0] == 'include' and
|
||||
isinstance(entry[1], str))
|
||||
isinstance(entry[1], six.string_types))
|
||||
|
||||
def _is_ssl_on_directive(entry):
|
||||
"""Checks if an nginx parsed entry is an 'ssl on' directive.
|
||||
@@ -464,8 +492,9 @@ def _add_directives(block, directives, replace):
|
||||
When replace=False, it's an error to try and add a directive that already
|
||||
exists in the config block with a conflicting value.
|
||||
|
||||
When replace=True, a directive with the same name MUST already exist in the
|
||||
config block, and the first instance will be replaced.
|
||||
When replace=True and a directive with the same name already exists in the
|
||||
config block, the first instance will be replaced. Otherwise, the directive
|
||||
will be added to the config block.
|
||||
|
||||
..todo :: Find directives that are in included files.
|
||||
|
||||
@@ -547,49 +576,47 @@ def _add_directive(block, directive, replace):
|
||||
location = find_location(directive)
|
||||
|
||||
if replace:
|
||||
if location is None:
|
||||
raise errors.MisconfigurationError(
|
||||
'expected directive for {0} in the Nginx '
|
||||
'config but did not find it.'.format(directive[0]))
|
||||
block[location] = directive
|
||||
_comment_directive(block, location)
|
||||
else:
|
||||
# Append directive. Fail if the name is not a repeatable directive name,
|
||||
# and there is already a copy of that directive with a different value
|
||||
# in the config file.
|
||||
if location is not None:
|
||||
block[location] = directive
|
||||
_comment_directive(block, location)
|
||||
return
|
||||
# Append directive. Fail if the name is not a repeatable directive name,
|
||||
# and there is already a copy of that directive with a different value
|
||||
# in the config file.
|
||||
|
||||
# handle flat include files
|
||||
# handle flat include files
|
||||
|
||||
directive_name = directive[0]
|
||||
def can_append(loc, dir_name):
|
||||
""" Can we append this directive to the block? """
|
||||
return loc is None or (isinstance(dir_name, str) and dir_name in REPEATABLE_DIRECTIVES)
|
||||
directive_name = directive[0]
|
||||
def can_append(loc, dir_name):
|
||||
""" Can we append this directive to the block? """
|
||||
return loc is None or (isinstance(dir_name, six.string_types)
|
||||
and dir_name in REPEATABLE_DIRECTIVES)
|
||||
|
||||
err_fmt = 'tried to insert directive "{0}" but found conflicting "{1}".'
|
||||
err_fmt = 'tried to insert directive "{0}" but found conflicting "{1}".'
|
||||
|
||||
# Give a better error message about the specific directive than Nginx's "fail to restart"
|
||||
if directive_name == INCLUDE:
|
||||
# in theory, we might want to do this recursively, but in practice, that's really not
|
||||
# necessary because we know what file we're talking about (and if we don't recurse, we
|
||||
# just give a worse error message)
|
||||
included_directives = _parse_ssl_options(directive[1])
|
||||
# Give a better error message about the specific directive than Nginx's "fail to restart"
|
||||
if directive_name == INCLUDE:
|
||||
# in theory, we might want to do this recursively, but in practice, that's really not
|
||||
# necessary because we know what file we're talking about (and if we don't recurse, we
|
||||
# just give a worse error message)
|
||||
included_directives = _parse_ssl_options(directive[1])
|
||||
|
||||
for included_directive in included_directives:
|
||||
included_dir_loc = find_location(included_directive)
|
||||
included_dir_name = included_directive[0]
|
||||
if not is_whitespace_or_comment(included_directive) \
|
||||
and not can_append(included_dir_loc, included_dir_name):
|
||||
if block[included_dir_loc] != included_directive:
|
||||
raise errors.MisconfigurationError(err_fmt.format(included_directive,
|
||||
block[included_dir_loc]))
|
||||
else:
|
||||
_comment_out_directive(block, included_dir_loc, directive[1])
|
||||
for included_directive in included_directives:
|
||||
included_dir_loc = find_location(included_directive)
|
||||
included_dir_name = included_directive[0]
|
||||
if not is_whitespace_or_comment(included_directive) \
|
||||
and not can_append(included_dir_loc, included_dir_name):
|
||||
if block[included_dir_loc] != included_directive:
|
||||
raise errors.MisconfigurationError(err_fmt.format(included_directive,
|
||||
block[included_dir_loc]))
|
||||
else:
|
||||
_comment_out_directive(block, included_dir_loc, directive[1])
|
||||
|
||||
if can_append(location, directive_name):
|
||||
block.append(directive)
|
||||
_comment_directive(block, len(block) - 1)
|
||||
elif block[location] != directive:
|
||||
raise errors.MisconfigurationError(err_fmt.format(directive, block[location]))
|
||||
if can_append(location, directive_name):
|
||||
block.append(directive)
|
||||
_comment_directive(block, len(block) - 1)
|
||||
elif block[location] != directive:
|
||||
raise errors.MisconfigurationError(err_fmt.format(directive, block[location]))
|
||||
|
||||
def _apply_global_addr_ssl(addr_to_ssl, parsed_server):
|
||||
"""Apply global sslishness information to the parsed server block
|
||||
|
||||
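The restructured _add_directive keeps the same append rule on both paths: a directive may be appended when nothing with that name exists in the block yet, or when the name is allowed to repeat. A condensed sketch of that rule; the contents of REPEATABLE_DIRECTIVES below are illustrative, not the module's actual constant:

REPEATABLE_DIRECTIVES = set(['server_name', 'listen', 'include', 'rewrite'])  # illustrative

def can_append(existing_location, directive_name):
    """A directive can be appended if it is absent or may legitimately repeat."""
    return existing_location is None or directive_name in REPEATABLE_DIRECTIVES

print(can_append(None, 'ssl_certificate'))  # True  -- nothing conflicting yet
print(can_append(3, 'ssl_certificate'))     # False -- conflicts with block[3]
print(can_append(3, 'listen'))              # True  -- listen may appear repeatedly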
@@ -46,7 +46,7 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
|
||||
def test_prepare(self):
|
||||
self.assertEqual((1, 6, 2), self.config.version)
|
||||
self.assertEqual(8, len(self.config.parser.parsed))
|
||||
self.assertEqual(10, len(self.config.parser.parsed))
|
||||
|
||||
@mock.patch("certbot_nginx.configurator.util.exe_exists")
|
||||
@mock.patch("certbot_nginx.configurator.subprocess.Popen")
|
||||
@@ -90,7 +90,7 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
self.assertEqual(names, set(
|
||||
["155.225.50.69.nephoscale.net", "www.example.org", "another.alias",
|
||||
"migration.com", "summer.com", "geese.com", "sslon.com",
|
||||
"globalssl.com", "globalsslsetssl.com"]))
|
||||
"globalssl.com", "globalsslsetssl.com", "ipv6.com", "ipv6ssl.com"]))
|
||||
|
||||
def test_supported_enhancements(self):
|
||||
self.assertEqual(['redirect', 'staple-ocsp'],
|
||||
@@ -132,6 +132,7 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
server_conf = set(['somename', 'another.alias', 'alias'])
|
||||
example_conf = set(['.example.com', 'example.*'])
|
||||
foo_conf = set(['*.www.foo.com', '*.www.example.com'])
|
||||
ipv6_conf = set(['ipv6.com'])
|
||||
|
||||
results = {'localhost': localhost_conf,
|
||||
'alias': server_conf,
|
||||
@@ -140,7 +141,8 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
'www.example.com': example_conf,
|
||||
'test.www.example.com': foo_conf,
|
||||
'abc.www.foo.com': foo_conf,
|
||||
'www.bar.co.uk': localhost_conf}
|
||||
'www.bar.co.uk': localhost_conf,
|
||||
'ipv6.com': ipv6_conf}
|
||||
|
||||
conf_path = {'localhost': "etc_nginx/nginx.conf",
|
||||
'alias': "etc_nginx/nginx.conf",
|
||||
@@ -149,7 +151,8 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
'www.example.com': "etc_nginx/sites-enabled/example.com",
|
||||
'test.www.example.com': "etc_nginx/foo.conf",
|
||||
'abc.www.foo.com': "etc_nginx/foo.conf",
|
||||
'www.bar.co.uk': "etc_nginx/nginx.conf"}
|
||||
'www.bar.co.uk': "etc_nginx/nginx.conf",
|
||||
'ipv6.com': "etc_nginx/sites-enabled/ipv6.com"}
|
||||
|
||||
bad_results = ['www.foo.com', 'example', 't.www.bar.co',
|
||||
'69.255.225.155']
|
||||
@@ -160,11 +163,24 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
|
||||
self.assertEqual(results[name], vhost.names)
|
||||
self.assertEqual(conf_path[name], path)
|
||||
# IPv6 specific checks
|
||||
if name == "ipv6.com":
|
||||
self.assertTrue(vhost.ipv6_enabled())
|
||||
# Make sure that we have SSL enabled also for IPv6 addr
|
||||
self.assertTrue(
|
||||
any([True for x in vhost.addrs if x.ssl and x.ipv6]))
|
||||
|
||||
for name in bad_results:
|
||||
self.assertRaises(errors.MisconfigurationError,
|
||||
self.config.choose_vhost, name)
|
||||
|
||||
def test_ipv6only(self):
|
||||
# ipv6_info: (ipv6_active, ipv6only_present)
|
||||
self.assertEquals((True, False), self.config.ipv6_info("80"))
|
||||
# Port 443 has ipv6only=on because of ipv6ssl.com vhost
|
||||
self.assertEquals((True, True), self.config.ipv6_info("443"))
|
||||
|
||||
|
||||
def test_more_info(self):
|
||||
self.assertTrue('nginx.conf' in self.config.more_info())
|
||||
|
||||
@@ -226,8 +242,9 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
['listen', '5001', 'ssl'],
|
||||
['ssl_certificate', 'example/fullchain.pem'],
|
||||
['ssl_certificate_key', 'example/key.pem'],
|
||||
['include', self.config.mod_ssl_conf]]
|
||||
]],
|
||||
['include', self.config.mod_ssl_conf],
|
||||
['ssl_dhparam', self.config.ssl_dhparams],
|
||||
]]],
|
||||
parsed_example_conf)
|
||||
self.assertEqual([['server_name', 'somename', 'alias', 'another.alias']],
|
||||
parsed_server_conf)
|
||||
@@ -244,8 +261,9 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
['listen', '5001', 'ssl'],
|
||||
['ssl_certificate', '/etc/nginx/fullchain.pem'],
|
||||
['ssl_certificate_key', '/etc/nginx/key.pem'],
|
||||
['include', self.config.mod_ssl_conf]]
|
||||
],
|
||||
['include', self.config.mod_ssl_conf],
|
||||
['ssl_dhparam', self.config.ssl_dhparams],
|
||||
]],
|
||||
2))
|
||||
|
||||
def test_deploy_cert_add_explicit_listen(self):
|
||||
@@ -268,8 +286,9 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
['listen', '5001', 'ssl'],
|
||||
['ssl_certificate', 'summer/fullchain.pem'],
|
||||
['ssl_certificate_key', 'summer/key.pem'],
|
||||
['include', self.config.mod_ssl_conf]]
|
||||
],
|
||||
['include', self.config.mod_ssl_conf],
|
||||
['ssl_dhparam', self.config.ssl_dhparams],
|
||||
]],
|
||||
parsed_migration_conf[0])
|
||||
|
||||
@mock.patch("certbot_nginx.configurator.tls_sni_01.NginxTlsSni01.perform")
|
||||
@@ -539,6 +558,138 @@ class NginxConfiguratorTest(util.NginxTest):
|
||||
self.assertTrue(util.contains_at_depth(
|
||||
generated_conf, ['ssl_stapling_verify', 'on'], 2))
|
||||
|
||||
def test_deploy_no_match_default_set(self):
|
||||
default_conf = self.config.parser.abs_path('sites-enabled/default')
|
||||
foo_conf = self.config.parser.abs_path('foo.conf')
|
||||
del self.config.parser.parsed[foo_conf][2][1][0][1][0] # remove default_server
|
||||
self.config.version = (1, 3, 1)
|
||||
|
||||
self.config.deploy_cert(
|
||||
"www.nomatch.com",
|
||||
"example/cert.pem",
|
||||
"example/key.pem",
|
||||
"example/chain.pem",
|
||||
"example/fullchain.pem")
|
||||
self.config.save()
|
||||
|
||||
self.config.parser.load()
|
||||
|
||||
parsed_default_conf = util.filter_comments(self.config.parser.parsed[default_conf])
|
||||
|
||||
self.assertEqual([[['server'],
|
||||
[['listen', 'myhost', 'default_server'],
|
||||
['listen', 'otherhost', 'default_server'],
|
||||
['server_name', 'www.example.org'],
|
||||
[['location', '/'],
|
||||
[['root', 'html'],
|
||||
['index', 'index.html', 'index.htm']]]]],
|
||||
[['server'],
|
||||
[['listen', 'myhost'],
|
||||
['listen', 'otherhost'],
|
||||
['server_name', 'www.nomatch.com'],
|
||||
[['location', '/'],
|
||||
[['root', 'html'],
|
||||
['index', 'index.html', 'index.htm']]],
|
||||
['listen', '5001', 'ssl'],
|
||||
['ssl_certificate', 'example/fullchain.pem'],
|
||||
['ssl_certificate_key', 'example/key.pem'],
|
||||
['include', self.config.mod_ssl_conf],
|
||||
['ssl_dhparam', self.config.ssl_dhparams]]]],
|
||||
parsed_default_conf)
|
||||
|
||||
self.config.deploy_cert(
|
||||
"nomatch.com",
|
||||
"example/cert.pem",
|
||||
"example/key.pem",
|
||||
"example/chain.pem",
|
||||
"example/fullchain.pem")
|
||||
self.config.save()
|
||||
|
||||
self.config.parser.load()
|
||||
|
||||
parsed_default_conf = util.filter_comments(self.config.parser.parsed[default_conf])
|
||||
|
||||
self.assertTrue(util.contains_at_depth(parsed_default_conf, "nomatch.com", 3))
|
||||
|
||||
def test_deploy_no_match_default_set_multi_level_path(self):
|
||||
default_conf = self.config.parser.abs_path('sites-enabled/default')
|
||||
foo_conf = self.config.parser.abs_path('foo.conf')
|
||||
del self.config.parser.parsed[default_conf][0][1][0]
|
||||
del self.config.parser.parsed[default_conf][0][1][0]
|
||||
self.config.version = (1, 3, 1)
|
||||
|
||||
self.config.deploy_cert(
|
||||
"www.nomatch.com",
|
||||
"example/cert.pem",
|
||||
"example/key.pem",
|
||||
"example/chain.pem",
|
||||
"example/fullchain.pem")
|
||||
self.config.save()
|
||||
|
||||
self.config.parser.load()
|
||||
|
||||
parsed_foo_conf = util.filter_comments(self.config.parser.parsed[foo_conf])
|
||||
|
||||
self.assertEqual([['server'],
|
||||
[['listen', '*:80', 'ssl'],
|
||||
['server_name', 'www.nomatch.com'],
|
||||
['root', '/home/ubuntu/sites/foo/'],
|
||||
[['location', '/status'], [[['types'], [['image/jpeg', 'jpg']]]]],
|
||||
[['location', '~', 'case_sensitive\\.php$'], [['index', 'index.php'],
|
||||
['root', '/var/root']]],
|
||||
[['location', '~*', 'case_insensitive\\.php$'], []],
|
||||
[['location', '=', 'exact_match\\.php$'], []],
|
||||
[['location', '^~', 'ignore_regex\\.php$'], []],
|
||||
['ssl_certificate', 'example/fullchain.pem'],
|
||||
['ssl_certificate_key', 'example/key.pem']]],
|
||||
parsed_foo_conf[1][1][1])
|
||||
|
||||
def test_deploy_no_match_no_default_set(self):
|
||||
default_conf = self.config.parser.abs_path('sites-enabled/default')
|
||||
foo_conf = self.config.parser.abs_path('foo.conf')
|
||||
del self.config.parser.parsed[default_conf][0][1][0]
|
||||
del self.config.parser.parsed[default_conf][0][1][0]
|
||||
del self.config.parser.parsed[foo_conf][2][1][0][1][0]
|
||||
self.config.version = (1, 3, 1)
|
||||
|
||||
self.assertRaises(errors.MisconfigurationError, self.config.deploy_cert,
|
||||
"www.nomatch.com", "example/cert.pem", "example/key.pem",
|
||||
"example/chain.pem", "example/fullchain.pem")
|
||||
|
||||
def test_deploy_no_match_fail_multiple_defaults(self):
|
||||
self.config.version = (1, 3, 1)
|
||||
self.assertRaises(errors.MisconfigurationError, self.config.deploy_cert,
|
||||
"www.nomatch.com", "example/cert.pem", "example/key.pem",
|
||||
"example/chain.pem", "example/fullchain.pem")
|
||||
|
||||
def test_deploy_no_match_add_redirect(self):
|
||||
default_conf = self.config.parser.abs_path('sites-enabled/default')
|
||||
foo_conf = self.config.parser.abs_path('foo.conf')
|
||||
del self.config.parser.parsed[foo_conf][2][1][0][1][0] # remove default_server
|
||||
self.config.version = (1, 3, 1)
|
||||
|
||||
self.config.deploy_cert(
|
||||
"www.nomatch.com",
|
||||
"example/cert.pem",
|
||||
"example/key.pem",
|
||||
"example/chain.pem",
|
||||
"example/fullchain.pem")
|
||||
|
||||
self.config.enhance("www.nomatch.com", "redirect")
|
||||
|
||||
self.config.save()
|
||||
|
||||
self.config.parser.load()
|
||||
|
||||
expected = [
|
||||
['if', '($scheme', '!=', '"https")'],
|
||||
[['return', '301', 'https://$host$request_uri']]
|
||||
]
|
||||
|
||||
generated_conf = self.config.parser.parsed[default_conf]
|
||||
self.assertTrue(util.contains_at_depth(generated_conf, expected, 2))
|
||||
|
||||
|
||||
class InstallSslOptionsConfTest(util.NginxTest):
|
||||
"""Test that the options-ssl-nginx.conf file is installed and updated properly."""
|
||||
|
||||
@@ -601,7 +752,7 @@ class InstallSslOptionsConfTest(util.NginxTest):
|
||||
with mock.patch("certbot.plugins.common.logger") as mock_logger:
|
||||
self._call()
|
||||
self.assertEqual(mock_logger.warning.call_args[0][0],
|
||||
"%s has been manually modified; updated ssl configuration options "
|
||||
"%s has been manually modified; updated file "
|
||||
"saved to %s. We recommend updating %s for security purposes.")
|
||||
self.assertEqual(crypto_util.sha256sum(constants.MOD_SSL_CONF_SRC),
|
||||
self._current_ssl_options_hash())
|
||||
|
||||
@@ -50,7 +50,9 @@ class NginxParserTest(util.NginxTest): #pylint: disable=too-many-public-methods
|
||||
'sites-enabled/example.com',
|
||||
'sites-enabled/migration.com',
|
||||
'sites-enabled/sslon.com',
|
||||
'sites-enabled/globalssl.com']]),
|
||||
'sites-enabled/globalssl.com',
|
||||
'sites-enabled/ipv6.com',
|
||||
'sites-enabled/ipv6ssl.com']]),
|
||||
set(nparser.parsed.keys()))
|
||||
self.assertEqual([['server_name', 'somename', 'alias', 'another.alias']],
|
||||
nparser.parsed[nparser.abs_path('server.conf')])
|
||||
@@ -74,7 +76,7 @@ class NginxParserTest(util.NginxTest): #pylint: disable=too-many-public-methods
|
||||
parsed = nparser._parse_files(nparser.abs_path(
|
||||
'sites-enabled/example.com.test'))
|
||||
self.assertEqual(3, len(glob.glob(nparser.abs_path('*.test'))))
|
||||
self.assertEqual(5, len(
|
||||
self.assertEqual(7, len(
|
||||
glob.glob(nparser.abs_path('sites-enabled/*.test'))))
|
||||
self.assertEqual([[['server'], [['listen', '69.50.225.155:9000'],
|
||||
['listen', '127.0.0.1'],
|
||||
@@ -110,7 +112,8 @@ class NginxParserTest(util.NginxTest): #pylint: disable=too-many-public-methods
|
||||
vhosts = nparser.get_vhosts()
|
||||
|
||||
vhost = obj.VirtualHost(nparser.abs_path('sites-enabled/globalssl.com'),
|
||||
[obj.Addr('4.8.2.6', '57', True, False)],
|
||||
[obj.Addr('4.8.2.6', '57', True, False,
|
||||
False, False)],
|
||||
True, True, set(['globalssl.com']), [], [0])
|
||||
|
||||
globalssl_com = [x for x in vhosts if 'globalssl.com' in x.filep][0]
|
||||
@@ -121,34 +124,42 @@ class NginxParserTest(util.NginxTest): #pylint: disable=too-many-public-methods
|
||||
vhosts = nparser.get_vhosts()
|
||||
|
||||
vhost1 = obj.VirtualHost(nparser.abs_path('nginx.conf'),
|
||||
[obj.Addr('', '8080', False, False)],
|
||||
[obj.Addr('', '8080', False, False,
|
||||
False, False)],
|
||||
False, True,
|
||||
set(['localhost',
|
||||
r'~^(www\.)?(example|bar)\.']),
|
||||
[], [10, 1, 9])
|
||||
vhost2 = obj.VirtualHost(nparser.abs_path('nginx.conf'),
|
||||
[obj.Addr('somename', '8080', False, False),
|
||||
obj.Addr('', '8000', False, False)],
|
||||
[obj.Addr('somename', '8080', False, False,
|
||||
False, False),
|
||||
obj.Addr('', '8000', False, False,
|
||||
False, False)],
|
||||
False, True,
|
||||
set(['somename', 'another.alias', 'alias']),
|
||||
[], [10, 1, 12])
|
||||
vhost3 = obj.VirtualHost(nparser.abs_path('sites-enabled/example.com'),
|
||||
[obj.Addr('69.50.225.155', '9000',
|
||||
False, False),
|
||||
obj.Addr('127.0.0.1', '', False, False)],
|
||||
False, False, False, False),
|
||||
obj.Addr('127.0.0.1', '', False, False,
|
||||
False, False)],
|
||||
False, True,
|
||||
set(['.example.com', 'example.*']), [], [0])
|
||||
vhost4 = obj.VirtualHost(nparser.abs_path('sites-enabled/default'),
|
||||
[obj.Addr('myhost', '', False, True)],
|
||||
[obj.Addr('myhost', '', False, True,
|
||||
False, False),
|
||||
obj.Addr('otherhost', '', False, True,
|
||||
False, False)],
|
||||
False, True, set(['www.example.org']),
|
||||
[], [0])
|
||||
vhost5 = obj.VirtualHost(nparser.abs_path('foo.conf'),
|
||||
[obj.Addr('*', '80', True, True)],
|
||||
[obj.Addr('*', '80', True, True,
|
||||
False, False)],
|
||||
True, True, set(['*.www.foo.com',
|
||||
'*.www.example.com']),
|
||||
[], [2, 1, 0])
|
||||
|
||||
self.assertEqual(10, len(vhosts))
|
||||
self.assertEqual(12, len(vhosts))
|
||||
example_com = [x for x in vhosts if 'example.com' in x.filep][0]
|
||||
self.assertEqual(vhost3, example_com)
|
||||
default = [x for x in vhosts if 'default' in x.filep][0]
|
||||
@@ -273,11 +284,16 @@ class NginxParserTest(util.NginxTest): #pylint: disable=too-many-public-methods
|
||||
['server_name', 'example.*'], []
|
||||
]]])
|
||||
mock_vhost.names = set(['foobar.com', 'example.*'])
|
||||
self.assertRaises(errors.MisconfigurationError,
|
||||
nparser.add_server_directives,
|
||||
mock_vhost,
|
||||
[['ssl_certificate', 'cert.pem']],
|
||||
replace=True)
|
||||
nparser.add_server_directives(
|
||||
mock_vhost, [['ssl_certificate', 'cert.pem']], replace=True)
|
||||
self.assertEqual(
|
||||
nparser.parsed[filep],
|
||||
[[['server'], [['listen', '69.50.225.155:9000'],
|
||||
['listen', '127.0.0.1'],
|
||||
['server_name', 'foobar.com'], ['#', COMMENT],
|
||||
['server_name', 'example.*'], [],
|
||||
['ssl_certificate', 'cert.pem'], ['#', COMMENT], [],
|
||||
]]])
|
||||
|
||||
def test_get_best_match(self):
|
||||
target_name = 'www.eff.org'
|
||||
@@ -390,6 +406,29 @@ class NginxParserTest(util.NginxTest): #pylint: disable=too-many-public-methods
|
||||
])
|
||||
self.assertTrue(server['ssl'])
|
||||
|
||||
def test_create_new_vhost_from_default(self):
|
||||
nparser = parser.NginxParser(self.config_path)
|
||||
|
||||
vhosts = nparser.get_vhosts()
|
||||
default = [x for x in vhosts if 'default' in x.filep][0]
|
||||
new_vhost = nparser.create_new_vhost_from_default(default)
|
||||
nparser.filedump(ext='')
|
||||
|
||||
# check properties of new vhost
|
||||
self.assertFalse(next(iter(new_vhost.addrs)).default)
|
||||
self.assertNotEqual(new_vhost.path, default.path)
|
||||
|
||||
# check that things are written to file correctly
|
||||
new_nparser = parser.NginxParser(self.config_path)
|
||||
new_vhosts = new_nparser.get_vhosts()
|
||||
new_defaults = [x for x in new_vhosts if 'default' in x.filep]
|
||||
self.assertEqual(len(new_defaults), 2)
|
||||
new_vhost_parsed = new_defaults[1]
|
||||
self.assertFalse(next(iter(new_vhost_parsed.addrs)).default)
|
||||
self.assertEqual(next(iter(default.names)), next(iter(new_vhost_parsed.names)))
|
||||
self.assertEqual(len(default.raw), len(new_vhost_parsed.raw))
|
||||
self.assertTrue(next(iter(default.addrs)).super_eq(next(iter(new_vhost_parsed.addrs))))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
server {
|
||||
listen myhost default_server;
|
||||
listen otherhost default_server;
|
||||
server_name www.example.org;
|
||||
|
||||
location / {
|
||||
|
||||
certbot-nginx/certbot_nginx/tests/testdata/etc_nginx/sites-enabled/ipv6.com (new file, 5 lines, vendored)
@@ -0,0 +1,5 @@
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
server_name ipv6.com;
|
||||
}
|
||||
certbot-nginx/certbot_nginx/tests/testdata/etc_nginx/sites-enabled/ipv6ssl.com (new file, 5 lines, vendored)
@@ -0,0 +1,5 @@
|
||||
server {
|
||||
listen 443 ssl;
|
||||
listen [::]:443 ssl ipv6only=on;
|
||||
server_name ipv6ssl.com;
|
||||
}
|
||||
@@ -66,7 +66,7 @@ class TlsSniPerformTest(util.NginxTest):
|
||||
self.sni.add_chall(self.achalls[1])
|
||||
mock_choose.return_value = None
|
||||
result = self.sni.perform()
|
||||
self.assertTrue(result is None)
|
||||
self.assertFalse(result is None)
|
||||
|
||||
def test_perform0(self):
|
||||
responses = self.sni.perform()
|
||||
@@ -125,10 +125,10 @@ class TlsSniPerformTest(util.NginxTest):
|
||||
self.sni.add_chall(self.achalls[0])
|
||||
self.sni.add_chall(self.achalls[2])
|
||||
|
||||
v_addr1 = [obj.Addr("69.50.225.155", "9000", True, False),
|
||||
obj.Addr("127.0.0.1", "", False, False)]
|
||||
v_addr2 = [obj.Addr("myhost", "", False, True)]
|
||||
v_addr2_print = [obj.Addr("myhost", "", False, False)]
|
||||
v_addr1 = [obj.Addr("69.50.225.155", "9000", True, False, False, False),
|
||||
obj.Addr("127.0.0.1", "", False, False, False, False)]
|
||||
v_addr2 = [obj.Addr("myhost", "", False, True, False, False)]
|
||||
v_addr2_print = [obj.Addr("myhost", "", False, False, False, False)]
|
||||
ll_addr = [v_addr1, v_addr2]
|
||||
self.sni._mod_config(ll_addr) # pylint: disable=protected-access
|
||||
|
||||
|
||||
@@ -65,7 +65,6 @@ def get_nginx_configurator(
|
||||
in_progress_dir=os.path.join(backups, "IN_PROGRESS"),
|
||||
server="https://acme-server.org:443/new",
|
||||
tls_sni_01_port=5001,
|
||||
dry_run=False,
|
||||
),
|
||||
name="nginx",
|
||||
version=version)
|
||||
|
||||
@@ -51,19 +51,32 @@ class NginxTlsSni01(common.TLSSNI01):
|
||||
default_addr = "{0} ssl".format(
|
||||
self.configurator.config.tls_sni_01_port)
|
||||
|
||||
for achall in self.achalls:
|
||||
vhost = self.configurator.choose_vhost(achall.domain)
|
||||
if vhost is None:
|
||||
logger.error(
|
||||
"No nginx vhost exists with server_name matching: %s. "
|
||||
"Please specify server_names in the Nginx config.",
|
||||
achall.domain)
|
||||
return None
|
||||
ipv6, ipv6only = self.configurator.ipv6_info(
|
||||
self.configurator.config.tls_sni_01_port)
|
||||
|
||||
if vhost.addrs:
|
||||
for achall in self.achalls:
|
||||
vhost = self.configurator.choose_vhost(achall.domain, raise_if_no_match=False)
|
||||
|
||||
if vhost is not None and vhost.addrs:
|
||||
addresses.append(list(vhost.addrs))
|
||||
else:
|
||||
addresses.append([obj.Addr.fromstring(default_addr)])
|
||||
if ipv6:
|
||||
# If IPv6 is active in Nginx configuration
|
||||
ipv6_addr = "[::]:{0} ssl".format(
|
||||
self.configurator.config.tls_sni_01_port)
|
||||
if not ipv6only:
|
||||
# If ipv6only=on is not already present in the config
|
||||
ipv6_addr = ipv6_addr + " ipv6only=on"
|
||||
addresses.append([obj.Addr.fromstring(default_addr),
|
||||
obj.Addr.fromstring(ipv6_addr)])
|
||||
logger.info(("Using default addresses %s and %s for " +
|
||||
"TLSSNI01 authentication."),
|
||||
default_addr,
|
||||
ipv6_addr)
|
||||
else:
|
||||
addresses.append([obj.Addr.fromstring(default_addr)])
|
||||
logger.info("Using default address %s for TLSSNI01 authentication.",
|
||||
default_addr)
|
||||
|
||||
# Create challenge certs
|
||||
responses = [self._setup_challenge_cert(x) for x in self.achalls]
|
||||
@@ -115,9 +128,8 @@ class NginxTlsSni01(common.TLSSNI01):
|
||||
break
|
||||
if not included:
|
||||
raise errors.MisconfigurationError(
|
||||
'LetsEncrypt could not find an HTTP block to include '
|
||||
'Certbot could not find an HTTP block to include '
|
||||
'TLS-SNI-01 challenges in %s.' % root)
|
||||
|
||||
config = [self._make_server_block(pair[0], pair[1])
|
||||
for pair in six.moves.zip(self.achalls, ll_addrs)]
|
||||
config = nginxparser.UnspacedList(config)
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.16.0.dev0'
|
||||
version = '0.20.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Certbot client."""
|
||||
|
||||
# version number like 1.2.3a0, must have at least 2 parts, like 1.2
|
||||
__version__ = '0.16.0.dev0'
|
||||
__version__ = '0.20.0.dev0'
|
||||
|
||||
@@ -28,7 +28,6 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
|
||||
class AnnotatedChallenge(jose.ImmutableMap):
|
||||
"""Client annotated challenge.
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ import datetime
|
||||
import logging
|
||||
import os
|
||||
import pytz
|
||||
import re
|
||||
import traceback
|
||||
import zope.component
|
||||
|
||||
@@ -45,7 +46,7 @@ def rename_lineage(config):
|
||||
"""
|
||||
disp = zope.component.getUtility(interfaces.IDisplay)
|
||||
|
||||
certname = _get_certname(config, "rename")
|
||||
certname = _get_certnames(config, "rename")[0]
|
||||
|
||||
new_certname = config.new_certname
|
||||
if not new_certname:
|
||||
@@ -87,11 +88,12 @@ def certificates(config):
|
||||
|
||||
def delete(config):
|
||||
"""Delete Certbot files associated with a certificate lineage."""
|
||||
certname = _get_certname(config, "delete")
|
||||
storage.delete_files(config, certname)
|
||||
disp = zope.component.getUtility(interfaces.IDisplay)
|
||||
disp.notification("Deleted all files relating to certificate {0}."
|
||||
.format(certname), pause=False)
|
||||
certnames = _get_certnames(config, "delete", allow_multiple=True)
|
||||
for certname in certnames:
|
||||
storage.delete_files(config, certname)
|
||||
disp = zope.component.getUtility(interfaces.IDisplay)
|
||||
disp.notification("Deleted all files relating to certificate {0}."
|
||||
.format(certname), pause=False)
|
||||
|
||||
###################
|
||||
# Public Helpers
|
||||
@@ -141,28 +143,162 @@ def find_duplicative_certs(config, domains):
|
||||
|
||||
return _search_lineages(config, update_certs_for_domain_matches, (None, None))
|
||||
|
||||
def _archive_files(candidate_lineage, filetype):
|
||||
""" In order to match things like:
|
||||
/etc/letsencrypt/archive/example.com/chain1.pem.
|
||||
|
||||
Anonymous functions which call this function are eventually passed (in a list) to
|
||||
`match_and_check_overlaps` to help specify the acceptable_matches.
|
||||
|
||||
:param `.storage.RenewableCert` candidate_lineage: Lineage whose archive dir is to
|
||||
be searched.
|
||||
:param str filetype: main file name prefix e.g. "fullchain" or "chain".
|
||||
|
||||
:returns: Files in candidate_lineage's archive dir that match the provided filetype.
|
||||
:rtype: list of str or None
|
||||
"""
|
||||
archive_dir = candidate_lineage.archive_dir
|
||||
pattern = [os.path.join(archive_dir, f) for f in os.listdir(archive_dir)
|
||||
if re.match("{0}[0-9]*.pem".format(filetype), f)]
|
||||
if len(pattern) > 0:
|
||||
return pattern
|
||||
else:
|
||||
return None
|
||||
|
||||
def _acceptable_matches():
|
||||
""" Generates the list that's passed to match_and_check_overlaps. Is its own function to
|
||||
make unit testing easier.
|
||||
|
||||
:returns: list of functions
|
||||
:rtype: list
|
||||
"""
|
||||
return [lambda x: x.fullchain_path, lambda x: x.cert_path,
|
||||
lambda x: _archive_files(x, "cert"), lambda x: _archive_files(x, "fullchain")]
|
||||
|
||||
def cert_path_to_lineage(cli_config):
|
||||
""" If config.cert_path is defined, try to find an appropriate value for config.certname.
|
||||
|
||||
:param `configuration.NamespaceConfig` cli_config: parsed command line arguments
|
||||
|
||||
:returns: a lineage name
|
||||
:rtype: str
|
||||
|
||||
:raises `errors.Error`: If the specified cert path can't be matched to a lineage name.
|
||||
:raises `errors.OverlappingMatchFound`: If the matched lineage's archive is shared.
|
||||
"""
|
||||
acceptable_matches = _acceptable_matches()
|
||||
match = match_and_check_overlaps(cli_config, acceptable_matches,
|
||||
lambda x: cli_config.cert_path[0], lambda x: x.lineagename)
|
||||
return match[0]
|
||||
|
||||
def match_and_check_overlaps(cli_config, acceptable_matches, match_func, rv_func):
|
||||
""" Searches through all lineages for a match, and checks for duplicates.
|
||||
If a duplicate is found, an error is raised, as performing operations on lineages
|
||||
that have their properties incorrectly duplicated elsewhere is probably a bad idea.
|
||||
|
||||
:param `configuration.NamespaceConfig` cli_config: parsed command line arguments
|
||||
:param list acceptable_matches: a list of functions that specify acceptable matches
|
||||
:param function match_func: specifies what to match
|
||||
:param function rv_func: specifies what to return
|
||||
|
||||
"""
|
||||
def find_matches(candidate_lineage, return_value, acceptable_matches):
|
||||
"""Returns a list of matches using _search_lineages."""
|
||||
acceptable_matches = [func(candidate_lineage) for func in acceptable_matches]
|
||||
acceptable_matches_rv = []
|
||||
for item in acceptable_matches:
|
||||
if isinstance(item, list):
|
||||
acceptable_matches_rv += item
|
||||
else:
|
||||
acceptable_matches_rv.append(item)
|
||||
match = match_func(candidate_lineage)
|
||||
if match in acceptable_matches_rv:
|
||||
return_value.append(rv_func(candidate_lineage))
|
||||
return return_value
|
||||
|
||||
matched = _search_lineages(cli_config, find_matches, [], acceptable_matches)
|
||||
if not matched:
|
||||
raise errors.Error("No match found for cert-path {0}!".format(cli_config.cert_path[0]))
|
||||
elif len(matched) > 1:
|
||||
raise errors.OverlappingMatchFound()
|
||||
else:
|
||||
return matched
|
||||
|
||||
def human_readable_cert_info(config, cert, skip_filter_checks=False):
|
||||
""" Returns a human readable description of info about a RenewableCert object"""
|
||||
certinfo = []
|
||||
checker = ocsp.RevocationChecker()
|
||||
|
||||
if config.certname and cert.lineagename != config.certname and not skip_filter_checks:
|
||||
return ""
|
||||
if config.domains and not set(config.domains).issubset(cert.names()):
|
||||
return ""
|
||||
now = pytz.UTC.fromutc(datetime.datetime.utcnow())
|
||||
|
||||
reasons = []
|
||||
if cert.is_test_cert:
|
||||
reasons.append('TEST_CERT')
|
||||
if cert.target_expiry <= now:
|
||||
reasons.append('EXPIRED')
|
||||
if checker.ocsp_revoked(cert.cert, cert.chain):
|
||||
reasons.append('REVOKED')
|
||||
|
||||
if reasons:
|
||||
status = "INVALID: " + ", ".join(reasons)
|
||||
else:
|
||||
diff = cert.target_expiry - now
|
||||
if diff.days == 1:
|
||||
status = "VALID: 1 day"
|
||||
elif diff.days < 1:
|
||||
status = "VALID: {0} hour(s)".format(diff.seconds // 3600)
|
||||
else:
|
||||
status = "VALID: {0} days".format(diff.days)
|
||||
|
||||
valid_string = "{0} ({1})".format(cert.target_expiry, status)
|
||||
certinfo.append(" Certificate Name: {0}\n"
|
||||
" Domains: {1}\n"
|
||||
" Expiry Date: {2}\n"
|
||||
" Certificate Path: {3}\n"
|
||||
" Private Key Path: {4}".format(
|
||||
cert.lineagename,
|
||||
" ".join(cert.names()),
|
||||
valid_string,
|
||||
cert.fullchain,
|
||||
cert.privkey))
|
||||
return "".join(certinfo)
|
||||
|
||||
###################
|
||||
# Private Helpers
|
||||
###################
|
||||
|
||||
def _get_certname(config, verb):
|
||||
def _get_certnames(config, verb, allow_multiple=False):
|
||||
"""Get certname from flag, interactively, or error out.
|
||||
"""
|
||||
certname = config.certname
|
||||
if not certname:
|
||||
if certname:
|
||||
certnames = [certname]
|
||||
else:
|
||||
disp = zope.component.getUtility(interfaces.IDisplay)
|
||||
filenames = storage.renewal_conf_files(config)
|
||||
choices = [storage.lineagename_for_filename(name) for name in filenames]
|
||||
if not choices:
|
||||
raise errors.Error("No existing certificates found.")
|
||||
code, index = disp.menu("Which certificate would you like to {0}?".format(verb),
|
||||
choices, flag="--cert-name",
|
||||
force_interactive=True)
|
||||
if code != display_util.OK or not index in range(0, len(choices)):
|
||||
raise errors.Error("User ended interaction.")
|
||||
certname = choices[index]
|
||||
return certname
|
||||
if allow_multiple:
|
||||
code, certnames = disp.checklist(
|
||||
"Which certificate(s) would you like to {0}?".format(verb),
|
||||
choices, cli_flag="--cert-name",
|
||||
force_interactive=True)
|
||||
if code != display_util.OK:
|
||||
raise errors.Error("User ended interaction.")
|
||||
else:
|
||||
code, index = disp.menu("Which certificate would you like to {0}?".format(verb),
|
||||
choices, cli_flag="--cert-name",
|
||||
force_interactive=True)
|
||||
|
||||
if code != display_util.OK or index not in range(0, len(choices)):
|
||||
raise errors.Error("User ended interaction.")
|
||||
certnames = [choices[index]]
|
||||
return certnames
|
||||
|
||||
def _report_lines(msgs):
|
||||
"""Format a results report for a category of single-line renewal outcomes"""
|
||||
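The filename filter inside _archive_files accepts any versioned archive file for the given prefix. What the pattern matches in isolation, on example filenames (note that re.match only anchors at the start and the '.' is unescaped, so the check is a permissive prefix match):

import re

filetype = "chain"
names = ["chain.pem", "chain1.pem", "chain12.pem", "fullchain1.pem", "chain1.pem.bak"]
matched = [n for n in names if re.match("{0}[0-9]*.pem".format(filetype), n)]
print(matched)  # ['chain.pem', 'chain1.pem', 'chain12.pem', 'chain1.pem.bak']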
@@ -171,44 +307,8 @@ def _report_lines(msgs):
|
||||
def _report_human_readable(config, parsed_certs):
|
||||
"""Format a results report for a parsed cert"""
|
||||
certinfo = []
|
||||
checker = ocsp.RevocationChecker()
|
||||
for cert in parsed_certs:
|
||||
if config.certname and cert.lineagename != config.certname:
|
||||
continue
|
||||
if config.domains and not set(config.domains).issubset(cert.names()):
|
||||
continue
|
||||
now = pytz.UTC.fromutc(datetime.datetime.utcnow())
|
||||
|
||||
reasons = []
|
||||
if cert.is_test_cert:
|
||||
reasons.append('TEST_CERT')
|
||||
if cert.target_expiry <= now:
|
||||
reasons.append('EXPIRED')
|
||||
if checker.ocsp_revoked(cert.cert, cert.chain):
|
||||
reasons.append('REVOKED')
|
||||
|
||||
if reasons:
|
||||
status = "INVALID: " + ", ".join(reasons)
|
||||
else:
|
||||
diff = cert.target_expiry - now
|
||||
if diff.days == 1:
|
||||
status = "VALID: 1 day"
|
||||
elif diff.days < 1:
|
||||
status = "VALID: {0} hour(s)".format(diff.seconds // 3600)
|
||||
else:
|
||||
status = "VALID: {0} days".format(diff.days)
|
||||
|
||||
valid_string = "{0} ({1})".format(cert.target_expiry, status)
|
||||
certinfo.append(" Certificate Name: {0}\n"
|
||||
" Domains: {1}\n"
|
||||
" Expiry Date: {2}\n"
|
||||
" Certificate Path: {3}\n"
|
||||
" Private Key Path: {4}".format(
|
||||
cert.lineagename,
|
||||
" ".join(cert.names()),
|
||||
valid_string,
|
||||
cert.fullchain,
|
||||
cert.privkey))
|
||||
certinfo.append(human_readable_cert_info(config, cert))
|
||||
return "\n".join(certinfo)
|
||||
|
||||
def _describe_certs(config, parsed_certs, parse_failures):
|
||||
@@ -232,11 +332,17 @@ def _describe_certs(config, parsed_certs, parse_failures):
|
||||
disp = zope.component.getUtility(interfaces.IDisplay)
|
||||
disp.notification("\n".join(out), pause=False, wrap=False)
|
||||
|
||||
def _search_lineages(cli_config, func, initial_rv):
|
||||
def _search_lineages(cli_config, func, initial_rv, *args):
|
||||
"""Iterate func over unbroken lineages, allowing custom return conditions.
|
||||
|
||||
Allows flexible customization of return values, including multiple
|
||||
return values and complex checks.
|
||||
|
||||
:param `configuration.NamespaceConfig` cli_config: parsed command line arguments
|
||||
:param function func: function used while searching over lineages
|
||||
:param initial_rv: initial return value of the function (any type)
|
||||
|
||||
:returns: Whatever was specified by `func` if a match is found.
|
||||
"""
|
||||
configs_dir = cli_config.renewal_configs_dir
|
||||
# Verify the directory is there
|
||||
@@ -250,5 +356,5 @@ def _search_lineages(cli_config, func, initial_rv):
|
||||
logger.debug("Renewal conf file %s is broken. Skipping.", renewal_file)
|
||||
logger.debug("Traceback was:\n%s", traceback.format_exc())
|
||||
continue
|
||||
rv = func(candidate_lineage, rv)
|
||||
rv = func(candidate_lineage, rv, *args)
|
||||
return rv
|
||||
|
||||
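The *args forwarding added to _search_lineages is what lets match_and_check_overlaps hand its acceptable_matches list through to find_matches. The pattern in isolation, with placeholder names:

def search(func, initial_rv, *args):
    rv = initial_rv
    for candidate in ['lineage-a', 'lineage-b']:  # stand-ins for parsed lineages
        rv = func(candidate, rv, *args)
    return rv

def find_matches(candidate, found, wanted):
    if candidate in wanted:
        found.append(candidate)
    return found

print(search(find_matches, [], ['lineage-b']))  # ['lineage-b']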
certbot/cli.py (347 lines changed)
@@ -11,6 +11,9 @@ import sys
|
||||
|
||||
import configargparse
|
||||
import six
|
||||
import zope.component
|
||||
|
||||
from zope.interface import interfaces as zope_interfaces
|
||||
|
||||
from acme import challenges
|
||||
|
||||
@@ -23,6 +26,7 @@ from certbot import hooks
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
|
||||
from certbot.display import util as display_util
|
||||
from certbot.plugins import disco as plugins_disco
|
||||
import certbot.plugins.selection as plugin_selection
|
||||
|
||||
@@ -45,8 +49,13 @@ if "CERTBOT_AUTO" in os.environ:
|
||||
# user saved the script under a different name
|
||||
LEAUTO = os.path.basename(os.environ["CERTBOT_AUTO"])
|
||||
|
||||
fragment = os.path.join(".local", "share", "letsencrypt")
|
||||
cli_command = LEAUTO if fragment in sys.argv[0] else "certbot"
|
||||
old_path_fragment = os.path.join(".local", "share", "letsencrypt")
|
||||
new_path_prefix = os.path.abspath(os.path.join(os.sep, "opt",
|
||||
"eff.org", "certbot", "venv"))
|
||||
if old_path_fragment in sys.argv[0] or sys.argv[0].startswith(new_path_prefix):
|
||||
cli_command = LEAUTO
|
||||
else:
|
||||
cli_command = "certbot"
|
||||
|
||||
# Argparse's help formatting has a lot of unhelpful peculiarities, so we want
|
||||
# to replace as much of it as we can...
|
||||
@@ -120,6 +129,7 @@ ZERO_ARG_ACTIONS = set(("store_const", "store_true",
|
||||
# This dictionary is used recursively, so if A modifies B and B modifies C,
|
||||
# it is determined that C was modified by the user if A was modified.
|
||||
VAR_MODIFIERS = {"account": set(("server",)),
|
||||
"renew_hook": set(("deploy_hook",)),
|
||||
"server": set(("dry_run", "staging",)),
|
||||
"webroot_map": set(("webroot_path",))}
|
||||
|
||||
@@ -273,7 +283,7 @@ def flag_default(name):
|
||||
# argparse has been set up; it is not accurate for all flags. Call it
|
||||
# with caution. Plugin defaults are missing, and some things are using
|
||||
# defaults defined in this file, not in constants.py :(
|
||||
return constants.CLI_DEFAULTS[name]
|
||||
return copy.deepcopy(constants.CLI_DEFAULTS[name])
|
||||
|
||||
|
||||
def config_help(name, hidden=False):
|
||||
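The copy.deepcopy added to flag_default guards mutable defaults, such as the flag_default("domains") list used later in this diff for -d/--domains, against being modified through the parsed arguments. The hazard in isolation; the dictionary here is a stand-in for constants.CLI_DEFAULTS:

import copy

CLI_DEFAULTS = {"domains": []}    # stand-in; the real table lives in constants.py

shared = CLI_DEFAULTS["domains"]  # old behaviour: hand out the stored object itself
shared.append("example.com")
print(CLI_DEFAULTS["domains"])    # ['example.com'] -- the shared default was polluted

CLI_DEFAULTS = {"domains": []}
safe = copy.deepcopy(CLI_DEFAULTS["domains"])  # new behaviour: callers get a copy
safe.append("example.com")
print(CLI_DEFAULTS["domains"])    # [] -- the default is untouched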
@@ -347,7 +357,7 @@ VERB_HELP = [
|
||||
" before and after renewal; see"
|
||||
" https://certbot.eff.org/docs/using.html#renewal for more"
|
||||
" information on these."),
|
||||
"usage": "\n\n certbot renew [--cert-name NAME] [options]\n\n"
|
||||
"usage": "\n\n certbot renew [--cert-name CERTNAME] [options]\n\n"
|
||||
}),
|
||||
("certificates", {
|
||||
"short": "List certificates managed by Certbot",
|
||||
@@ -439,9 +449,18 @@ class HelpfulArgumentParser(object):
|
||||
"delete": main.delete,
|
||||
}
|
||||
|
||||
# Get notification function for printing
|
||||
try:
|
||||
self.notify = zope.component.getUtility(
|
||||
interfaces.IDisplay).notification
|
||||
except zope_interfaces.ComponentLookupError:
|
||||
self.notify = display_util.NoninteractiveDisplay(
|
||||
sys.stdout).notification
|
||||
|
||||
|
||||
# List of topics for which additional help can be provided
|
||||
HELP_TOPICS = ["all", "security", "paths", "automation", "testing"] + list(self.VERBS)
|
||||
HELP_TOPICS += self.COMMANDS_TOPICS + ["manage"]
|
||||
HELP_TOPICS = ["all", "security", "paths", "automation", "testing"]
|
||||
HELP_TOPICS += list(self.VERBS) + self.COMMANDS_TOPICS + ["manage"]
|
||||
|
||||
plugin_names = list(plugins)
|
||||
self.help_topics = HELP_TOPICS + plugin_names + [None]
|
||||
@@ -510,10 +529,10 @@ class HelpfulArgumentParser(object):
|
||||
|
||||
usage = SHORT_USAGE
|
||||
if help_arg == True:
|
||||
print(usage + COMMAND_OVERVIEW % (apache_doc, nginx_doc) + HELP_USAGE)
|
||||
self.notify(usage + COMMAND_OVERVIEW % (apache_doc, nginx_doc) + HELP_USAGE)
|
||||
sys.exit(0)
|
||||
elif help_arg in self.COMMANDS_TOPICS:
|
||||
print(usage + self._list_subcommands())
|
||||
self.notify(usage + self._list_subcommands())
|
||||
sys.exit(0)
|
||||
elif help_arg == "all":
|
||||
# if we're doing --help all, the OVERVIEW is part of the SHORT_USAGE at
|
||||
@@ -848,36 +867,58 @@ def prepare_and_parse_args(plugins, args, detect_defaults=False): # pylint: dis
|
||||
"e.g. -vvv.")
|
||||
helpful.add(
|
||||
None, "-t", "--text", dest="text_mode", action="store_true",
|
||||
help=argparse.SUPPRESS)
|
||||
default=flag_default("text_mode"), help=argparse.SUPPRESS)
|
||||
helpful.add(
|
||||
None, "--max-log-backups", type=nonnegative_int,
|
||||
default=flag_default("max_log_backups"),
|
||||
help="Specifies the maximum number of backup logs that should "
|
||||
"be kept by Certbot's built in log rotation. Setting this "
|
||||
"flag to 0 disables log rotation entirely, causing "
|
||||
"Certbot to always append to the same log file.")
|
||||
helpful.add(
|
||||
[None, "automation", "run", "certonly"], "-n", "--non-interactive", "--noninteractive",
|
||||
dest="noninteractive_mode", action="store_true",
|
||||
default=flag_default("noninteractive_mode"),
|
||||
help="Run without ever asking for user input. This may require "
|
||||
"additional command line flags; the client will try to explain "
|
||||
"which ones are required if it finds one missing")
|
||||
helpful.add(
|
||||
[None, "register", "run", "certonly"],
|
||||
constants.FORCE_INTERACTIVE_FLAG, action="store_true",
|
||||
default=flag_default("force_interactive"),
|
||||
help="Force Certbot to be interactive even if it detects it's not "
|
||||
"being run in a terminal. This flag cannot be used with the "
|
||||
"renew subcommand.")
|
||||
helpful.add(
|
||||
[None, "run", "certonly", "certificates"],
|
||||
"-d", "--domains", "--domain", dest="domains",
|
||||
metavar="DOMAIN", action=_DomainsAction, default=[],
|
||||
metavar="DOMAIN", action=_DomainsAction,
|
||||
default=flag_default("domains"),
|
||||
help="Domain names to apply. For multiple domains you can use "
|
||||
"multiple -d flags or enter a comma separated list of domains "
|
||||
"as a parameter. (default: Ask)")
|
||||
"as a parameter. The first domain provided will be the "
|
||||
"subject CN of the certificate, and all domains will be "
|
||||
"Subject Alternative Names on the certificate. "
|
||||
"The first domain will also be used in "
|
||||
"some software user interfaces and as the file paths for the "
|
||||
"certificate and related material unless otherwise "
|
||||
"specified or you already have a certificate with the same "
|
||||
"name. In the case of a name collision it will append a number "
|
||||
"like 0001 to the file path name. (default: Ask)")
|
||||
helpful.add(
|
||||
[None, "run", "certonly", "manage", "delete", "certificates"],
|
||||
[None, "run", "certonly", "manage", "delete", "certificates", "renew"],
|
||||
"--cert-name", dest="certname",
|
||||
metavar="CERTNAME", default=None,
|
||||
help="Certificate name to apply. Only one certificate name can be used "
|
||||
"per Certbot run. To see certificate names, run 'certbot certificates'. "
|
||||
"When creating a new certificate, specifies the new certificate's name.")
|
||||
metavar="CERTNAME", default=flag_default("certname"),
|
||||
help="Certificate name to apply. This name is used by Certbot for housekeeping "
|
||||
"and in file paths; it doesn't affect the content of the certificate itself. "
|
||||
"To see certificate names, run 'certbot certificates'. "
|
||||
"When creating a new certificate, specifies the new certificate's name. "
|
||||
"(default: the first provided domain or the name of an existing "
|
||||
"certificate on your system for the same domains)")
|
||||
helpful.add(
|
||||
[None, "testing", "renew", "certonly"],
|
||||
"--dry-run", action="store_true", dest="dry_run",
|
||||
default=flag_default("dry_run"),
|
||||
help="Perform a test run of the client, obtaining test (invalid) certificates"
|
||||
" but not saving them to disk. This can currently only be used"
|
||||
" with the 'certonly' and 'renew' subcommands. \nNote: Although --dry-run"
|
||||
@@ -887,9 +928,10 @@ def prepare_and_parse_args(plugins, args, detect_defaults=False): # pylint: dis
|
||||
" in order to obtain test certificates, and reloads webservers to deploy and then"
|
||||
" roll back those changes. It also calls --pre-hook and --post-hook commands"
|
||||
" if they are defined because they may be necessary to accurately simulate"
|
||||
" renewal. --renew-hook commands are not called.")
|
||||
" renewal. --deploy-hook commands are not called.")
|
||||
helpful.add(
|
||||
["register", "automation"], "--register-unsafely-without-email", action="store_true",
|
||||
default=flag_default("register_unsafely_without_email"),
|
||||
help="Specifying this flag enables registering an account with no "
|
||||
"email address. This is strongly discouraged, because in the "
|
||||
"event of key loss or account compromise you will irrevocably "
|
||||
@@ -900,27 +942,29 @@ def prepare_and_parse_args(plugins, args, detect_defaults=False): # pylint: dis
|
||||
"update to the web site.")
|
||||
helpful.add(
|
||||
"register", "--update-registration", action="store_true",
|
||||
default=flag_default("update_registration"),
|
||||
help="With the register verb, indicates that details associated "
|
||||
"with an existing registration, such as the e-mail address, "
|
||||
"should be updated, rather than registering a new account.")
|
||||
helpful.add(
|
||||
["register", "unregister", "automation"], "-m", "--email",
|
||||
default=flag_default("email"),
|
||||
help=config_help("email"))
|
||||
helpful.add(["register", "automation"], "--eff-email", action="store_true",
|
||||
default=None, dest="eff_email",
|
||||
default=flag_default("eff_email"), dest="eff_email",
|
||||
help="Share your e-mail address with EFF")
|
||||
helpful.add(["register", "automation"], "--no-eff-email", action="store_false",
|
||||
default=None, dest="eff_email",
|
||||
default=flag_default("eff_email"), dest="eff_email",
|
||||
help="Don't share your e-mail address with EFF")
|
||||
helpful.add(
|
||||
["automation", "certonly", "run"],
|
||||
"--keep-until-expiring", "--keep", "--reinstall",
|
||||
dest="reinstall", action="store_true",
|
||||
dest="reinstall", action="store_true", default=flag_default("reinstall"),
|
||||
help="If the requested certificate matches an existing certificate, always keep the "
|
||||
"existing one until it is due for renewal (for the "
|
||||
"'run' subcommand this means reinstall the existing certificate). (default: Ask)")
|
||||
helpful.add(
|
||||
"automation", "--expand", action="store_true",
|
||||
"automation", "--expand", action="store_true", default=flag_default("expand"),
|
||||
help="If an existing certificate is a strict subset of the requested names, "
|
||||
"always expand and replace it with the additional names. (default: Ask)")
|
||||
helpful.add(
|
||||
@@ -929,21 +973,24 @@ def prepare_and_parse_args(plugins, args, detect_defaults=False): # pylint: dis
|
||||
help="show program's version number and exit")
|
||||
helpful.add(
|
||||
["automation", "renew"],
|
||||
"--force-renewal", "--renew-by-default",
|
||||
action="store_true", dest="renew_by_default", help="If a certificate "
|
||||
"--force-renewal", "--renew-by-default", dest="renew_by_default",
|
||||
action="store_true", default=flag_default("renew_by_default"),
|
||||
help="If a certificate "
|
||||
"already exists for the requested domains, renew it now, "
|
||||
"regardless of whether it is near expiry. (Often "
|
||||
"--keep-until-expiring is more appropriate). Also implies "
|
||||
"--expand.")
|
||||
helpful.add(
|
||||
"automation", "--renew-with-new-domains",
|
||||
action="store_true", dest="renew_with_new_domains", help="If a "
|
||||
"automation", "--renew-with-new-domains", dest="renew_with_new_domains",
|
||||
action="store_true", default=flag_default("renew_with_new_domains"),
|
||||
help="If a "
|
||||
"certificate already exists for the requested certificate name "
|
||||
"but does not match the requested domains, renew it now, "
|
||||
"regardless of whether it is near expiry.")
|
||||
helpful.add(
|
||||
["automation", "renew", "certonly"],
|
||||
"--allow-subset-of-names", action="store_true",
|
||||
default=flag_default("allow_subset_of_names"),
|
||||
help="When performing domain validation, do not consider it a failure "
|
||||
"if authorizations can not be obtained for a strict subset of "
|
||||
"the requested domains. This may be useful for allowing renewals for "
|
||||
@@ -951,39 +998,46 @@ def prepare_and_parse_args(plugins, args, detect_defaults=False): # pylint: dis
|
||||
"at this system. This option cannot be used with --csr.")
|
||||
helpful.add(
|
||||
"automation", "--agree-tos", dest="tos", action="store_true",
|
||||
default=flag_default("tos"),
|
||||
help="Agree to the ACME Subscriber Agreement (default: Ask)")
|
||||
helpful.add(
|
||||
["unregister", "automation"], "--account", metavar="ACCOUNT_ID",
|
||||
default=flag_default("account"),
|
||||
help="Account ID to use")
|
||||
helpful.add(
|
||||
"automation", "--duplicate", dest="duplicate", action="store_true",
|
||||
default=flag_default("duplicate"),
|
||||
help="Allow making a certificate lineage that duplicates an existing one "
|
||||
"(both can be renewed in parallel)")
|
||||
helpful.add(
|
||||
"automation", "--os-packages-only", action="store_true",
|
||||
default=flag_default("os_packages_only"),
|
||||
help="(certbot-auto only) install OS package dependencies and then stop")
|
||||
helpful.add(
|
||||
"automation", "--no-self-upgrade", action="store_true",
|
||||
default=flag_default("no_self_upgrade"),
|
||||
help="(certbot-auto only) prevent the certbot-auto script from"
|
||||
" upgrading itself to newer released versions (default: Upgrade"
|
||||
" automatically)")
|
||||
helpful.add(
|
||||
"automation", "--no-bootstrap", action="store_true",
|
||||
default=flag_default("no_bootstrap"),
|
||||
help="(certbot-auto only) prevent the certbot-auto script from"
|
||||
" installing OS-level dependencies (default: Prompt to install "
|
||||
" OS-wide dependencies, but exit if the user says 'No')")
|
||||
helpful.add(
|
||||
["automation", "renew", "certonly", "run"],
|
||||
"-q", "--quiet", dest="quiet", action="store_true",
|
||||
default=flag_default("quiet"),
|
||||
help="Silence all output except errors. Useful for automation via cron."
|
||||
" Implies --non-interactive.")
|
||||
# overwrites server, handled in HelpfulArgumentParser.parse_args()
|
||||
helpful.add(["testing", "revoke", "run"], "--test-cert", "--staging",
|
||||
action='store_true', dest='staging',
|
||||
help='Use the staging server to obtain or revoke test (invalid) certificates; equivalent'
|
||||
' to --server ' + constants.STAGING_URI)
|
||||
dest="staging", action="store_true", default=flag_default("staging"),
|
||||
help="Use the staging server to obtain or revoke test (invalid) certificates; equivalent"
|
||||
" to --server " + constants.STAGING_URI)
|
||||
helpful.add(
|
||||
"testing", "--debug", action="store_true",
|
||||
"testing", "--debug", action="store_true", default=flag_default("debug"),
|
||||
help="Show tracebacks in case of errors, and allow certbot-auto "
|
||||
"execution on experimental platforms")
|
||||
helpful.add(
|
||||
@@ -1013,6 +1067,7 @@ def prepare_and_parse_args(plugins, args, detect_defaults=False): # pylint: dis
|
||||
default=flag_default("http01_address"), help=config_help("http01_address"))
|
||||
helpful.add(
|
||||
"testing", "--break-my-certs", action="store_true",
|
||||
default=flag_default("break_my_certs"),
|
||||
help="Be willing to replace or renew valid certificates with invalid "
|
||||
"(testing/staging) certificates")
|
||||
helpful.add(
|
||||
@@ -1020,47 +1075,51 @@ def prepare_and_parse_args(plugins, args, detect_defaults=False): # pylint: dis
|
||||
default=flag_default("rsa_key_size"), help=config_help("rsa_key_size"))
|
||||
helpful.add(
|
||||
"security", "--must-staple", action="store_true",
|
||||
help=config_help("must_staple"), dest="must_staple", default=False)
|
||||
dest="must_staple", default=flag_default("must_staple"),
|
||||
help=config_help("must_staple"))
|
||||
helpful.add(
|
||||
"security", "--redirect", action="store_true",
|
||||
"security", "--redirect", action="store_true", dest="redirect",
|
||||
default=flag_default("redirect"),
|
||||
help="Automatically redirect all HTTP traffic to HTTPS for the newly "
|
||||
"authenticated vhost. (default: Ask)", dest="redirect", default=None)
|
||||
"authenticated vhost. (default: Ask)")
|
||||
helpful.add(
|
||||
"security", "--no-redirect", action="store_false",
|
||||
"security", "--no-redirect", action="store_false", dest="redirect",
|
||||
default=flag_default("redirect"),
|
||||
help="Do not automatically redirect all HTTP traffic to HTTPS for the newly "
|
||||
"authenticated vhost. (default: Ask)", dest="redirect", default=None)
|
||||
"authenticated vhost. (default: Ask)")
|
||||
helpful.add(
|
||||
"security", "--hsts", action="store_true",
|
||||
"security", "--hsts", action="store_true", dest="hsts", default=flag_default("hsts"),
|
||||
help="Add the Strict-Transport-Security header to every HTTP response."
|
||||
" Forcing browser to always use SSL for the domain."
|
||||
" Defends against SSL Stripping.", dest="hsts", default=False)
|
||||
" Defends against SSL Stripping.")
|
||||
helpful.add(
|
||||
"security", "--no-hsts", action="store_false",
|
||||
help=argparse.SUPPRESS, dest="hsts", default=False)
|
||||
"security", "--no-hsts", action="store_false", dest="hsts",
|
||||
default=flag_default("hsts"), help=argparse.SUPPRESS)
|
||||
helpful.add(
|
||||
"security", "--uir", action="store_true",
|
||||
help="Add the \"Content-Security-Policy: upgrade-insecure-requests\""
|
||||
" header to every HTTP response. Forcing the browser to use"
|
||||
" https:// for every http:// resource.", dest="uir", default=None)
|
||||
"security", "--uir", action="store_true", dest="uir", default=flag_default("uir"),
|
||||
help='Add the "Content-Security-Policy: upgrade-insecure-requests"'
|
||||
' header to every HTTP response. Forcing the browser to use'
|
||||
' https:// for every http:// resource.')
|
||||
helpful.add(
|
||||
"security", "--no-uir", action="store_false",
|
||||
help=argparse.SUPPRESS, dest="uir", default=None)
|
||||
"security", "--no-uir", action="store_false", dest="uir", default=flag_default("uir"),
|
||||
help=argparse.SUPPRESS)
|
||||
helpful.add(
|
||||
"security", "--staple-ocsp", action="store_true",
|
||||
"security", "--staple-ocsp", action="store_true", dest="staple",
|
||||
default=flag_default("staple"),
|
||||
help="Enables OCSP Stapling. A valid OCSP response is stapled to"
|
||||
" the certificate that the server offers during TLS.",
|
||||
dest="staple", default=None)
|
||||
" the certificate that the server offers during TLS.")
|
||||
helpful.add(
|
||||
"security", "--no-staple-ocsp", action="store_false",
|
||||
help=argparse.SUPPRESS, dest="staple", default=None)
|
||||
"security", "--no-staple-ocsp", action="store_false", dest="staple",
|
||||
default=flag_default("staple"), help=argparse.SUPPRESS)
|
||||
helpful.add(
|
||||
"security", "--strict-permissions", action="store_true",
|
||||
default=flag_default("strict_permissions"),
|
||||
help="Require that all configuration files are owned by the current "
|
||||
"user; only needed if your config is somewhere unsafe like /tmp/")
|
||||
helpful.add(
|
||||
["manual", "standalone", "certonly", "renew"],
|
||||
"--preferred-challenges", dest="pref_challs",
|
||||
action=_PrefChallAction, default=[],
|
||||
action=_PrefChallAction, default=flag_default("pref_challs"),
|
||||
help='A sorted, comma delimited list of the preferred challenge to '
|
||||
'use during authorization with the most preferred challenge '
|
||||
'listed first (Eg, "dns" or "tls-sni-01,http,dns"). '
|
||||
@@ -1085,25 +1144,35 @@ def prepare_and_parse_args(plugins, args, detect_defaults=False): # pylint: dis
|
||||
" run if an attempt was made to obtain/renew a certificate. If"
|
||||
" multiple renewed certificates have identical post-hooks, only"
|
||||
" one will be run.")
|
||||
helpful.add("renew", "--renew-hook",
|
||||
action=_RenewHookAction, help=argparse.SUPPRESS)
|
||||
helpful.add(
|
||||
"renew", "--renew-hook",
|
||||
help="Command to be run in a shell once for each successfully renewed"
|
||||
" certificate. For this command, the shell variable $RENEWED_LINEAGE"
|
||||
" will point to the config live subdirectory (for example,"
|
||||
" \"/etc/letsencrypt/live/example.com\") containing the new certificates"
|
||||
" and keys; the shell variable $RENEWED_DOMAINS will contain a"
|
||||
" space-delimited list of renewed certificate domains (for example,"
|
||||
" \"example.com www.example.com\"")
|
||||
"renew", "--deploy-hook", action=_DeployHookAction,
|
||||
help='Command to be run in a shell once for each successfully'
|
||||
' issued certificate. For this command, the shell variable'
|
||||
' $RENEWED_LINEAGE will point to the config live subdirectory'
|
||||
' (for example, "/etc/letsencrypt/live/example.com") containing'
|
||||
' the new certificates and keys; the shell variable'
|
||||
' $RENEWED_DOMAINS will contain a space-delimited list of'
|
||||
' renewed certificate domains (for example, "example.com'
|
||||
' www.example.com")')
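As a rough example of the two shell variables this help text describes, a deploy hook could be a small script like the one below (the script itself, the /srv/tls destination, and the file names chosen are invented for illustration):

    #!/usr/bin/env python
    import os
    import shutil

    lineage = os.environ["RENEWED_LINEAGE"]          # e.g. /etc/letsencrypt/live/example.com
    domains = os.environ["RENEWED_DOMAINS"].split()  # e.g. ['example.com', 'www.example.com']

    for name in ("fullchain.pem", "privkey.pem"):
        # Copy the renewed material somewhere the web tier can read it.
        shutil.copy(os.path.join(lineage, name),
                    os.path.join("/srv/tls", domains[0] + "-" + name))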
|
||||
helpful.add(
|
||||
"renew", "--disable-hook-validation",
|
||||
action='store_false', dest='validate_hooks', default=True,
|
||||
action="store_false", dest="validate_hooks",
|
||||
default=flag_default("validate_hooks"),
|
||||
help="Ordinarily the commands specified for"
|
||||
" --pre-hook/--post-hook/--renew-hook will be checked for validity, to"
|
||||
" see if the programs being run are in the $PATH, so that mistakes can"
|
||||
" be caught early, even when the hooks aren't being run just yet. The"
|
||||
" validation is rather simplistic and fails if you use more advanced"
|
||||
" shell constructs, so you can use this switch to disable it."
|
||||
" --pre-hook/--post-hook/--deploy-hook will be checked for"
|
||||
" validity, to see if the programs being run are in the $PATH,"
|
||||
" so that mistakes can be caught early, even when the hooks"
|
||||
" aren't being run just yet. The validation is rather"
|
||||
" simplistic and fails if you use more advanced shell"
|
||||
" constructs, so you can use this switch to disable it."
|
||||
" (default: False)")
|
||||
helpful.add(
|
||||
"renew", "--no-directory-hooks", action="store_false",
|
||||
default=flag_default("directory_hooks"), dest="directory_hooks",
|
||||
help="Disable running executables found in Certbot's hook directories"
|
||||
" during renewal. (default: False)")
|
||||
|
||||
helpful.add_deprecated_argument("--agree-dev-preview", 0)
|
||||
helpful.add_deprecated_argument("--dialog", 0)
|
||||
@@ -1121,42 +1190,53 @@ def prepare_and_parse_args(plugins, args, detect_defaults=False): # pylint: dis
|
||||
|
||||
|
||||
def _create_subparsers(helpful):
|
||||
helpful.add("config_changes", "--num", type=int,
|
||||
helpful.add("config_changes", "--num", type=int, default=flag_default("num"),
|
||||
help="How many past revisions you want to be displayed")
|
||||
|
||||
from certbot.client import sample_user_agent # avoid import loops
|
||||
helpful.add(
|
||||
None, "--user-agent", default=None,
|
||||
help="Set a custom user agent string for the client. User agent strings allow "
|
||||
"the CA to collect high level statistics about success rates by OS, "
|
||||
"plugin and use case, and to know when to deprecate support for past Python "
|
||||
None, "--user-agent", default=flag_default("user_agent"),
|
||||
help='Set a custom user agent string for the client. User agent strings allow '
|
||||
'the CA to collect high level statistics about success rates by OS, '
|
||||
'plugin and use case, and to know when to deprecate support for past Python '
|
||||
"versions and flags. If you wish to hide this information from the Let's "
|
||||
'Encrypt server, set this to "". '
|
||||
'(default: {0}). The flags encoded in the user agent are: '
|
||||
'--duplicate, --force-renew, --allow-subset-of-names, -n, and '
|
||||
'whether any hooks are set.'.format(sample_user_agent()))
|
||||
helpful.add(
|
||||
None, "--user-agent-comment", default=flag_default("user_agent_comment"),
|
||||
type=_user_agent_comment_type,
|
||||
help="Add a comment to the default user agent string. May be used when repackaging Certbot "
|
||||
"or calling it from another tool to allow additional statistical data to be collected."
|
||||
" Ignored if --user-agent is set. (Example: Foo-Wrapper/1.0)")
|
||||
helpful.add("certonly",
|
||||
"--csr", type=read_file,
|
||||
"--csr", default=flag_default("csr"), type=read_file,
|
||||
help="Path to a Certificate Signing Request (CSR) in DER or PEM format."
|
||||
" Currently --csr only works with the 'certonly' subcommand.")
|
||||
helpful.add("revoke",
|
||||
"--reason", dest="reason",
|
||||
choices=CaseInsensitiveList(constants.REVOCATION_REASONS.keys()),
|
||||
action=_EncodeReasonAction, default=0,
|
||||
help="Specify reason for revoking certificate.")
|
||||
choices=CaseInsensitiveList(sorted(constants.REVOCATION_REASONS,
|
||||
key=constants.REVOCATION_REASONS.get)),
|
||||
action=_EncodeReasonAction, default=flag_default("reason"),
|
||||
help="Specify reason for revoking certificate. (default: unspecified)")
|
||||
helpful.add("rollback",
|
||||
"--checkpoints", type=int, metavar="N",
|
||||
default=flag_default("rollback_checkpoints"),
|
||||
help="Revert configuration N number of checkpoints.")
|
||||
helpful.add("plugins",
|
||||
"--init", action="store_true", help="Initialize plugins.")
|
||||
"--init", action="store_true", default=flag_default("init"),
|
||||
help="Initialize plugins.")
|
||||
helpful.add("plugins",
|
||||
"--prepare", action="store_true", help="Initialize and prepare plugins.")
|
||||
"--prepare", action="store_true", default=flag_default("prepare"),
|
||||
help="Initialize and prepare plugins.")
|
||||
helpful.add("plugins",
|
||||
"--authenticators", action="append_const", dest="ifaces",
|
||||
default=flag_default("ifaces"),
|
||||
const=interfaces.IAuthenticator, help="Limit to authenticator plugins only.")
|
||||
helpful.add("plugins",
|
||||
"--installers", action="append_const", dest="ifaces",
|
||||
default=flag_default("ifaces"),
|
||||
const=interfaces.IInstaller, help="Limit to installer plugins only.")
|
||||
|
||||
|
||||
@@ -1222,53 +1302,68 @@ def _plugins_parsing(helpful, plugins):
|
||||
"a particular plugin by setting options provided below. Running "
|
||||
"--help <plugin_name> will list flags specific to that plugin.")
|
||||
|
||||
helpful.add("plugins", "--configurator",
|
||||
helpful.add("plugins", "--configurator", default=flag_default("configurator"),
|
||||
help="Name of the plugin that is both an authenticator and an installer."
|
||||
" Should not be used together with --authenticator or --installer. "
|
||||
"(default: Ask)")
|
||||
helpful.add("plugins", "-a", "--authenticator", help="Authenticator plugin name.")
|
||||
helpful.add("plugins", "-i", "--installer",
|
||||
helpful.add("plugins", "-a", "--authenticator", default=flag_default("authenticator"),
|
||||
help="Authenticator plugin name.")
|
||||
helpful.add("plugins", "-i", "--installer", default=flag_default("installer"),
|
||||
help="Installer plugin name (also used to find domains).")
|
||||
helpful.add(["plugins", "certonly", "run", "install", "config_changes"],
|
||||
"--apache", action="store_true",
|
||||
"--apache", action="store_true", default=flag_default("apache"),
|
||||
help="Obtain and install certificates using Apache")
|
||||
helpful.add(["plugins", "certonly", "run", "install", "config_changes"],
|
||||
"--nginx", action="store_true", help="Obtain and install certificates using Nginx")
|
||||
"--nginx", action="store_true", default=flag_default("nginx"),
|
||||
help="Obtain and install certificates using Nginx")
|
||||
helpful.add(["plugins", "certonly"], "--standalone", action="store_true",
|
||||
default=flag_default("standalone"),
|
||||
help='Obtain certificates using a "standalone" webserver.')
|
||||
helpful.add(["plugins", "certonly"], "--manual", action="store_true",
|
||||
help='Provide laborious manual instructions for obtaining a certificate')
|
||||
default=flag_default("manual"),
|
||||
help="Provide laborious manual instructions for obtaining a certificate")
|
||||
helpful.add(["plugins", "certonly"], "--webroot", action="store_true",
|
||||
help='Obtain certificates by placing files in a webroot directory.')
|
||||
default=flag_default("webroot"),
|
||||
help="Obtain certificates by placing files in a webroot directory.")
|
||||
helpful.add(["plugins", "certonly"], "--dns-cloudflare", action="store_true",
|
||||
help=('Obtain certificates using a DNS TXT record (if you are '
|
||||
'using Cloudflare for DNS).'))
|
||||
default=flag_default("dns_cloudflare"),
|
||||
help=("Obtain certificates using a DNS TXT record (if you are "
|
||||
"using Cloudflare for DNS)."))
|
||||
helpful.add(["plugins", "certonly"], "--dns-cloudxns", action="store_true",
|
||||
help=('Obtain certificates using a DNS TXT record (if you are '
|
||||
'using CloudXNS for DNS).'))
|
||||
default=flag_default("dns_cloudxns"),
|
||||
help=("Obtain certificates using a DNS TXT record (if you are "
|
||||
"using CloudXNS for DNS)."))
|
||||
helpful.add(["plugins", "certonly"], "--dns-digitalocean", action="store_true",
|
||||
help=('Obtain certificates using a DNS TXT record (if you are '
|
||||
'using DigitalOcean for DNS).'))
|
||||
default=flag_default("dns_digitalocean"),
|
||||
help=("Obtain certificates using a DNS TXT record (if you are "
|
||||
"using DigitalOcean for DNS)."))
|
||||
helpful.add(["plugins", "certonly"], "--dns-dnsimple", action="store_true",
|
||||
help=('Obtain certificates using a DNS TXT record (if you are '
|
||||
'using DNSimple for DNS).'))
|
||||
default=flag_default("dns_dnsimple"),
|
||||
help=("Obtain certificates using a DNS TXT record (if you are "
|
||||
"using DNSimple for DNS)."))
|
||||
helpful.add(["plugins", "certonly"], "--dns-dnsmadeeasy", action="store_true",
|
||||
help=('Obtain certificates using a DNS TXT record (if you are'
|
||||
'using DNS Made Easy for DNS).'))
|
||||
default=flag_default("dns_dnsmadeeasy"),
|
||||
help=("Obtain certificates using a DNS TXT record (if you are"
|
||||
"using DNS Made Easy for DNS)."))
|
||||
helpful.add(["plugins", "certonly"], "--dns-google", action="store_true",
|
||||
help=('Obtain certificates using a DNS TXT record (if you are '
|
||||
'using Google Cloud DNS).'))
|
||||
default=flag_default("dns_google"),
|
||||
help=("Obtain certificates using a DNS TXT record (if you are "
|
||||
"using Google Cloud DNS)."))
|
||||
helpful.add(["plugins", "certonly"], "--dns-luadns", action="store_true",
|
||||
help=('Obtain certificates using a DNS TXT record (if you are '
|
||||
'using LuaDNS for DNS).'))
|
||||
default=flag_default("dns_luadns"),
|
||||
help=("Obtain certificates using a DNS TXT record (if you are "
|
||||
"using LuaDNS for DNS)."))
|
||||
helpful.add(["plugins", "certonly"], "--dns-nsone", action="store_true",
|
||||
help=('Obtain certificates using a DNS TXT record (if you are '
|
||||
'using NS1 for DNS).'))
|
||||
default=flag_default("dns_nsone"),
|
||||
help=("Obtain certificates using a DNS TXT record (if you are "
|
||||
"using NS1 for DNS)."))
|
||||
helpful.add(["plugins", "certonly"], "--dns-rfc2136", action="store_true",
|
||||
help='Obtain certificates using a DNS TXT record (if you are using BIND for DNS).')
|
||||
default=flag_default("dns_rfc2136"),
|
||||
help="Obtain certificates using a DNS TXT record (if you are using BIND for DNS).")
|
||||
helpful.add(["plugins", "certonly"], "--dns-route53", action="store_true",
|
||||
help=('Obtain certificates using a DNS TXT record (if you are using Route53 for '
|
||||
'DNS).'))
|
||||
default=flag_default("dns_route53"),
|
||||
help=("Obtain certificates using a DNS TXT record (if you are using Route53 for "
|
||||
"DNS)."))
|
||||
|
||||
# things should not be reordered past/before this comment:
|
||||
# plugins_group should be displayed in --help before plugin
|
||||
@@ -1349,3 +1444,53 @@ def parse_preferred_challenges(pref_challs):
|
||||
raise errors.Error(
|
||||
"Unrecognized challenges: {0}".format(unrecognized))
|
||||
return challs
|
||||
|
||||
def _user_agent_comment_type(value):
|
||||
if "(" in value or ")" in value:
|
||||
raise argparse.ArgumentTypeError("may not contain parentheses")
|
||||
return value
|
||||
|
||||
class _DeployHookAction(argparse.Action):
|
||||
"""Action class for parsing deploy hooks."""
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
renew_hook_set = namespace.deploy_hook != namespace.renew_hook
|
||||
if renew_hook_set and namespace.renew_hook != values:
|
||||
raise argparse.ArgumentError(
|
||||
self, "conflicts with --renew-hook value")
|
||||
namespace.deploy_hook = namespace.renew_hook = values
|
||||
|
||||
|
||||
class _RenewHookAction(argparse.Action):
|
||||
"""Action class for parsing renew hooks."""
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
deploy_hook_set = namespace.deploy_hook is not None
|
||||
if deploy_hook_set and namespace.deploy_hook != values:
|
||||
raise argparse.ArgumentError(
|
||||
self, "conflicts with --deploy-hook value")
|
||||
namespace.renew_hook = values
|
||||
|
||||
|
||||
def nonnegative_int(value):
|
||||
"""Converts value to an int and checks that it is not negative.
|
||||
|
||||
This function should be used as the type parameter for argparse
|
||||
arguments.
|
||||
|
||||
:param str value: value provided on the command line
|
||||
|
||||
:returns: integer representation of value
|
||||
:rtype: int
|
||||
|
||||
:raises argparse.ArgumentTypeError: if value isn't a non-negative integer
|
||||
|
||||
"""
|
||||
try:
|
||||
int_value = int(value)
|
||||
except ValueError:
|
||||
raise argparse.ArgumentTypeError("value must be an integer")
|
||||
|
||||
if int_value < 0:
|
||||
raise argparse.ArgumentTypeError("value must be non-negative")
|
||||
return int_value
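For instance (a standalone sketch, not part of the patch), the validator plugs straight into argparse the same way --max-log-backups uses it above:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--max-log-backups", type=nonnegative_int, default=1000)

    parser.parse_args(["--max-log-backups", "5"])   # Namespace(max_log_backups=5)
    parser.parse_args(["--max-log-backups", "-1"])  # argparse exits: value must be non-negative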
|
||||
|
||||
@@ -9,6 +9,7 @@ import OpenSSL
|
||||
import zope.component
|
||||
|
||||
from acme import client as acme_client
|
||||
from acme import crypto_util as acme_crypto_util
|
||||
from acme import errors as acme_errors
|
||||
from acme import jose
|
||||
from acme import messages
|
||||
@@ -57,11 +58,12 @@ def determine_user_agent(config):
|
||||
# policy, talk to a core Certbot team member before making any
|
||||
# changes here.
|
||||
if config.user_agent is None:
|
||||
ua = ("CertbotACMEClient/{0} ({1}; {2}) Authenticator/{3} Installer/{4} "
|
||||
ua = ("CertbotACMEClient/{0} ({1}; {2}{8}) Authenticator/{3} Installer/{4} "
|
||||
"({5}; flags: {6}) Py/{7}")
|
||||
ua = ua.format(certbot.__version__, cli.cli_command, util.get_os_info_ua(),
|
||||
config.authenticator, config.installer, config.verb,
|
||||
ua_flags(config), platform.python_version())
|
||||
ua_flags(config), platform.python_version(),
|
||||
"; " + config.user_agent_comment if config.user_agent_comment else "")
|
||||
else:
|
||||
ua = config.user_agent
|
||||
return ua
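Filled in, the template produces strings along these lines (all concrete values are invented for illustration); the new {8} slot is where --user-agent-comment is appended:

    # without a comment:
    #   CertbotACMEClient/0.19.0 (certbot; Ubuntu 16.04) Authenticator/webroot Installer/None (certonly; flags: n) Py/2.7.12
    # with --user-agent-comment Foo-Wrapper/1.0:
    #   CertbotACMEClient/0.19.0 (certbot; Ubuntu 16.04; Foo-Wrapper/1.0) Authenticator/webroot Installer/None (certonly; flags: n) Py/2.7.12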
|
||||
@@ -319,9 +321,17 @@ class Client(object):
|
||||
domains = [d for d in domains if d in auth_domains]
|
||||
|
||||
# Create CSR from names
|
||||
key = crypto_util.init_save_key(
|
||||
self.config.rsa_key_size, self.config.key_dir)
|
||||
csr = crypto_util.init_save_csr(key, domains, self.config.csr_dir)
|
||||
if self.config.dry_run:
|
||||
key = util.Key(file=None,
|
||||
pem=crypto_util.make_key(self.config.rsa_key_size))
|
||||
csr = util.CSR(file=None, form="pem",
|
||||
data=acme_crypto_util.make_csr(
|
||||
key.pem, domains, self.config.must_staple))
|
||||
else:
|
||||
key = crypto_util.init_save_key(
|
||||
self.config.rsa_key_size, self.config.key_dir)
|
||||
csr = crypto_util.init_save_csr(key, domains, self.config.csr_dir)
|
||||
|
||||
certr, chain = self.obtain_certificate_from_csr(
|
||||
domains, csr, authzr=authzr)
|
||||
|
||||
|
||||
@@ -108,6 +108,30 @@ class NamespaceConfig(object):
|
||||
return os.path.join(
|
||||
self.namespace.config_dir, constants.RENEWAL_CONFIGS_DIR)
|
||||
|
||||
@property
|
||||
def renewal_hooks_dir(self):
|
||||
"""Path to directory with hooks to run with the renew subcommand."""
|
||||
return os.path.join(self.namespace.config_dir,
|
||||
constants.RENEWAL_HOOKS_DIR)
|
||||
|
||||
@property
|
||||
def renewal_pre_hooks_dir(self):
|
||||
"""Path to the pre-hook directory for the renew subcommand."""
|
||||
return os.path.join(self.renewal_hooks_dir,
|
||||
constants.RENEWAL_PRE_HOOKS_DIR)
|
||||
|
||||
@property
|
||||
def renewal_deploy_hooks_dir(self):
|
||||
"""Path to the deploy-hook directory for the renew subcommand."""
|
||||
return os.path.join(self.renewal_hooks_dir,
|
||||
constants.RENEWAL_DEPLOY_HOOKS_DIR)
|
||||
|
||||
@property
|
||||
def renewal_post_hooks_dir(self):
|
||||
"""Path to the post-hook directory for the renew subcommand."""
|
||||
return os.path.join(self.renewal_hooks_dir,
|
||||
constants.RENEWAL_POST_HOOKS_DIR)
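With the default config_dir, these properties resolve to a layout like the following (a sketch for illustration; the directory names come from the constants added later in this diff):

    config = configuration.NamespaceConfig(namespace)  # assuming namespace.config_dir == "/etc/letsencrypt"
    config.renewal_pre_hooks_dir     # /etc/letsencrypt/renewal-hooks/pre
    config.renewal_deploy_hooks_dir  # /etc/letsencrypt/renewal-hooks/deploy
    config.renewal_post_hooks_dir    # /etc/letsencrypt/renewal-hooks/post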
|
||||
|
||||
|
||||
def check_config_sanity(config):
|
||||
"""Validate command line options and display error message if
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Certbot constants."""
|
||||
import os
|
||||
import logging
|
||||
import os
|
||||
import pkg_resources
|
||||
|
||||
from acme import challenges
|
||||
|
||||
@@ -18,24 +19,92 @@ CLI_DEFAULTS = dict(
|
||||
os.path.join(os.environ.get("XDG_CONFIG_HOME", "~/.config"),
|
||||
"letsencrypt", "cli.ini"),
|
||||
],
|
||||
dry_run=False,
|
||||
|
||||
# Main parser
|
||||
verbose_count=-int(logging.INFO / 10),
|
||||
server="https://acme-v01.api.letsencrypt.org/directory",
|
||||
text_mode=False,
|
||||
max_log_backups=1000,
|
||||
noninteractive_mode=False,
|
||||
force_interactive=False,
|
||||
domains=[],
|
||||
certname=None,
|
||||
dry_run=False,
|
||||
register_unsafely_without_email=False,
|
||||
update_registration=False,
|
||||
email=None,
|
||||
eff_email=None,
|
||||
reinstall=False,
|
||||
expand=False,
|
||||
renew_by_default=False,
|
||||
renew_with_new_domains=False,
|
||||
allow_subset_of_names=False,
|
||||
tos=False,
|
||||
account=None,
|
||||
duplicate=False,
|
||||
os_packages_only=False,
|
||||
no_self_upgrade=False,
|
||||
no_bootstrap=False,
|
||||
quiet=False,
|
||||
staging=False,
|
||||
debug=False,
|
||||
debug_challenges=False,
|
||||
no_verify_ssl=False,
|
||||
tls_sni_01_port=challenges.TLSSNI01Response.PORT,
|
||||
tls_sni_01_address="",
|
||||
http01_port=challenges.HTTP01Response.PORT,
|
||||
http01_address="",
|
||||
break_my_certs=False,
|
||||
rsa_key_size=2048,
|
||||
must_staple=False,
|
||||
redirect=None,
|
||||
hsts=None,
|
||||
uir=None,
|
||||
staple=None,
|
||||
strict_permissions=False,
|
||||
pref_challs=[],
|
||||
validate_hooks=True,
|
||||
directory_hooks=True,
|
||||
|
||||
# Subparsers
|
||||
num=None,
|
||||
user_agent=None,
|
||||
user_agent_comment=None,
|
||||
csr=None,
|
||||
reason=0,
|
||||
rollback_checkpoints=1,
|
||||
init=False,
|
||||
prepare=False,
|
||||
ifaces=None,
|
||||
|
||||
# Path parsers
|
||||
auth_cert_path="./cert.pem",
|
||||
auth_chain_path="./chain.pem",
|
||||
key_path=None,
|
||||
config_dir="/etc/letsencrypt",
|
||||
work_dir="/var/lib/letsencrypt",
|
||||
logs_dir="/var/log/letsencrypt",
|
||||
no_verify_ssl=False,
|
||||
http01_port=challenges.HTTP01Response.PORT,
|
||||
http01_address="",
|
||||
tls_sni_01_port=challenges.TLSSNI01Response.PORT,
|
||||
tls_sni_01_address="",
|
||||
server="https://acme-v01.api.letsencrypt.org/directory",
|
||||
|
||||
# Plugins parsers
|
||||
configurator=None,
|
||||
authenticator=None,
|
||||
installer=None,
|
||||
apache=False,
|
||||
nginx=False,
|
||||
standalone=False,
|
||||
manual=False,
|
||||
webroot=False,
|
||||
dns_cloudflare=False,
|
||||
dns_cloudxns=False,
|
||||
dns_digitalocean=False,
|
||||
dns_dnsimple=False,
|
||||
dns_dnsmadeeasy=False,
|
||||
dns_google=False,
|
||||
dns_luadns=False,
|
||||
dns_nsone=False,
|
||||
dns_rfc2136=False,
|
||||
dns_route53=False
|
||||
|
||||
auth_cert_path="./cert.pem",
|
||||
auth_chain_path="./chain.pem",
|
||||
strict_permissions=False,
|
||||
debug_challenges=False,
|
||||
)
|
||||
STAGING_URI = "https://acme-staging.api.letsencrypt.org/directory"
|
||||
|
||||
@@ -108,8 +177,35 @@ TEMP_CHECKPOINT_DIR = "temp_checkpoint"
|
||||
RENEWAL_CONFIGS_DIR = "renewal"
|
||||
"""Renewal configs directory, relative to `IConfig.config_dir`."""
|
||||
|
||||
RENEWAL_HOOKS_DIR = "renewal-hooks"
|
||||
"""Basename of directory containing hooks to run with the renew command."""
|
||||
|
||||
RENEWAL_PRE_HOOKS_DIR = "pre"
|
||||
"""Basename of directory containing pre-hooks to run with the renew command."""
|
||||
|
||||
RENEWAL_DEPLOY_HOOKS_DIR = "deploy"
|
||||
"""Basename of directory containing deploy-hooks to run with the renew command."""
|
||||
|
||||
RENEWAL_POST_HOOKS_DIR = "post"
|
||||
"""Basename of directory containing post-hooks to run with the renew command."""
|
||||
|
||||
FORCE_INTERACTIVE_FLAG = "--force-interactive"
|
||||
"""Flag to disable TTY checking in IDisplay."""
|
||||
|
||||
EFF_SUBSCRIBE_URI = "https://supporters.eff.org/subscribe/certbot"
|
||||
"""EFF URI used to submit the e-mail address of users who opt-in."""
|
||||
|
||||
SSL_DHPARAMS_DEST = "ssl-dhparams.pem"
|
||||
"""Name of the ssl_dhparams file as saved in `IConfig.config_dir`."""
|
||||
|
||||
SSL_DHPARAMS_SRC = pkg_resources.resource_filename(
|
||||
"certbot", "ssl-dhparams.pem")
|
||||
"""Path to the nginx ssl_dhparams file found in the Certbot distribution."""
|
||||
|
||||
UPDATED_SSL_DHPARAMS_DIGEST = ".updated-ssl-dhparams-pem-digest.txt"
|
||||
"""Name of the hash of the updated or informed ssl_dhparams as saved in `IConfig.config_dir`."""
|
||||
|
||||
ALL_SSL_DHPARAMS_HASHES = [
|
||||
'9ba6429597aeed2d8617a7705b56e96d044f64b07971659382e426675105654b',
|
||||
]
|
||||
"""SHA256 hashes of the contents of all versions of SSL_DHPARAMS_SRC"""
|
||||
|
||||
@@ -55,15 +55,11 @@ def init_save_key(key_size, key_dir, keyname="key-certbot.pem"):
|
||||
# Save file
|
||||
util.make_or_verify_dir(key_dir, 0o700, os.geteuid(),
|
||||
config.strict_permissions)
|
||||
if config.dry_run:
|
||||
key_path = None
|
||||
logger.debug("Generating key (%d bits), not saving to file", key_size)
|
||||
else:
|
||||
key_f, key_path = util.unique_file(
|
||||
os.path.join(key_dir, keyname), 0o600, "wb")
|
||||
with key_f:
|
||||
key_f.write(key_pem)
|
||||
logger.debug("Generating key (%d bits): %s", key_size, key_path)
|
||||
key_f, key_path = util.unique_file(
|
||||
os.path.join(key_dir, keyname), 0o600, "wb")
|
||||
with key_f:
|
||||
key_f.write(key_pem)
|
||||
logger.debug("Generating key (%d bits): %s", key_size, key_path)
|
||||
|
||||
return util.Key(key_path, key_pem)
|
||||
|
||||
@@ -90,15 +86,11 @@ def init_save_csr(privkey, names, path):
|
||||
# Save CSR
|
||||
util.make_or_verify_dir(path, 0o755, os.geteuid(),
|
||||
config.strict_permissions)
|
||||
if config.dry_run:
|
||||
csr_filename = None
|
||||
logger.debug("Creating CSR: not saving to file")
|
||||
else:
|
||||
csr_f, csr_filename = util.unique_file(
|
||||
os.path.join(path, "csr-certbot.pem"), 0o644, "wb")
|
||||
with csr_f:
|
||||
csr_f.write(csr_pem)
|
||||
logger.debug("Creating CSR: %s", csr_filename)
|
||||
csr_f, csr_filename = util.unique_file(
|
||||
os.path.join(path, "csr-certbot.pem"), 0o644, "wb")
|
||||
with csr_f:
|
||||
csr_f.write(csr_pem)
|
||||
logger.debug("Creating CSR: %s", csr_filename)
|
||||
|
||||
return util.CSR(csr_filename, csr_pem, "pem")
|
||||
|
||||
@@ -222,7 +214,7 @@ def verify_renewable_cert(renewable_cert):
|
||||
"""
|
||||
verify_renewable_cert_sig(renewable_cert)
|
||||
verify_fullchain(renewable_cert)
|
||||
verify_cert_matches_priv_key(renewable_cert)
|
||||
verify_cert_matches_priv_key(renewable_cert.cert, renewable_cert.privkey)
|
||||
|
||||
|
||||
def verify_renewable_cert_sig(renewable_cert):
|
||||
@@ -246,27 +238,24 @@ def verify_renewable_cert_sig(renewable_cert):
|
||||
raise errors.Error(error_str)
|
||||
|
||||
|
||||
def verify_cert_matches_priv_key(renewable_cert):
|
||||
def verify_cert_matches_priv_key(cert_path, key_path):
|
||||
""" Verifies that the private key and cert match.
|
||||
|
||||
:param `.storage.RenewableCert` renewable_cert: cert to verify
|
||||
:param str cert_path: path to a cert in PEM format
|
||||
:param str key_path: path to a private key file
|
||||
|
||||
:raises errors.Error: If they don't match.
|
||||
"""
|
||||
try:
|
||||
with open(renewable_cert.cert) as cert:
|
||||
cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert.read())
|
||||
with open(renewable_cert.privkey) as privkey:
|
||||
privkey = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, privkey.read())
|
||||
context = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
|
||||
context.use_privatekey(privkey)
|
||||
context.use_certificate(cert)
|
||||
context.use_certificate_file(cert_path)
|
||||
context.use_privatekey_file(key_path)
|
||||
context.check_privatekey()
|
||||
except (IOError, OpenSSL.SSL.Error) as e:
|
||||
error_str = "verifying the cert located at {0} matches the \
|
||||
private key located at {1} has failed. \
|
||||
Details: {2}".format(renewable_cert.cert,
|
||||
renewable_cert.privkey, e)
|
||||
Details: {2}".format(cert_path,
|
||||
key_path, e)
|
||||
logger.exception(error_str)
|
||||
raise errors.Error(error_str)
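A call against an existing lineage might look like this (the paths are illustrative; the function raises errors.Error on a mismatch and returns None otherwise):

    from certbot import crypto_util

    crypto_util.verify_cert_matches_priv_key(
        "/etc/letsencrypt/live/example.com/cert.pem",
        "/etc/letsencrypt/live/example.com/privkey.pem")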
|
||||
|
||||
|
||||
@@ -42,12 +42,14 @@ def redirect_by_default():
|
||||
|
||||
"""
|
||||
choices = [
|
||||
("Easy", "Allow both HTTP and HTTPS access to these sites"),
|
||||
("Secure", "Make all requests redirect to secure HTTPS access"),
|
||||
("No redirect", "Make no further changes to the webserver configuration."),
|
||||
("Redirect", "Make all requests redirect to secure HTTPS access. "
|
||||
"Choose this for new sites, or if you're confident your site works on HTTPS. "
|
||||
"You can undo this change by editing your web server's configuration."),
|
||||
]
|
||||
|
||||
code, selection = util(interfaces.IDisplay).menu(
|
||||
"Please choose whether HTTPS access is required or optional.",
|
||||
"Please choose whether or not to redirect HTTP traffic to HTTPS, removing HTTP access.",
|
||||
choices, default=0,
|
||||
cli_flag="--redirect / --no-redirect", force_interactive=True)
|
||||
|
||||
|
||||
@@ -117,6 +117,7 @@ class FileDisplay(object):
|
||||
self.outfile.write(
|
||||
"{line}{frame}{line}{msg}{line}{frame}{line}".format(
|
||||
line=os.linesep, frame=side_frame, msg=message))
|
||||
self.outfile.flush()
|
||||
if pause:
|
||||
if self._can_interact(force_interactive):
|
||||
input_with_timeout("Press Enter to Continue")
|
||||
@@ -213,6 +214,7 @@ class FileDisplay(object):
|
||||
|
||||
self.outfile.write("{0}{frame}{msg}{0}{frame}".format(
|
||||
os.linesep, frame=side_frame, msg=message))
|
||||
self.outfile.flush()
|
||||
|
||||
while True:
|
||||
ans = input_with_timeout("{yes}/{no}: ".format(
|
||||
@@ -267,6 +269,7 @@ class FileDisplay(object):
|
||||
else:
|
||||
self.outfile.write(
|
||||
"** Error - Invalid selection **%s" % os.linesep)
|
||||
self.outfile.flush()
|
||||
else:
|
||||
return code, []
|
||||
|
||||
@@ -395,6 +398,7 @@ class FileDisplay(object):
|
||||
self.outfile.write(os.linesep)
|
||||
|
||||
self.outfile.write(side_frame)
|
||||
self.outfile.flush()
|
||||
|
||||
def _get_valid_int_ans(self, max_):
|
||||
"""Get a numerical selection.
|
||||
@@ -428,6 +432,7 @@ class FileDisplay(object):
|
||||
except ValueError:
|
||||
self.outfile.write(
|
||||
"{0}** Invalid input **{0}".format(os.linesep))
|
||||
self.outfile.flush()
|
||||
|
||||
return OK, selection
|
||||
|
||||
@@ -483,6 +488,7 @@ class NoninteractiveDisplay(object):
|
||||
self.outfile.write(
|
||||
"{line}{frame}{line}{msg}{line}{frame}{line}".format(
|
||||
line=os.linesep, frame=side_frame, msg=message))
|
||||
self.outfile.flush()
|
||||
|
||||
def menu(self, message, choices, ok_label=None, cancel_label=None,
|
||||
help_label=None, default=None, cli_flag=None, **unused_kwargs):
|
||||
|
||||
@@ -29,18 +29,19 @@ class ErrorHandler(object):
|
||||
"""Context manager for running code that must be cleaned up on failure.
|
||||
|
||||
The context manager allows you to register functions that will be called
|
||||
when an exception (excluding SystemExit) or signal is encountered. Usage:
|
||||
when an exception (excluding SystemExit) or signal is encountered.
|
||||
Usage::
|
||||
|
||||
handler = ErrorHandler(cleanup1_func, *cleanup1_args, **cleanup1_kwargs)
|
||||
handler.register(cleanup2_func, *cleanup2_args, **cleanup2_kwargs)
|
||||
handler = ErrorHandler(cleanup1_func, *cleanup1_args, **cleanup1_kwargs)
|
||||
handler.register(cleanup2_func, *cleanup2_args, **cleanup2_kwargs)
|
||||
|
||||
with handler:
|
||||
do_something()
|
||||
with handler:
|
||||
do_something()
|
||||
|
||||
Or for one cleanup function:
|
||||
Or for one cleanup function::
|
||||
|
||||
with ErrorHandler(func, args, kwargs):
|
||||
do_something()
|
||||
with ErrorHandler(func, args, kwargs):
|
||||
do_something()
|
||||
|
||||
If an exception is raised out of do_something, the cleanup functions will
|
||||
be called in last in first out order. Then the exception is raised.
|
||||
@@ -84,7 +85,7 @@ class ErrorHandler(object):
|
||||
return retval
|
||||
|
||||
def register(self, func, *args, **kwargs):
|
||||
"""Sets func to be called with *args and **kwargs during cleanup
|
||||
"""Sets func to be run with the given arguments during cleanup.
|
||||
|
||||
:param function func: function to be called in case of an error
|
||||
|
||||
|
||||
@@ -32,6 +32,8 @@ class HookCommandNotFound(Error):
|
||||
class SignalExit(Error):
|
||||
"""A Unix signal was received while in the ErrorHandler context manager."""
|
||||
|
||||
class OverlappingMatchFound(Error):
|
||||
"""Multiple lineages matched what should have been a unique result."""
|
||||
|
||||
class LockError(Error):
|
||||
"""File locking error."""
|
||||
|
||||
certbot/hooks.py
@@ -18,6 +18,7 @@ def validate_hooks(config):
|
||||
"""Check hook commands are executable."""
|
||||
validate_hook(config.pre_hook, "pre")
|
||||
validate_hook(config.post_hook, "post")
|
||||
validate_hook(config.deploy_hook, "deploy")
|
||||
validate_hook(config.renew_hook, "renew")
|
||||
|
||||
|
||||
@@ -56,30 +57,71 @@ def validate_hook(shell_cmd, hook_name):
|
||||
|
||||
|
||||
def pre_hook(config):
|
||||
"Run pre-hook if it's defined and hasn't been run."
|
||||
"""Run pre-hooks if they exist and haven't already been run.
|
||||
|
||||
When Certbot is running with the renew subcommand, this function
|
||||
runs any hooks found in the config.renewal_pre_hooks_dir (if they
|
||||
have not already been run) followed by any pre-hook in the config.
|
||||
If hooks in config.renewal_pre_hooks_dir are run and the pre-hook in
|
||||
the config is a path to one of these scripts, it is not run twice.
|
||||
|
||||
:param configuration.NamespaceConfig config: Certbot settings
|
||||
|
||||
"""
|
||||
if config.verb == "renew" and config.directory_hooks:
|
||||
for hook in list_hooks(config.renewal_pre_hooks_dir):
|
||||
_run_pre_hook_if_necessary(hook)
|
||||
|
||||
cmd = config.pre_hook
|
||||
if cmd and cmd not in pre_hook.already:
|
||||
logger.info("Running pre-hook command: %s", cmd)
|
||||
_run_hook(cmd)
|
||||
pre_hook.already.add(cmd)
|
||||
elif cmd:
|
||||
logger.info("Pre-hook command already run, skipping: %s", cmd)
|
||||
if cmd:
|
||||
_run_pre_hook_if_necessary(cmd)
|
||||
|
||||
pre_hook.already = set() # type: ignore
|
||||
|
||||
|
||||
def post_hook(config):
|
||||
"""Run post hook if defined.
|
||||
def _run_pre_hook_if_necessary(command):
|
||||
"""Run the specified pre-hook if we haven't already.
|
||||
|
||||
If we've already run this exact command before, a message is logged
|
||||
saying the pre-hook was skipped.
|
||||
|
||||
:param str command: pre-hook to be run
|
||||
|
||||
"""
|
||||
if command in pre_hook.already:
|
||||
logger.info("Pre-hook command already run, skipping: %s", command)
|
||||
else:
|
||||
logger.info("Running pre-hook command: %s", command)
|
||||
_run_hook(command)
|
||||
pre_hook.already.add(command)
|
||||
|
||||
|
||||
def post_hook(config):
|
||||
"""Run post-hooks if defined.
|
||||
|
||||
This function also registers any executables found in
|
||||
config.renewal_post_hooks_dir to be run when Certbot is used with
|
||||
the renew subcommand.
|
||||
|
||||
If the verb is renew, we delay executing any post-hooks until
|
||||
:func:`run_saved_post_hooks` is called. In this case, this function
|
||||
registers all hooks found in config.renewal_post_hooks_dir to be
|
||||
called followed by any post-hook in the config. If the post-hook in
|
||||
the config is a path to an executable in the post-hook directory, it
|
||||
is not scheduled to be run twice.
|
||||
|
||||
:param configuration.NamespaceConfig config: Certbot settings
|
||||
|
||||
If the verb is renew, we might have more certs to renew, so we wait until
|
||||
run_saved_post_hooks() is called.
|
||||
"""
|
||||
|
||||
cmd = config.post_hook
|
||||
# In the "renew" case, we save these up to run at the end
|
||||
if config.verb == "renew":
|
||||
if cmd and cmd not in post_hook.eventually:
|
||||
post_hook.eventually.append(cmd)
|
||||
if config.directory_hooks:
|
||||
for hook in list_hooks(config.renewal_post_hooks_dir):
|
||||
_run_eventually(hook)
|
||||
if cmd:
|
||||
_run_eventually(cmd)
|
||||
# certonly / run
|
||||
elif cmd:
|
||||
logger.info("Running post-hook command: %s", cmd)
|
||||
@@ -88,6 +130,19 @@ def post_hook(config):
|
||||
post_hook.eventually = [] # type: ignore
|
||||
|
||||
|
||||
def _run_eventually(command):
|
||||
"""Registers a post-hook to be run eventually.
|
||||
|
||||
All commands given to this function will be run exactly once in the
|
||||
order they were given when :func:`run_saved_post_hooks` is called.
|
||||
|
||||
:param str command: post-hook to register to be run
|
||||
|
||||
"""
|
||||
if command not in post_hook.eventually:
|
||||
post_hook.eventually.append(command)
|
||||
|
||||
|
||||
def run_saved_post_hooks():
|
||||
"""Run any post hooks that were saved up in the course of the 'renew' verb"""
|
||||
for cmd in post_hook.eventually:
|
||||
@@ -95,16 +150,75 @@ def run_saved_post_hooks():
|
||||
_run_hook(cmd)
|
||||
|
||||
|
||||
def deploy_hook(config, domains, lineage_path):
|
||||
"""Run post-issuance hook if defined.
|
||||
|
||||
:param configuration.NamespaceConfig config: Certbot settings
|
||||
:param domains: domains in the obtained certificate
|
||||
:type domains: `list` of `str`
|
||||
:param str lineage_path: live directory path for the new cert
|
||||
|
||||
"""
|
||||
if config.deploy_hook:
|
||||
_run_deploy_hook(config.deploy_hook, domains,
|
||||
lineage_path, config.dry_run)
|
||||
|
||||
|
||||
def renew_hook(config, domains, lineage_path):
|
||||
"""Run post-renewal hook if defined."""
|
||||
"""Run post-renewal hooks.
|
||||
|
||||
This function runs any hooks found in
|
||||
config.renewal_deploy_hooks_dir followed by any renew-hook in the
|
||||
config. If the renew-hook in the config is a path to a script in
|
||||
config.renewal_deploy_hooks_dir, it is not run twice.
|
||||
|
||||
If Certbot is doing a dry run, no hooks are run and messages are
|
||||
logged saying that they were skipped.
|
||||
|
||||
:param configuration.NamespaceConfig config: Certbot settings
|
||||
:param domains: domains in the obtained certificate
|
||||
:type domains: `list` of `str`
|
||||
:param str lineage_path: live directory path for the new cert
|
||||
|
||||
"""
|
||||
executed_dir_hooks = set()
|
||||
if config.directory_hooks:
|
||||
for hook in list_hooks(config.renewal_deploy_hooks_dir):
|
||||
_run_deploy_hook(hook, domains, lineage_path, config.dry_run)
|
||||
executed_dir_hooks.add(hook)
|
||||
|
||||
if config.renew_hook:
|
||||
if not config.dry_run:
|
||||
os.environ["RENEWED_DOMAINS"] = " ".join(domains)
|
||||
os.environ["RENEWED_LINEAGE"] = lineage_path
|
||||
logger.info("Running renew-hook command: %s", config.renew_hook)
|
||||
_run_hook(config.renew_hook)
|
||||
if config.renew_hook in executed_dir_hooks:
|
||||
logger.info("Skipping deploy-hook '%s' as it was already run.",
|
||||
config.renew_hook)
|
||||
else:
|
||||
logger.warning("Dry run: skipping renewal hook command: %s", config.renew_hook)
|
||||
_run_deploy_hook(config.renew_hook, domains,
|
||||
lineage_path, config.dry_run)
|
||||
|
||||
|
||||
def _run_deploy_hook(command, domains, lineage_path, dry_run):
|
||||
"""Run the specified deploy-hook (if not doing a dry run).
|
||||
|
||||
If dry_run is True, command is not run and a message is logged
|
||||
saying that it was skipped. If dry_run is False, the hook is run
|
||||
after setting the appropriate environment variables.
|
||||
|
||||
:param str command: command to run as a deploy-hook
|
||||
:param domains: domains in the obtained certificate
|
||||
:type domains: `list` of `str`
|
||||
:param str lineage_path: live directory path for the new cert
|
||||
:param bool dry_run: True iff Certbot is doing a dry run
|
||||
|
||||
"""
|
||||
if dry_run:
|
||||
logger.warning("Dry run: skipping deploy hook command: %s",
|
||||
command)
|
||||
return
|
||||
|
||||
os.environ["RENEWED_DOMAINS"] = " ".join(domains)
|
||||
os.environ["RENEWED_LINEAGE"] = lineage_path
|
||||
logger.info("Running deploy-hook command: %s", command)
|
||||
_run_hook(command)
|
||||
|
||||
|
||||
def _run_hook(shell_cmd):
|
||||
@@ -136,3 +250,15 @@ def execute(shell_cmd):
|
||||
logger.error('Error output from %s:\n%s', base_cmd, err)
|
||||
return (err, out)
|
||||
|
||||
|
||||
def list_hooks(dir_path):
|
||||
"""List paths to all hooks found in dir_path in sorted order.
|
||||
|
||||
:param str dir_path: directory to search
|
||||
|
||||
:returns: sorted list of absolute paths to executables in dir_path
|
||||
:rtype: `list` of `str`
|
||||
|
||||
"""
|
||||
paths = (os.path.join(dir_path, f) for f in os.listdir(dir_path))
|
||||
return sorted(path for path in paths if util.is_exe(path))
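For example, if the pre-hook directory holds two executables (names invented), the renew code above would run them in lexical order:

    list_hooks("/etc/letsencrypt/renewal-hooks/pre")
    # ['/etc/letsencrypt/renewal-hooks/pre/00-mount-config',
    #  '/etc/letsencrypt/renewal-hooks/pre/10-stop-nginx']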
|
||||
|
||||
@@ -138,7 +138,8 @@ def setup_log_file_handler(config, logfile, fmt):
|
||||
log_file_path = os.path.join(config.logs_dir, logfile)
|
||||
try:
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
log_file_path, maxBytes=2 ** 20, backupCount=1000)
|
||||
log_file_path, maxBytes=2 ** 20,
|
||||
backupCount=config.max_log_backups)
|
||||
except IOError as error:
|
||||
raise errors.Error(util.PERM_ERR_FMT.format(error))
|
||||
# rotate on each invocation, rollover only possible when maxBytes
|
||||
@@ -358,11 +359,11 @@ def post_arg_parse_except_hook(exc_type, exc_value, trace, debug, log_path):
|
||||
logger.debug('Exiting abnormally:', exc_info=exc_info)
|
||||
if issubclass(exc_type, errors.Error):
|
||||
sys.exit(exc_value)
|
||||
print('An unexpected error occurred:', file=sys.stderr)
|
||||
logger.error('An unexpected error occurred:')
|
||||
if messages.is_acme_error(exc_value):
|
||||
# Remove the ACME error prefix from the exception
|
||||
_, _, exc_str = str(exc_value).partition(':: ')
|
||||
print(exc_str, file=sys.stderr)
|
||||
logger.error(exc_str)
|
||||
else:
|
||||
traceback.print_exception(exc_type, exc_value, None)
|
||||
exit_with_log_path(log_path)
|
||||
|
||||
certbot/main.py
@@ -1,9 +1,11 @@
|
||||
"""Certbot main entry point."""
|
||||
from __future__ import print_function
|
||||
import functools
|
||||
import logging.handlers
|
||||
import os
|
||||
import sys
|
||||
|
||||
import configobj
|
||||
import zope.component
|
||||
|
||||
from acme import jose
|
||||
@@ -25,6 +27,7 @@ from certbot import interfaces
|
||||
from certbot import log
|
||||
from certbot import renewal
|
||||
from certbot import reporter
|
||||
from certbot import storage
|
||||
from certbot import util
|
||||
|
||||
from certbot.display import util as display_util, ops as display_ops
|
||||
@@ -82,6 +85,8 @@ def _get_and_save_cert(le_client, config, domains=None, certname=None, lineage=N
|
||||
lineage = le_client.obtain_and_enroll_certificate(domains, certname)
|
||||
if lineage is False:
|
||||
raise errors.Error("Certificate could not be obtained")
|
||||
elif lineage is not None:
|
||||
hooks.deploy_hook(config, lineage.names(), lineage.live_dir)
|
||||
finally:
|
||||
hooks.post_hook(config)
|
||||
|
||||
@@ -291,11 +296,12 @@ def _find_domains_or_certname(config, installer):
|
||||
return domains, certname
|
||||
|
||||
|
||||
def _report_new_cert(config, cert_path, fullchain_path):
|
||||
def _report_new_cert(config, cert_path, fullchain_path, key_path=None):
|
||||
"""Reports the creation of a new certificate to the user.
|
||||
|
||||
:param str cert_path: path to cert
|
||||
:param str fullchain_path: path to full chain
|
||||
:param str key_path: path to private key, if available
|
||||
|
||||
"""
|
||||
if config.dry_run:
|
||||
@@ -310,13 +316,17 @@ def _report_new_cert(config, cert_path, fullchain_path):
|
||||
# (Nginx and Apache2.4) will want.
|
||||
|
||||
verbswitch = ' with the "certonly" option' if config.verb == "run" else ""
|
||||
privkey_statement = 'Your key file has been saved at:{br}{0}{br}'.format(
|
||||
key_path, br=os.linesep) if key_path else ""
|
||||
# XXX Perhaps one day we could detect the presence of known old webservers
|
||||
# and say something more informative here.
|
||||
msg = ('Congratulations! Your certificate and chain have been saved at {0}.'
|
||||
' Your cert will expire on {1}. To obtain a new or tweaked version of this '
|
||||
'certificate in the future, simply run {2} again{3}. '
|
||||
'To non-interactively renew *all* of your certificates, run "{2} renew"'
|
||||
.format(fullchain_path, expiry, cli.cli_command, verbswitch))
|
||||
msg = ('Congratulations! Your certificate and chain have been saved at:{br}'
|
||||
'{0}{br}{1}'
|
||||
'Your cert will expire on {2}. To obtain a new or tweaked version of this '
|
||||
'certificate in the future, simply run {3} again{4}. '
|
||||
'To non-interactively renew *all* of your certificates, run "{3} renew"'
|
||||
.format(fullchain_path, privkey_statement, expiry, cli.cli_command, verbswitch,
|
||||
br=os.linesep))
|
||||
reporter_util.add_message(msg, reporter_util.MEDIUM_PRIORITY)
|
||||
|
||||
|
||||
@@ -377,6 +387,92 @@ def _determine_account(config):
|
||||
return acc, acme
|
||||
|
||||
|
||||
def _delete_if_appropriate(config): # pylint: disable=too-many-locals,too-many-branches
|
||||
"""Does the user want to delete their now-revoked certs? If run in non-interactive mode,
|
||||
deleting happens automatically, unless if both `--cert-name` and `--cert-path` were
|
||||
specified with conflicting values.
|
||||
|
||||
:param `configuration.NamespaceConfig` config: parsed command line arguments
|
||||
|
||||
:raises `error.Errors`: If anything goes wrong, including bad user input, if an overlapping
|
||||
archive dir is found for the specified lineage, etc ...
|
||||
"""
|
||||
display = zope.component.getUtility(interfaces.IDisplay)
|
||||
reporter_util = zope.component.getUtility(interfaces.IReporter)
|
||||
|
||||
msg = ("Would you like to delete the cert(s) you just revoked?")
|
||||
attempt_deletion = display.yesno(msg, yes_label="Yes (recommended)", no_label="No",
|
||||
force_interactive=True, default=True)
|
||||
|
||||
if not attempt_deletion:
|
||||
reporter_util.add_message("Not deleting revoked certs.", reporter_util.LOW_PRIORITY)
|
||||
return
|
||||
|
||||
if not (config.certname or config.cert_path):
|
||||
raise errors.Error('At least one of --cert-path or --cert-name must be specified.')
|
||||
|
||||
if config.certname and config.cert_path:
|
||||
# first, check if certname and cert_path imply the same certs
|
||||
implied_cert_name = cert_manager.cert_path_to_lineage(config)
|
||||
|
||||
if implied_cert_name != config.certname:
|
||||
cert_path_implied_cert_name = cert_manager.cert_path_to_lineage(config)
|
||||
cert_path_implied_conf = storage.renewal_file_for_certname(config,
|
||||
cert_path_implied_cert_name)
|
||||
cert_path_cert = storage.RenewableCert(cert_path_implied_conf, config)
|
||||
cert_path_info = cert_manager.human_readable_cert_info(config, cert_path_cert,
|
||||
skip_filter_checks=True)
|
||||
|
||||
cert_name_implied_conf = storage.renewal_file_for_certname(config, config.certname)
|
||||
cert_name_cert = storage.RenewableCert(cert_name_implied_conf, config)
|
||||
cert_name_info = cert_manager.human_readable_cert_info(config, cert_name_cert)
|
||||
|
||||
msg = ("You specified conflicting values for --cert-path and --cert-name. "
|
||||
"Which did you mean to select?")
|
||||
choices = [cert_path_info, cert_name_info]
|
||||
try:
|
||||
code, index = display.menu(msg,
|
||||
choices, ok_label="Select", force_interactive=True)
|
||||
except errors.MissingCommandlineFlag:
|
||||
error_msg = ('To run in non-interactive mode, you must either specify only one of '
|
||||
'--cert-path or --cert-name, or both must point to the same certificate lineages.')
|
||||
raise errors.Error(error_msg)
|
||||
|
||||
if code != display_util.OK or not index in range(0, len(choices)):
|
||||
raise errors.Error("User ended interaction.")
|
||||
|
||||
if index == 0:
|
||||
config.certname = cert_path_implied_cert_name
|
||||
else:
|
||||
config.cert_path = storage.cert_path_for_cert_name(config, config.certname)
|
||||
|
||||
elif config.cert_path:
|
||||
config.certname = cert_manager.cert_path_to_lineage(config)
|
||||
|
||||
else: # if only config.certname was specified
|
||||
config.cert_path = storage.cert_path_for_cert_name(config, config.certname)
|
||||
|
||||
# don't delete if the archive_dir is used by some other lineage
|
||||
archive_dir = storage.full_archive_path(
|
||||
configobj.ConfigObj(storage.renewal_file_for_certname(config, config.certname)),
|
||||
config, config.certname)
|
||||
try:
|
||||
cert_manager.match_and_check_overlaps(config, [lambda x: archive_dir],
|
||||
lambda x: x.archive_dir, lambda x: x)
|
||||
except errors.OverlappingMatchFound:
|
||||
msg = ('Not deleting revoked certs due to overlapping archive dirs. More than '
|
||||
'one lineage is using {0}'.format(archive_dir))
|
||||
reporter_util.add_message(''.join(msg), reporter_util.MEDIUM_PRIORITY)
|
||||
return
|
||||
except Exception as e:
|
||||
msg = ('config.default_archive_dir: {0}, config.live_dir: {1}, archive_dir: {2},'
|
||||
'original exception: {3}')
|
||||
msg = msg.format(config.default_archive_dir, config.live_dir, archive_dir, e)
|
||||
raise errors.Error(msg)
|
||||
|
||||
cert_manager.delete(config)
|
||||
|
||||
|
||||
def _init_le_client(config, authenticator, installer):
|
||||
if authenticator is not None:
|
||||
# if authenticator was given, then we will need account...
|
||||
@@ -485,7 +581,7 @@ def install(config, plugins):
|
||||
_install_cert(config, le_client, domains)
|
||||
|
||||
|
||||
def plugins_cmd(config, plugins): # TODO: Use IDisplay rather than print
|
||||
def plugins_cmd(config, plugins):
|
||||
"""List server software plugins."""
|
||||
logger.debug("Expected interfaces: %s", config.ifaces)
|
||||
|
||||
@@ -493,8 +589,10 @@ def plugins_cmd(config, plugins): # TODO: Use IDisplay rather than print
|
||||
filtered = plugins.visible().ifaces(ifaces)
|
||||
logger.debug("Filtered plugins: %r", filtered)
|
||||
|
||||
notify = functools.partial(zope.component.getUtility(
|
||||
interfaces.IDisplay).notification, pause=False)
|
||||
if not config.init and not config.prepare:
|
||||
print(str(filtered))
|
||||
notify(str(filtered))
|
||||
return
|
||||
|
||||
filtered.init(config)
|
||||
@@ -502,13 +600,13 @@ def plugins_cmd(config, plugins): # TODO: Use IDisplay rather than print
|
||||
logger.debug("Verified plugins: %r", verified)
|
||||
|
||||
if not config.prepare:
|
||||
print(str(verified))
|
||||
notify(str(verified))
|
||||
return
|
||||
|
||||
verified.prepare()
|
||||
available = verified.available()
|
||||
logger.debug("Prepared plugins: %s", available)
|
||||
print(str(available))
|
||||
notify(str(available))
|
||||
|
||||
|
||||
def rollback(config, plugins):
|
||||
@@ -560,6 +658,7 @@ def revoke(config, unused_plugins): # TODO: coop with renewal config
|
||||
if config.key_path is not None: # revocation by cert key
|
||||
logger.debug("Revoking %s using cert key %s",
|
||||
config.cert_path[0], config.key_path[0])
|
||||
crypto_util.verify_cert_matches_priv_key(config.cert_path[0], config.key_path[0])
|
||||
key = jose.JWK.load(config.key_path[1])
|
||||
else: # revocation by account key
|
||||
logger.debug("Revoking %s using Account Key", config.cert_path[0])
|
||||
@@ -571,6 +670,7 @@ def revoke(config, unused_plugins): # TODO: coop with renewal config
|
||||
|
||||
try:
|
||||
acme.revoke(jose.ComparableX509(cert), config.reason)
|
||||
_delete_if_appropriate(config)
|
||||
except acme_errors.ClientError as e:
|
||||
return str(e)
|
||||
|
||||
@@ -599,7 +699,8 @@ def run(config, plugins): # pylint: disable=too-many-branches,too-many-locals
|
||||
|
||||
cert_path = new_lineage.cert_path if new_lineage else None
|
||||
fullchain_path = new_lineage.fullchain_path if new_lineage else None
|
||||
_report_new_cert(config, cert_path, fullchain_path)
|
||||
key_path = new_lineage.key_path if new_lineage else None
|
||||
_report_new_cert(config, cert_path, fullchain_path, key_path)
|
||||
|
||||
_install_cert(config, le_client, domains, new_lineage)
|
||||
|
||||
@@ -636,6 +737,7 @@ def renew_cert(config, plugins, lineage):
|
||||
except errors.PluginSelectionError as e:
|
||||
logger.info("Could not choose appropriate plugin: %s", e)
|
||||
raise
|
||||
|
||||
le_client = _init_le_client(config, auth, installer)
|
||||
|
||||
_get_and_save_cert(le_client, config, lineage=lineage)
|
||||
@@ -664,6 +766,7 @@ def certonly(config, plugins):
|
||||
except errors.PluginSelectionError as e:
|
||||
logger.info("Could not choose appropriate plugin: %s", e)
|
||||
raise
|
||||
|
||||
le_client = _init_le_client(config, auth, installer)
|
||||
|
||||
if config.csr:
|
||||
@@ -684,7 +787,8 @@ def certonly(config, plugins):
|
||||
|
||||
cert_path = lineage.cert_path if lineage else None
|
||||
fullchain_path = lineage.fullchain_path if lineage else None
|
||||
_report_new_cert(config, cert_path, fullchain_path)
|
||||
key_path = lineage.key_path if lineage else None
|
||||
_report_new_cert(config, cert_path, fullchain_path, key_path)
|
||||
_suggest_donation_if_appropriate(config)
|
||||
|
||||
def renew(config, unused_plugins):
|
||||
@@ -696,12 +800,20 @@ def renew(config, unused_plugins):
|
||||
|
||||
|
||||
def make_or_verify_needed_dirs(config):
|
||||
"""Create or verify existence of config and work directories"""
|
||||
"""Create or verify existence of config, work, and hook directories."""
|
||||
util.set_up_core_dir(config.config_dir, constants.CONFIG_DIRS_MODE,
|
||||
os.geteuid(), config.strict_permissions)
|
||||
util.set_up_core_dir(config.work_dir, constants.CONFIG_DIRS_MODE,
|
||||
os.geteuid(), config.strict_permissions)
|
||||
|
||||
hook_dirs = (config.renewal_pre_hooks_dir,
|
||||
config.renewal_deploy_hooks_dir,
|
||||
config.renewal_post_hooks_dir,)
|
||||
for hook_dir in hook_dirs:
|
||||
util.make_or_verify_dir(hook_dir,
|
||||
uid=os.geteuid(),
|
||||
strict=config.strict_permissions)
|
||||
|
||||
|
||||
def set_displayer(config):
|
||||
"""Set the displayer"""
|
||||
@@ -731,8 +843,14 @@ def main(cli_args=sys.argv[1:]):
|
||||
config = configuration.NamespaceConfig(args)
|
||||
zope.component.provideUtility(config)
|
||||
|
||||
log.post_arg_parse_setup(config)
|
||||
make_or_verify_needed_dirs(config)
|
||||
try:
|
||||
log.post_arg_parse_setup(config)
|
||||
make_or_verify_needed_dirs(config)
|
||||
except errors.Error:
|
||||
# Let plugins_cmd be run as un-privileged user.
|
||||
if config.func != plugins_cmd:
|
||||
raise
|
||||
|
||||
set_displayer(config)
|
||||
|
||||
# Reporter
|
||||
|
||||
@@ -13,7 +13,9 @@ from acme.jose import util as jose_util
|
||||
|
||||
from certbot import constants
|
||||
from certbot import crypto_util
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import reverter
|
||||
from certbot import util
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -100,6 +102,120 @@ class Plugin(object):
|
||||
# other
|
||||
|
||||
|
||||
class Installer(Plugin):
|
||||
"""An installer base class with reverter and ssl_dhparam methods defined.
|
||||
|
||||
Installer plugins do not have to inherit from this class.
|
||||
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Installer, self).__init__(*args, **kwargs)
|
||||
self.reverter = reverter.Reverter(self.config)
|
||||
|
||||
def add_to_checkpoint(self, save_files, save_notes, temporary=False):
|
||||
"""Add files to a checkpoint.
|
||||
|
||||
:param set save_files: set of filepaths to save
|
||||
:param str save_notes: notes about changes during the save
|
||||
:param bool temporary: True if the files should be added to a
|
||||
temporary checkpoint rather than a permanent one. This is
|
||||
usually used for changes that will soon be reverted.
|
||||
|
||||
:raises .errors.PluginError: when unable to add to checkpoint
|
||||
|
||||
"""
|
||||
if temporary:
|
||||
checkpoint_func = self.reverter.add_to_temp_checkpoint
|
||||
else:
|
||||
checkpoint_func = self.reverter.add_to_checkpoint
|
||||
|
||||
try:
|
||||
checkpoint_func(save_files, save_notes)
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
|
||||
def finalize_checkpoint(self, title):
|
||||
"""Timestamp and save changes made through the reverter.
|
||||
|
||||
:param str title: Title describing checkpoint
|
||||
|
||||
:raises .errors.PluginError: when an error occurs
|
||||
|
||||
"""
|
||||
try:
|
||||
self.reverter.finalize_checkpoint(title)
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
|
||||
def recovery_routine(self):
|
||||
"""Revert all previously modified files.
|
||||
|
||||
Reverts all modified files that have not been saved as a checkpoint
|
||||
|
||||
:raises .errors.PluginError: If unable to recover the configuration
|
||||
|
||||
"""
|
||||
try:
|
||||
self.reverter.recovery_routine()
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
|
||||
def revert_temporary_config(self):
|
||||
"""Rollback temporary checkpoint.
|
||||
|
||||
:raises .errors.PluginError: when unable to revert config
|
||||
|
||||
"""
|
||||
try:
|
||||
self.reverter.revert_temporary_config()
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
|
||||
def rollback_checkpoints(self, rollback=1):
|
||||
"""Rollback saved checkpoints.
|
||||
|
||||
:param int rollback: Number of checkpoints to revert
|
||||
|
||||
:raises .errors.PluginError: If there is a problem with the input or
|
||||
the function is unable to correctly revert the configuration
|
||||
|
||||
"""
|
||||
try:
|
||||
self.reverter.rollback_checkpoints(rollback)
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
|
||||
def view_config_changes(self):
|
||||
"""Show all of the configuration changes that have taken place.
|
||||
|
||||
:raises .errors.PluginError: If there is a problem while processing
|
||||
the checkpoints directories.
|
||||
|
||||
"""
|
||||
try:
|
||||
self.reverter.view_config_changes()
|
||||
except errors.ReverterError as err:
|
||||
raise errors.PluginError(str(err))
|
||||
|
||||
@property
|
||||
def ssl_dhparams(self):
|
||||
"""Full absolute path to ssl_dhparams file."""
|
||||
return os.path.join(self.config.config_dir, constants.SSL_DHPARAMS_DEST)
|
||||
|
||||
@property
|
||||
def updated_ssl_dhparams_digest(self):
|
||||
"""Full absolute path to digest of updated ssl_dhparams file."""
|
||||
return os.path.join(self.config.config_dir, constants.UPDATED_SSL_DHPARAMS_DIGEST)
|
||||
|
||||
def install_ssl_dhparams(self):
|
||||
"""Copy Certbot's ssl_dhparams file into the system's config dir if required."""
|
||||
return install_version_controlled_file(
|
||||
self.ssl_dhparams,
|
||||
self.updated_ssl_dhparams_digest,
|
||||
constants.SSL_DHPARAMS_SRC,
|
||||
constants.ALL_SSL_DHPARAMS_HASHES)
|
||||
|
||||
|
||||
class Addr(object):
|
||||
r"""Represents an virtual host address.
|
||||
|
||||
@@ -135,7 +251,7 @@ class Addr(object):
|
||||
"""Normalized representation of addr/port tuple
|
||||
"""
|
||||
if self.ipv6:
|
||||
return (self._normalize_ipv6(self.tup[0]), self.tup[1])
|
||||
return (self.get_ipv6_exploded(), self.tup[1])
|
||||
return self.tup
|
||||
|
||||
def __eq__(self, other):
|
||||
@@ -270,51 +386,50 @@ class TLSSNI01(object):
|
||||
return response
|
||||
|
||||
|
||||
def install_ssl_options_conf(options_ssl, options_ssl_digest, mod_ssl_conf_src,
|
||||
all_ssl_options_hashes):
|
||||
"""Copy Certbot's SSL options file into the system's config dir if required.
|
||||
def install_version_controlled_file(dest_path, digest_path, src_path, all_hashes):
|
||||
"""Copy a file into an active location (likely the system's config dir) if required.
|
||||
|
||||
:param str options_ssl: destination path for file containing ssl options
|
||||
:param str options_ssl_digest: path to save a digest of options_ssl in
|
||||
:param str mod_ssl_conf_src: path to file containing ssl options found in distribution
|
||||
:param list all_ssl_options_hashes: hashes of every released version of options_ssl
|
||||
:param str dest_path: destination path for version controlled file
|
||||
:param str digest_path: path to save a digest of the file in
|
||||
:param str src_path: path to version controlled file found in distribution
|
||||
:param list all_hashes: hashes of every released version of the file
|
||||
"""
|
||||
current_ssl_options_hash = crypto_util.sha256sum(mod_ssl_conf_src)
|
||||
current_hash = crypto_util.sha256sum(src_path)
|
||||
|
||||
def _write_current_hash():
|
||||
with open(options_ssl_digest, "w") as f:
|
||||
f.write(current_ssl_options_hash)
|
||||
with open(digest_path, "w") as f:
|
||||
f.write(current_hash)
|
||||
|
||||
def _install_current_file():
|
||||
shutil.copyfile(mod_ssl_conf_src, options_ssl)
|
||||
shutil.copyfile(src_path, dest_path)
|
||||
_write_current_hash()
|
||||
|
||||
# Check to make sure options-ssl.conf is installed
|
||||
if not os.path.isfile(options_ssl):
|
||||
if not os.path.isfile(dest_path):
|
||||
_install_current_file()
|
||||
return
|
||||
# there's already a file there. if it's up to date, do nothing. if it's not but
|
||||
# it matches a known file hash, we can update it.
|
||||
# otherwise, print a warning once per new version.
|
||||
active_file_digest = crypto_util.sha256sum(options_ssl)
|
||||
if active_file_digest == current_ssl_options_hash: # already up to date
|
||||
active_file_digest = crypto_util.sha256sum(dest_path)
|
||||
if active_file_digest == current_hash: # already up to date
|
||||
return
|
||||
elif active_file_digest in all_ssl_options_hashes: # safe to update
|
||||
elif active_file_digest in all_hashes: # safe to update
|
||||
_install_current_file()
|
||||
else: # has been manually modified, not safe to update
|
||||
# did they modify the current version or an old version?
|
||||
if os.path.isfile(options_ssl_digest):
|
||||
with open(options_ssl_digest, "r") as f:
|
||||
if os.path.isfile(digest_path):
|
||||
with open(digest_path, "r") as f:
|
||||
saved_digest = f.read()
|
||||
# they modified it after we either installed or told them about this version, so return
|
||||
if saved_digest == current_ssl_options_hash:
|
||||
if saved_digest == current_hash:
|
||||
return
|
||||
# there's a new version but we couldn't update the file, or they deleted the digest.
|
||||
# save the current digest so we only print this once, and print a warning
|
||||
_write_current_hash()
|
||||
logger.warning("%s has been manually modified; updated ssl configuration options "
|
||||
logger.warning("%s has been manually modified; updated file "
|
||||
"saved to %s. We recommend updating %s for security purposes.",
|
||||
options_ssl, mod_ssl_conf_src, options_ssl)
|
||||
dest_path, src_path, dest_path)
|
||||
|
||||
|
||||
# test utils used by certbot_apache/certbot_nginx (hence
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Tests for certbot.plugins.common."""
|
||||
import functools
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
@@ -12,6 +13,7 @@ from acme import jose
|
||||
|
||||
from certbot import achallenges
|
||||
from certbot import crypto_util
|
||||
from certbot import errors
|
||||
|
||||
from certbot.tests import acme_util
|
||||
from certbot.tests import util as test_util
|
||||
@@ -77,6 +79,107 @@ class PluginTest(unittest.TestCase):
|
||||
"--mock-foo-bar", dest="different_to_foo_bar", x=1, y=None)
|
||||
|
||||
|
||||
class InstallerTest(test_util.ConfigTestCase):
|
||||
"""Tests for certbot.plugins.common.Installer."""
|
||||
|
||||
def setUp(self):
|
||||
super(InstallerTest, self).setUp()
|
||||
os.mkdir(self.config.config_dir)
|
||||
from certbot.plugins.common import Installer
|
||||
|
||||
with mock.patch("certbot.plugins.common.reverter.Reverter"):
|
||||
self.installer = Installer(config=self.config,
|
||||
name="Installer")
|
||||
self.reverter = self.installer.reverter
|
||||
|
||||
def test_add_to_real_checkpoint(self):
|
||||
files = set(("foo.bar", "baz.qux",))
|
||||
save_notes = "foo bar baz qux"
|
||||
self._test_wrapped_method("add_to_checkpoint", files, save_notes)
|
||||
|
||||
def test_add_to_real_checkpoint2(self):
|
||||
self._test_add_to_checkpoint_common(False)
|
||||
|
||||
def test_add_to_temporary_checkpoint(self):
|
||||
self._test_add_to_checkpoint_common(True)
|
||||
|
||||
def _test_add_to_checkpoint_common(self, temporary):
|
||||
files = set(("foo.bar", "baz.qux",))
|
||||
save_notes = "foo bar baz qux"
|
||||
|
||||
installer_func = functools.partial(self.installer.add_to_checkpoint,
|
||||
temporary=temporary)
|
||||
|
||||
if temporary:
|
||||
reverter_func = self.reverter.add_to_temp_checkpoint
|
||||
else:
|
||||
reverter_func = self.reverter.add_to_checkpoint
|
||||
|
||||
self._test_adapted_method(
|
||||
installer_func, reverter_func, files, save_notes)
|
||||
|
||||
def test_finalize_checkpoint(self):
|
||||
self._test_wrapped_method("finalize_checkpoint", "foo")
|
||||
|
||||
def test_recovery_routine(self):
|
||||
self._test_wrapped_method("recovery_routine")
|
||||
|
||||
def test_revert_temporary_config(self):
|
||||
self._test_wrapped_method("revert_temporary_config")
|
||||
|
||||
def test_rollback_checkpoints(self):
|
||||
self._test_wrapped_method("rollback_checkpoints", 42)
|
||||
|
||||
def test_view_config_changes(self):
|
||||
self._test_wrapped_method("view_config_changes")
|
||||
|
||||
def _test_wrapped_method(self, name, *args, **kwargs):
|
||||
"""Test a wrapped reverter method.
|
||||
|
||||
:param str name: name of the method to test
|
||||
:param tuple args: position arguments to method
|
||||
:param dict kwargs: keyword arguments to method
|
||||
|
||||
"""
|
||||
installer_func = getattr(self.installer, name)
|
||||
reverter_func = getattr(self.reverter, name)
|
||||
self._test_adapted_method(
|
||||
installer_func, reverter_func, *args, **kwargs)
|
||||
|
||||
def _test_adapted_method(self, installer_func,
|
||||
reverter_func, *passed_args, **passed_kwargs):
|
||||
"""Test an adapted reverter method
|
||||
|
||||
:param callable installer_func: installer method to test
|
||||
:param mock.MagicMock reverter_func: mocked adapated
|
||||
reverter method
|
||||
:param tuple passed_args: positional arguments passed from
|
||||
installer method to the reverter method
|
||||
:param dict passed_kargs: keyword arguments passed from
|
||||
installer method to the reverter method
|
||||
|
||||
"""
|
||||
installer_func(*passed_args, **passed_kwargs)
|
||||
reverter_func.assert_called_once_with(*passed_args, **passed_kwargs)
|
||||
reverter_func.side_effect = errors.ReverterError
|
||||
self.assertRaises(
|
||||
errors.PluginError, installer_func, *passed_args, **passed_kwargs)
|
||||
|
||||
def test_install_ssl_dhparams(self):
|
||||
self.installer.install_ssl_dhparams()
|
||||
self.assertTrue(os.path.isfile(self.installer.ssl_dhparams))
|
||||
|
||||
def _current_ssl_dhparams_hash(self):
|
||||
from certbot.constants import SSL_DHPARAMS_SRC
|
||||
return crypto_util.sha256sum(SSL_DHPARAMS_SRC)
|
||||
|
||||
def test_current_file_hash_in_all_hashes(self):
|
||||
from certbot.constants import ALL_SSL_DHPARAMS_HASHES
|
||||
self.assertTrue(self._current_ssl_dhparams_hash() in ALL_SSL_DHPARAMS_HASHES,
|
||||
"Constants.ALL_SSL_DHPARAMS_HASHES must be appended"
|
||||
" with the sha256 hash of self.config.ssl_dhparams when it is updated.")
|
||||
|
||||
|
||||
class AddrTest(unittest.TestCase):
|
||||
"""Tests for certbot.client.plugins.common.Addr."""
|
||||
|
||||
@@ -202,7 +305,7 @@ class TLSSNI01Test(unittest.TestCase):
|
||||
achall.chall.encode.return_value = "token"
|
||||
key = test_util.load_pyopenssl_private_key("rsa512_key.pem")
|
||||
achall.response_and_validation.return_value = (
|
||||
response, (test_util.load_cert("cert.pem"), key))
|
||||
response, (test_util.load_cert("cert_512.pem"), key))
|
||||
|
||||
with mock.patch("certbot.plugins.common.open",
|
||||
mock_open, create=True):
|
||||
@@ -215,7 +318,7 @@ class TLSSNI01Test(unittest.TestCase):
|
||||
# pylint: disable=no-member
|
||||
mock_open.assert_called_once_with(self.sni.get_cert_path(achall), "wb")
|
||||
mock_open.return_value.write.assert_called_once_with(
|
||||
test_util.load_vector("cert.pem"))
|
||||
test_util.load_vector("cert_512.pem"))
|
||||
mock_safe_open.assert_called_once_with(
|
||||
self.sni.get_key_path(achall), "wb", chmod=0o400)
|
||||
mock_safe_open.return_value.write.assert_called_once_with(
|
||||
@@ -227,11 +330,11 @@ class TLSSNI01Test(unittest.TestCase):
|
||||
achall.response(achall.account_key).z_domain.decode("utf-8"))
|
||||
|
||||
|
||||
class InstallSslOptionsConfTest(test_util.TempDirTestCase):
|
||||
"""Tests for certbot.plugins.common.install_ssl_options_conf."""
|
||||
class InstallVersionControlledFileTest(test_util.TempDirTestCase):
|
||||
"""Tests for certbot.plugins.common.install_version_controlled_file."""
|
||||
|
||||
def setUp(self):
|
||||
super(InstallSslOptionsConfTest, self).setUp()
|
||||
super(InstallVersionControlledFileTest, self).setUp()
|
||||
self.hashes = ["someotherhash"]
|
||||
self.dest_path = os.path.join(self.tempdir, "options-ssl-dest.conf")
|
||||
self.hash_path = os.path.join(self.tempdir, ".options-ssl-conf.txt")
|
||||
@@ -243,19 +346,19 @@ class InstallSslOptionsConfTest(test_util.TempDirTestCase):
|
||||
self.hashes.append(crypto_util.sha256sum(path))
|
||||
|
||||
def _call(self):
|
||||
from certbot.plugins.common import install_ssl_options_conf
|
||||
install_ssl_options_conf(self.dest_path,
|
||||
self.hash_path,
|
||||
self.source_path,
|
||||
self.hashes)
|
||||
from certbot.plugins.common import install_version_controlled_file
|
||||
install_version_controlled_file(self.dest_path,
|
||||
self.hash_path,
|
||||
self.source_path,
|
||||
self.hashes)
|
||||
|
||||
def _current_ssl_options_hash(self):
|
||||
def _current_file_hash(self):
|
||||
return crypto_util.sha256sum(self.source_path)
|
||||
|
||||
def _assert_current_file(self):
|
||||
self.assertTrue(os.path.isfile(self.dest_path))
|
||||
self.assertEqual(crypto_util.sha256sum(self.dest_path),
|
||||
self._current_ssl_options_hash())
|
||||
self._current_file_hash())
|
||||
|
||||
def test_no_file(self):
|
||||
self.assertFalse(os.path.isfile(self.dest_path))
|
||||
@@ -282,9 +385,9 @@ class InstallSslOptionsConfTest(test_util.TempDirTestCase):
|
||||
self.assertFalse(mock_logger.warning.called)
|
||||
self.assertTrue(os.path.isfile(self.dest_path))
|
||||
self.assertEqual(crypto_util.sha256sum(self.source_path),
|
||||
self._current_ssl_options_hash())
|
||||
self._current_file_hash())
|
||||
self.assertNotEqual(crypto_util.sha256sum(self.dest_path),
|
||||
self._current_ssl_options_hash())
|
||||
self._current_file_hash())
|
||||
|
||||
def test_manually_modified_past_file_warns(self):
|
||||
with open(self.dest_path, "a") as mod_ssl_conf:
|
||||
@@ -294,10 +397,10 @@ class InstallSslOptionsConfTest(test_util.TempDirTestCase):
|
||||
with mock.patch("certbot.plugins.common.logger") as mock_logger:
|
||||
self._call()
|
||||
self.assertEqual(mock_logger.warning.call_args[0][0],
|
||||
"%s has been manually modified; updated ssl configuration options "
|
||||
"%s has been manually modified; updated file "
|
||||
"saved to %s. We recommend updating %s for security purposes.")
|
||||
self.assertEqual(crypto_util.sha256sum(self.source_path),
|
||||
self._current_ssl_options_hash())
|
||||
self._current_file_hash())
|
||||
# only print warning once
|
||||
with mock.patch("certbot.plugins.common.logger") as mock_logger:
|
||||
self._call()
|
||||
|
||||
@@ -108,11 +108,19 @@ def choose_plugin(prepared, question):
|
||||
opts = [plugin_ep.description_with_name +
|
||||
(" [Misconfigured]" if plugin_ep.misconfigured else "")
|
||||
for plugin_ep in prepared]
|
||||
names = set(plugin_ep.name for plugin_ep in prepared)
|
||||
|
||||
while True:
|
||||
disp = z_util(interfaces.IDisplay)
|
||||
code, index = disp.menu(
|
||||
question, opts, force_interactive=True)
|
||||
if "CERTBOT_AUTO" in os.environ and names == set(("apache", "nginx")):
|
||||
# The possibility of being offered exactly apache and nginx here
|
||||
# is new interactivity brought by https://github.com/certbot/certbot/issues/4079,
|
||||
# so set apache as a default for those kinds of non-interactive use
|
||||
# (the user will get a warning to set --non-interactive or --force-interactive)
|
||||
apache_idx = [n for n, p in enumerate(prepared) if p.name == "apache"][0]
|
||||
code, index = disp.menu(question, opts, default=apache_idx)
|
||||
else:
|
||||
code, index = disp.menu(question, opts, force_interactive=True)
|
||||
|
||||
if code == display_util.OK:
|
||||
plugin_ep = prepared[index]
|
||||
@@ -134,6 +142,8 @@ def record_chosen_plugins(config, plugins, auth, inst):
|
||||
"Update the config entries to reflect the plugins we actually selected."
|
||||
config.authenticator = plugins.find_init(auth).name if auth else "None"
|
||||
config.installer = plugins.find_init(inst).name if inst else "None"
|
||||
logger.info("Plugins selected: Authenticator %s, Installer %s",
|
||||
config.authenticator, config.installer)
|
||||
|
||||
|
||||
def choose_configurator_plugins(config, plugins, verb):
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Tests for letsencrypt.plugins.selection"""
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
@@ -115,6 +116,7 @@ class ChoosePluginTest(unittest.TestCase):
|
||||
False))
|
||||
self.mock_apache = mock.Mock(
|
||||
description_with_name="a", misconfigured=True)
|
||||
self.mock_apache.name = "apache"
|
||||
self.mock_stand = mock.Mock(
|
||||
description_with_name="s", misconfigured=False)
|
||||
self.mock_stand.init().more_info.return_value = "standalone"
|
||||
@@ -146,3 +148,26 @@ class ChoosePluginTest(unittest.TestCase):
|
||||
def test_no_choice(self, mock_util):
|
||||
mock_util().menu.return_value = (display_util.CANCEL, 0)
|
||||
self.assertTrue(self._call() is None)
|
||||
|
||||
@test_util.patch_get_utility("certbot.plugins.selection.z_util")
|
||||
def test_new_interaction_avoidance(self, mock_util):
|
||||
mock_nginx = mock.Mock(
|
||||
description_with_name="n", misconfigured=False)
|
||||
mock_nginx.init().more_info.return_value = "nginx plugin"
|
||||
mock_nginx.name = "nginx"
|
||||
self.plugins[1] = mock_nginx
|
||||
mock_util().menu.return_value = (display_util.CANCEL, 0)
|
||||
|
||||
unset_cb_auto = os.environ.get("CERTBOT_AUTO") is None
|
||||
if unset_cb_auto:
|
||||
os.environ["CERTBOT_AUTO"] = "foo"
|
||||
try:
|
||||
self._call()
|
||||
finally:
|
||||
if unset_cb_auto:
|
||||
del os.environ["CERTBOT_AUTO"]
|
||||
|
||||
self.assertTrue("default" in mock_util().menu.call_args[1])
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -158,10 +158,11 @@ class AuthenticatorTest(unittest.TestCase):
|
||||
|
||||
@test_util.patch_get_utility()
|
||||
def test_perform_eaddrinuse_retry(self, mock_get_utility):
|
||||
mock_utility = mock_get_utility()
|
||||
errno = socket.errno.EADDRINUSE
|
||||
error = errors.StandaloneBindError(mock.MagicMock(errno=errno), -1)
|
||||
self.auth.servers.run.side_effect = [error] + 2 * [mock.MagicMock()]
|
||||
mock_yesno = mock_get_utility.return_value.yesno
|
||||
mock_yesno = mock_utility.yesno
|
||||
mock_yesno.return_value = True
|
||||
|
||||
self.test_perform()
|
||||
@@ -169,7 +170,8 @@ class AuthenticatorTest(unittest.TestCase):
|
||||
|
||||
@test_util.patch_get_utility()
|
||||
def test_perform_eaddrinuse_no_retry(self, mock_get_utility):
|
||||
mock_yesno = mock_get_utility.return_value.yesno
|
||||
mock_utility = mock_get_utility()
|
||||
mock_yesno = mock_utility.yesno
|
||||
mock_yesno.return_value = False
|
||||
|
||||
errno = socket.errno.EADDRINUSE
|
||||
|
||||
@@ -102,10 +102,14 @@ to serve all files under specified web root ({0})."""
|
||||
webroot = None
|
||||
|
||||
while webroot is None:
|
||||
webroot = self._prompt_with_webroot_list(domain, known_webroots)
|
||||
|
||||
if webroot is None:
|
||||
webroot = self._prompt_for_new_webroot(domain)
|
||||
if known_webroots:
|
||||
# Only show the menu if we have options for it
|
||||
webroot = self._prompt_with_webroot_list(domain, known_webroots)
|
||||
if webroot is None:
|
||||
webroot = self._prompt_for_new_webroot(domain)
|
||||
else:
|
||||
# Allow prompt to raise PluginError instead of looping forever
|
||||
webroot = self._prompt_for_new_webroot(domain, True)
|
||||
|
||||
return webroot
|
||||
|
||||
@@ -125,13 +129,18 @@ to serve all files under specified web root ({0})."""
|
||||
else: # code == display_util.OK
|
||||
return None if index == 0 else known_webroots[index - 1]
|
||||
|
||||
def _prompt_for_new_webroot(self, domain):
|
||||
def _prompt_for_new_webroot(self, domain, allowraise=False):
|
||||
code, webroot = ops.validated_directory(
|
||||
_validate_webroot,
|
||||
"Input the webroot for {0}:".format(domain),
|
||||
force_interactive=True)
|
||||
if code == display_util.CANCEL:
|
||||
return None
|
||||
if not allowraise:
|
||||
return None
|
||||
else:
|
||||
raise errors.PluginError(
|
||||
"Every requested domain must have a "
|
||||
"webroot when using the webroot plugin.")
|
||||
else: # code == display_util.OK
|
||||
return _validate_webroot(webroot)
|
||||
|
||||
|
||||
@@ -96,7 +96,7 @@ class AuthenticatorTest(unittest.TestCase):
|
||||
@test_util.patch_get_utility()
|
||||
def test_new_webroot(self, mock_get_utility):
|
||||
self.config.webroot_path = []
|
||||
self.config.webroot_map = {}
|
||||
self.config.webroot_map = {"something.com": self.path}
|
||||
|
||||
mock_display = mock_get_utility()
|
||||
mock_display.menu.return_value = (display_util.OK, 0,)
|
||||
@@ -108,6 +108,19 @@ class AuthenticatorTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(self.config.webroot_map[self.achall.domain], self.path)
|
||||
|
||||
@test_util.patch_get_utility()
|
||||
def test_new_webroot_empty_map_cancel(self, mock_get_utility):
|
||||
self.config.webroot_path = []
|
||||
self.config.webroot_map = {}
|
||||
|
||||
mock_display = mock_get_utility()
|
||||
mock_display.menu.return_value = (display_util.OK, 0,)
|
||||
with mock.patch('certbot.display.ops.validated_directory') as m:
|
||||
m.return_value = (display_util.CANCEL, -1)
|
||||
self.assertRaises(errors.PluginError,
|
||||
self.auth.perform,
|
||||
[self.achall])
|
||||
|
||||
def test_perform_missing_root(self):
|
||||
self.config.webroot_path = None
|
||||
self.config.webroot_map = {}
|
||||
@@ -132,6 +145,22 @@ class AuthenticatorTest(unittest.TestCase):
|
||||
mock_chown.side_effect = OSError(errno.EACCES, "msg")
|
||||
self.auth.perform([self.achall]) # exception caught and logged
|
||||
|
||||
|
||||
@test_util.patch_get_utility()
|
||||
def test_perform_new_webroot_not_in_map(self, mock_get_utility):
|
||||
new_webroot = tempfile.mkdtemp()
|
||||
self.config.webroot_path = []
|
||||
self.config.webroot_map = {"whatever.com": self.path}
|
||||
mock_display = mock_get_utility()
|
||||
mock_display.menu.side_effect = ((display_util.OK, 0),
|
||||
(display_util.OK, new_webroot))
|
||||
achall = achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.HTTP01_P, domain="something.com", account_key=KEY)
|
||||
with mock.patch('certbot.display.ops.validated_directory') as m:
|
||||
m.return_value = (display_util.OK, new_webroot,)
|
||||
self.auth.perform([achall])
|
||||
self.assertEqual(self.config.webroot_map[achall.domain], new_webroot)
|
||||
|
||||
def test_perform_permissions(self):
|
||||
self.auth.prepare()
|
||||
|
||||
|
||||
@@ -320,6 +320,12 @@ def _renew_describe_results(config, renew_successes, renew_failures,
|
||||
|
||||
out = []
|
||||
notify = out.append
|
||||
disp = zope.component.getUtility(interfaces.IDisplay)
|
||||
|
||||
def notify_error(err):
|
||||
"""Notify and log errors."""
|
||||
notify(err)
|
||||
logger.error(err)
|
||||
|
||||
if config.dry_run:
|
||||
notify("** DRY RUN: simulating 'certbot renew' close to cert expiry")
|
||||
@@ -338,14 +344,14 @@ def _renew_describe_results(config, renew_successes, renew_failures,
|
||||
"have been renewed:")
|
||||
notify(report(renew_successes, "success"))
|
||||
elif renew_failures and not renew_successes:
|
||||
notify("All renewal attempts failed. The following certs could not be "
|
||||
"renewed:")
|
||||
notify(report(renew_failures, "failure"))
|
||||
notify_error("All renewal attempts failed. The following certs could "
|
||||
"not be renewed:")
|
||||
notify_error(report(renew_failures, "failure"))
|
||||
elif renew_failures and renew_successes:
|
||||
notify("The following certs were successfully renewed:")
|
||||
notify(report(renew_successes, "success"))
|
||||
notify("\nThe following certs could not be renewed:")
|
||||
notify(report(renew_failures, "failure"))
|
||||
notify(report(renew_successes, "success") + "\n")
|
||||
notify_error("The following certs could not be renewed:")
|
||||
notify_error(report(renew_failures, "failure"))
|
||||
|
||||
if parse_failures:
|
||||
notify("\nAdditionally, the following renewal configuration files "
|
||||
@@ -356,9 +362,7 @@ def _renew_describe_results(config, renew_successes, renew_failures,
|
||||
notify("** DRY RUN: simulating 'certbot renew' close to cert expiry")
|
||||
notify("** (The test certificates above have not been saved.)")
|
||||
|
||||
if config.quiet and not (renew_failures or parse_failures):
|
||||
return
|
||||
print("\n".join(out))
|
||||
disp.notification("\n".join(out), wrap=False)
|
||||
|
||||
|
||||
def handle_renewal_request(config):
|
||||
@@ -372,8 +376,8 @@ def handle_renewal_request(config):
|
||||
"renewing all installed certificates that are due "
|
||||
"to be renewed or renewing a single certificate specified "
|
||||
"by its name. If you would like to renew specific "
|
||||
"certificates by their domains, use the certonly "
|
||||
"command. The renew verb may provide other options "
|
||||
"certificates by their domains, use the certonly command "
|
||||
"instead. The renew verb may provide other options "
|
||||
"for selecting certificates to renew in the future.")
|
||||
|
||||
if config.certname:
|
||||
@@ -389,14 +393,16 @@ def handle_renewal_request(config):
|
||||
disp = zope.component.getUtility(interfaces.IDisplay)
|
||||
disp.notification("Processing " + renewal_file, pause=False)
|
||||
lineage_config = copy.deepcopy(config)
|
||||
lineagename = storage.lineagename_for_filename(renewal_file)
|
||||
|
||||
# Note that this modifies config (to add back the configuration
|
||||
# elements from within the renewal configuration file).
|
||||
try:
|
||||
renewal_candidate = _reconstitute(lineage_config, renewal_file)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
logger.warning("Renewal configuration file %s produced an "
|
||||
"unexpected error: %s. Skipping.", renewal_file, e)
|
||||
logger.warning("Renewal configuration file %s (cert: %s) "
|
||||
"produced an unexpected error: %s. Skipping.",
|
||||
renewal_file, lineagename, e)
|
||||
logger.debug("Traceback was:\n%s", traceback.format_exc())
|
||||
parse_failures.append(renewal_file)
|
||||
continue
|
||||
@@ -422,8 +428,9 @@ def handle_renewal_request(config):
|
||||
renew_skipped.append(renewal_candidate.fullchain)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
# obtain_cert (presumably) encountered an unanticipated problem.
|
||||
logger.warning("Attempting to renew cert from %s produced an "
|
||||
"unexpected error: %s. Skipping.", renewal_file, e)
|
||||
logger.warning("Attempting to renew cert (%s) from %s produced an "
|
||||
"unexpected error: %s. Skipping.", lineagename,
|
||||
renewal_file, e)
|
||||
logger.debug("Traceback was:\n%s", traceback.format_exc())
|
||||
renew_failures.append(renewal_candidate.fullchain)
|
||||
|
||||
|
||||
8
certbot/ssl-dhparams.pem
Normal file
8
certbot/ssl-dhparams.pem
Normal file
@@ -0,0 +1,8 @@
|
||||
-----BEGIN DH PARAMETERS-----
|
||||
MIIBCAKCAQEA//////////+t+FRYortKmq/cViAnPTzx2LnFg84tNpWp4TZBFGQz
|
||||
+8yTnc4kmz75fS/jY2MMddj2gbICrsRhetPfHtXV/WVhJDP1H18GbtCFY2VVPe0a
|
||||
87VXE15/V8k1mE8McODmi3fipona8+/och3xWKE2rec1MKzKT0g6eXq8CrGCsyT7
|
||||
YdEIqUuyyOP7uWrat2DX9GgdT0Kj3jlN9K5W7edjcrsZCwenyO4KbXCeAvzhzffi
|
||||
7MA0BM0oNC9hkXL+nOmFg/+OTxIy7vKBg8P+OxtMb61zO7X8vC7CIAXFjvGDfRaD
|
||||
ssbzSibBsu/6iGtCOGEoXJf//////////wIBAg==
|
||||
-----END DH PARAMETERS-----
|
||||
@@ -49,6 +49,21 @@ def renewal_file_for_certname(config, certname):
|
||||
"{1}).".format(certname, path))
|
||||
return path
|
||||
|
||||
|
||||
def cert_path_for_cert_name(config, cert_name):
|
||||
""" If `--cert-name` was specified, but you need a value for `--cert-path`.
|
||||
|
||||
:param `configuration.NamespaceConfig` config: parsed command line arguments
|
||||
:param str cert_name: cert name.
|
||||
|
||||
"""
|
||||
cert_name_implied_conf = renewal_file_for_certname(config, cert_name)
|
||||
fullchain_path = configobj.ConfigObj(cert_name_implied_conf)["fullchain"]
|
||||
with open(fullchain_path) as f:
|
||||
cert_path = (fullchain_path, f.read())
|
||||
return cert_path
|
||||
|
||||
|
||||
def config_with_defaults(config=None):
|
||||
"""Merge supplied config, if provided, on top of builtin defaults."""
|
||||
defaults_copy = configobj.ConfigObj(constants.RENEWER_DEFAULTS)
|
||||
@@ -186,8 +201,15 @@ def get_link_target(link):
|
||||
:returns: Absolute path to the target of link
|
||||
:rtype: str
|
||||
|
||||
:raises .CertStorageError: If link does not exists.
|
||||
|
||||
"""
|
||||
target = os.readlink(link)
|
||||
try:
|
||||
target = os.readlink(link)
|
||||
except OSError:
|
||||
raise errors.CertStorageError(
|
||||
"Expected {0} to be a symlink".format(link))
|
||||
|
||||
if not os.path.isabs(target):
|
||||
target = os.path.join(os.path.dirname(link), target)
|
||||
return os.path.abspath(target)
|
||||
@@ -239,7 +261,7 @@ def _relpath_from_file(archive_dir, from_file):
|
||||
"""Path to a directory from a file"""
|
||||
return os.path.relpath(archive_dir, os.path.dirname(from_file))
|
||||
|
||||
def _full_archive_path(config_obj, cli_config, lineagename):
|
||||
def full_archive_path(config_obj, cli_config, lineagename):
|
||||
"""Returns the full archive path for a lineagename
|
||||
|
||||
Uses cli_config to determine archive path if not available from config_obj.
|
||||
@@ -264,7 +286,7 @@ def delete_files(config, certname):
|
||||
"""
|
||||
renewal_filename = renewal_file_for_certname(config, certname)
|
||||
# file exists
|
||||
full_default_archive_dir = _full_archive_path(None, config, certname)
|
||||
full_default_archive_dir = full_archive_path(None, config, certname)
|
||||
full_default_live_dir = _full_live_path(config, certname)
|
||||
try:
|
||||
renewal_config = configobj.ConfigObj(renewal_filename)
|
||||
@@ -316,7 +338,7 @@ def delete_files(config, certname):
|
||||
|
||||
# archive directory
|
||||
try:
|
||||
archive_path = _full_archive_path(renewal_config, config, certname)
|
||||
archive_path = full_archive_path(renewal_config, config, certname)
|
||||
shutil.rmtree(archive_path)
|
||||
logger.debug("Removed %s", archive_path)
|
||||
except OSError:
|
||||
@@ -443,7 +465,7 @@ class RenewableCert(object):
|
||||
@property
|
||||
def archive_dir(self):
|
||||
"""Returns the default or specified archive directory"""
|
||||
return _full_archive_path(self.configuration,
|
||||
return full_archive_path(self.configuration,
|
||||
self.cli_config, self.lineagename)
|
||||
|
||||
def relative_archive_dir(self, from_file):
|
||||
@@ -985,7 +1007,7 @@ class RenewableCert(object):
|
||||
# lineagename will now potentially be modified based on which
|
||||
# renewal configuration file could actually be created
|
||||
lineagename = lineagename_for_filename(config_filename)
|
||||
archive = _full_archive_path(None, cli_config, lineagename)
|
||||
archive = full_archive_path(None, cli_config, lineagename)
|
||||
live_dir = _full_live_path(cli_config, lineagename)
|
||||
if os.path.exists(archive):
|
||||
raise errors.CertStorageError(
|
||||
|
||||
@@ -14,12 +14,10 @@ from acme import messages
|
||||
|
||||
from certbot import errors
|
||||
|
||||
from certbot.tests import util
|
||||
|
||||
from certbot.tests.util import TempDirTestCase
|
||||
import certbot.tests.util as test_util
|
||||
|
||||
|
||||
KEY = jose.JWKRSA.load(util.load_vector("rsa512_key_2.pem"))
|
||||
KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem"))
|
||||
|
||||
|
||||
class AccountTest(unittest.TestCase):
|
||||
@@ -48,22 +46,19 @@ class AccountTest(unittest.TestCase):
|
||||
|
||||
def test_id(self):
|
||||
self.assertEqual(
|
||||
self.acc.id, "bca5889f66457d5b62fbba7b25f9ab6f")
|
||||
self.acc.id, "7adac10320f585ddf118429c0c4af2cd")
|
||||
|
||||
def test_slug(self):
|
||||
self.assertEqual(
|
||||
self.acc.slug, "test.certbot.org@2015-07-04T14:04:10Z (bca5)")
|
||||
self.acc.slug, "test.certbot.org@2015-07-04T14:04:10Z (7ada)")
|
||||
|
||||
def test_repr(self):
|
||||
self.assertTrue(repr(self.acc).startswith(
|
||||
"<Account(i_am_a_regr, bca5889f66457d5b62fbba7b25f9ab6f, Meta("))
|
||||
"<Account(i_am_a_regr, 7adac10320f585ddf118429c0c4af2cd, Meta("))
|
||||
|
||||
class ReportNewAccountTest(unittest.TestCase):
|
||||
class ReportNewAccountTest(test_util.ConfigTestCase):
|
||||
"""Tests for certbot.account.report_new_account."""
|
||||
|
||||
def setUp(self):
|
||||
self.config = mock.MagicMock(config_dir="/etc/letsencrypt")
|
||||
|
||||
def _call(self):
|
||||
from certbot.account import report_new_account
|
||||
report_new_account(self.config)
|
||||
@@ -98,14 +93,12 @@ class AccountMemoryStorageTest(unittest.TestCase):
|
||||
self.assertEqual([account], self.storage.find_all())
|
||||
|
||||
|
||||
class AccountFileStorageTest(TempDirTestCase):
|
||||
class AccountFileStorageTest(test_util.ConfigTestCase):
|
||||
"""Tests for certbot.account.AccountFileStorage."""
|
||||
|
||||
def setUp(self):
|
||||
super(AccountFileStorageTest, self).setUp()
|
||||
|
||||
self.config = mock.MagicMock(
|
||||
accounts_dir=os.path.join(self.tempdir, "accounts"))
|
||||
from certbot.account import AccountFileStorage
|
||||
self.storage = AccountFileStorage(self.config)
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
|
||||
"""Tests for certbot.cert_manager."""
|
||||
# pylint: disable=protected-access
|
||||
import os
|
||||
@@ -18,58 +19,50 @@ from certbot.storage import ALL_FOUR
|
||||
from certbot.tests import storage_test
|
||||
from certbot.tests import util as test_util
|
||||
|
||||
from certbot.tests.util import TempDirTestCase
|
||||
|
||||
|
||||
class BaseCertManagerTest(TempDirTestCase):
|
||||
class BaseCertManagerTest(test_util.ConfigTestCase):
|
||||
"""Base class for setting up Cert Manager tests.
|
||||
"""
|
||||
def setUp(self):
|
||||
super(BaseCertManagerTest, self).setUp()
|
||||
|
||||
os.makedirs(os.path.join(self.tempdir, "renewal"))
|
||||
|
||||
self.cli_config = configuration.NamespaceConfig(mock.MagicMock(
|
||||
config_dir=self.tempdir,
|
||||
work_dir=self.tempdir,
|
||||
logs_dir=self.tempdir,
|
||||
quiet=False,
|
||||
))
|
||||
self.config.quiet = False
|
||||
os.makedirs(self.config.renewal_configs_dir)
|
||||
|
||||
self.domains = {
|
||||
"example.org": None,
|
||||
"other.com": os.path.join(self.tempdir, "specialarchive")
|
||||
"other.com": os.path.join(self.config.config_dir, "specialarchive")
|
||||
}
|
||||
self.configs = dict((domain, self._set_up_config(domain, self.domains[domain]))
|
||||
self.config_files = dict((domain, self._set_up_config(domain, self.domains[domain]))
|
||||
for domain in self.domains)
|
||||
|
||||
# We also create a file that isn't a renewal config in the same
|
||||
# location to test that logic that reads in all-and-only renewal
|
||||
# configs will ignore it and NOT attempt to parse it.
|
||||
junk = open(os.path.join(self.tempdir, "renewal", "IGNORE.THIS"), "w")
|
||||
junk = open(os.path.join(self.config.renewal_configs_dir, "IGNORE.THIS"), "w")
|
||||
junk.write("This file should be ignored!")
|
||||
junk.close()
|
||||
|
||||
def _set_up_config(self, domain, custom_archive):
|
||||
# TODO: maybe provide NamespaceConfig.make_dirs?
|
||||
# TODO: main() should create those dirs, c.f. #902
|
||||
os.makedirs(os.path.join(self.tempdir, "live", domain))
|
||||
config = configobj.ConfigObj()
|
||||
os.makedirs(os.path.join(self.config.live_dir, domain))
|
||||
config_file = configobj.ConfigObj()
|
||||
|
||||
if custom_archive is not None:
|
||||
os.makedirs(custom_archive)
|
||||
config["archive_dir"] = custom_archive
|
||||
config_file["archive_dir"] = custom_archive
|
||||
else:
|
||||
os.makedirs(os.path.join(self.tempdir, "archive", domain))
|
||||
os.makedirs(os.path.join(self.config.default_archive_dir, domain))
|
||||
|
||||
for kind in ALL_FOUR:
|
||||
config[kind] = os.path.join(self.tempdir, "live", domain,
|
||||
config_file[kind] = os.path.join(self.config.live_dir, domain,
|
||||
kind + ".pem")
|
||||
|
||||
config.filename = os.path.join(self.tempdir, "renewal",
|
||||
config_file.filename = os.path.join(self.config.renewal_configs_dir,
|
||||
domain + ".conf")
|
||||
config.write()
|
||||
return config
|
||||
config_file.write()
|
||||
return config_file
|
||||
|
||||
|
||||
class UpdateLiveSymlinksTest(BaseCertManagerTest):
|
||||
@@ -86,27 +79,27 @@ class UpdateLiveSymlinksTest(BaseCertManagerTest):
|
||||
if custom_archive is not None:
|
||||
archive_dir_path = custom_archive
|
||||
else:
|
||||
archive_dir_path = os.path.join(self.tempdir, "archive", domain)
|
||||
archive_dir_path = os.path.join(self.config.default_archive_dir, domain)
|
||||
archive_paths[domain] = dict((kind,
|
||||
os.path.join(archive_dir_path, kind + "1.pem")) for kind in ALL_FOUR)
|
||||
for kind in ALL_FOUR:
|
||||
live_path = self.configs[domain][kind]
|
||||
live_path = self.config_files[domain][kind]
|
||||
archive_path = archive_paths[domain][kind]
|
||||
open(archive_path, 'a').close()
|
||||
# path is incorrect but base must be correct
|
||||
os.symlink(os.path.join(self.tempdir, kind + "1.pem"), live_path)
|
||||
os.symlink(os.path.join(self.config.config_dir, kind + "1.pem"), live_path)
|
||||
|
||||
# run update symlinks
|
||||
cert_manager.update_live_symlinks(self.cli_config)
|
||||
cert_manager.update_live_symlinks(self.config)
|
||||
|
||||
# check that symlinks go where they should
|
||||
prev_dir = os.getcwd()
|
||||
try:
|
||||
for domain in self.domains:
|
||||
for kind in ALL_FOUR:
|
||||
os.chdir(os.path.dirname(self.configs[domain][kind]))
|
||||
os.chdir(os.path.dirname(self.config_files[domain][kind]))
|
||||
self.assertEqual(
|
||||
os.path.realpath(os.readlink(self.configs[domain][kind])),
|
||||
os.path.realpath(os.readlink(self.config_files[domain][kind])),
|
||||
os.path.realpath(archive_paths[domain][kind]))
|
||||
finally:
|
||||
os.chdir(prev_dir)
|
||||
@@ -115,16 +108,45 @@ class UpdateLiveSymlinksTest(BaseCertManagerTest):
|
||||
class DeleteTest(storage_test.BaseRenewableCertTest):
|
||||
"""Tests for certbot.cert_manager.delete
|
||||
"""
|
||||
|
||||
def _call(self):
|
||||
from certbot import cert_manager
|
||||
cert_manager.delete(self.config)
|
||||
|
||||
@test_util.patch_get_utility()
|
||||
@mock.patch('certbot.cert_manager.lineage_for_certname')
|
||||
@mock.patch('certbot.storage.delete_files')
|
||||
def test_delete(self, mock_delete_files, mock_lineage_for_certname, unused_get_utility):
|
||||
def test_delete_from_config(self, mock_delete_files, mock_lineage_for_certname,
|
||||
unused_get_utility):
|
||||
"""Test delete"""
|
||||
mock_lineage_for_certname.return_value = self.test_rc
|
||||
self.cli_config.certname = "example.org"
|
||||
from certbot import cert_manager
|
||||
cert_manager.delete(self.cli_config)
|
||||
self.assertTrue(mock_delete_files.called)
|
||||
self.config.certname = "example.org"
|
||||
self._call()
|
||||
mock_delete_files.assert_called_once_with(self.config, "example.org")
|
||||
|
||||
@test_util.patch_get_utility()
|
||||
@mock.patch('certbot.cert_manager.lineage_for_certname')
|
||||
@mock.patch('certbot.storage.delete_files')
|
||||
def test_delete_interactive_single(self, mock_delete_files, mock_lineage_for_certname,
|
||||
mock_util):
|
||||
"""Test delete"""
|
||||
mock_lineage_for_certname.return_value = self.test_rc
|
||||
mock_util().checklist.return_value = (display_util.OK, ["example.org"])
|
||||
self._call()
|
||||
mock_delete_files.assert_called_once_with(self.config, "example.org")
|
||||
|
||||
@test_util.patch_get_utility()
|
||||
@mock.patch('certbot.cert_manager.lineage_for_certname')
|
||||
@mock.patch('certbot.storage.delete_files')
|
||||
def test_delete_interactive_multiple(self, mock_delete_files, mock_lineage_for_certname,
|
||||
mock_util):
|
||||
"""Test delete"""
|
||||
mock_lineage_for_certname.return_value = self.test_rc
|
||||
mock_util().checklist.return_value = (display_util.OK, ["example.org", "other.org"])
|
||||
self._call()
|
||||
mock_delete_files.assert_any_call(self.config, "example.org")
|
||||
mock_delete_files.assert_any_call(self.config, "other.org")
|
||||
self.assertEqual(mock_delete_files.call_count, 2)
|
||||
|
||||
|
||||
class CertificatesTest(BaseCertManagerTest):
|
||||
@@ -137,15 +159,15 @@ class CertificatesTest(BaseCertManagerTest):
|
||||
@mock.patch('certbot.cert_manager.logger')
|
||||
@test_util.patch_get_utility()
|
||||
def test_certificates_parse_fail(self, mock_utility, mock_logger):
|
||||
self._certificates(self.cli_config)
|
||||
self._certificates(self.config)
|
||||
self.assertTrue(mock_logger.warning.called) #pylint: disable=no-member
|
||||
self.assertTrue(mock_utility.called)
|
||||
|
||||
@mock.patch('certbot.cert_manager.logger')
|
||||
@test_util.patch_get_utility()
|
||||
def test_certificates_quiet(self, mock_utility, mock_logger):
|
||||
self.cli_config.quiet = True
|
||||
self._certificates(self.cli_config)
|
||||
self.config.quiet = True
|
||||
self._certificates(self.config)
|
||||
self.assertFalse(mock_utility.notification.called)
|
||||
self.assertTrue(mock_logger.warning.called) #pylint: disable=no-member
|
||||
|
||||
@@ -158,7 +180,7 @@ class CertificatesTest(BaseCertManagerTest):
|
||||
mock_utility, mock_logger, mock_verifier):
|
||||
mock_verifier.return_value = None
|
||||
mock_report.return_value = ""
|
||||
self._certificates(self.cli_config)
|
||||
self._certificates(self.config)
|
||||
self.assertFalse(mock_logger.warning.called) #pylint: disable=no-member
|
||||
self.assertTrue(mock_report.called)
|
||||
self.assertTrue(mock_utility.called)
|
||||
@@ -167,20 +189,19 @@ class CertificatesTest(BaseCertManagerTest):
|
||||
@mock.patch('certbot.cert_manager.logger')
|
||||
@test_util.patch_get_utility()
|
||||
def test_certificates_no_files(self, mock_utility, mock_logger):
|
||||
tempdir = tempfile.mkdtemp()
|
||||
|
||||
cli_config = configuration.NamespaceConfig(mock.MagicMock(
|
||||
config_dir=tempdir,
|
||||
work_dir=tempdir,
|
||||
logs_dir=tempdir,
|
||||
quiet=False,
|
||||
empty_tempdir = tempfile.mkdtemp()
|
||||
empty_config = configuration.NamespaceConfig(mock.MagicMock(
|
||||
config_dir=os.path.join(empty_tempdir, "config"),
|
||||
work_dir=os.path.join(empty_tempdir, "work"),
|
||||
logs_dir=os.path.join(empty_tempdir, "logs"),
|
||||
quiet=False
|
||||
))
|
||||
|
||||
os.makedirs(os.path.join(tempdir, "renewal"))
|
||||
self._certificates(cli_config)
|
||||
os.makedirs(empty_config.renewal_configs_dir)
|
||||
self._certificates(empty_config)
|
||||
self.assertFalse(mock_logger.warning.called) #pylint: disable=no-member
|
||||
self.assertTrue(mock_utility.called)
|
||||
shutil.rmtree(tempdir)
|
||||
shutil.rmtree(empty_tempdir)
|
||||
|
||||
@mock.patch('certbot.cert_manager.ocsp.RevocationChecker.ocsp_revoked')
|
||||
def test_report_human_readable(self, mock_revoked):
|
||||
@@ -261,7 +282,7 @@ class SearchLineagesTest(BaseCertManagerTest):
         mock_renewable_cert.side_effect = errors.CertStorageError
         from certbot import cert_manager
         # pylint: disable=protected-access
-        self.assertEqual(cert_manager._search_lineages(self.cli_config, lambda x: x, "check"),
+        self.assertEqual(cert_manager._search_lineages(self.config, lambda x: x, "check"),
                          "check")
         self.assertTrue(mock_make_or_verify_dir.called)

@@ -278,7 +299,7 @@ class LineageForCertnameTest(BaseCertManagerTest):
         mock_match = mock.Mock(lineagename="example.com")
         mock_renewable_cert.return_value = mock_match
         from certbot import cert_manager
-        self.assertEqual(cert_manager.lineage_for_certname(self.cli_config, "example.com"),
+        self.assertEqual(cert_manager.lineage_for_certname(self.config, "example.com"),
                          mock_match)
         self.assertTrue(mock_make_or_verify_dir.called)

@@ -288,7 +309,7 @@ class LineageForCertnameTest(BaseCertManagerTest):
                                       mock_make_or_verify_dir):
         mock_renewal_conf_file.return_value = "other.com.conf"
         from certbot import cert_manager
-        self.assertEqual(cert_manager.lineage_for_certname(self.cli_config, "example.com"),
+        self.assertEqual(cert_manager.lineage_for_certname(self.config, "example.com"),
                          None)
         self.assertTrue(mock_make_or_verify_dir.called)

@@ -298,7 +319,7 @@ class LineageForCertnameTest(BaseCertManagerTest):
                                       mock_make_or_verify_dir):
         mock_renewal_conf_file.side_effect = errors.CertStorageError()
         from certbot import cert_manager
-        self.assertEqual(cert_manager.lineage_for_certname(self.cli_config, "example.com"),
+        self.assertEqual(cert_manager.lineage_for_certname(self.config, "example.com"),
                          None)
         self.assertTrue(mock_make_or_verify_dir.called)

@@ -317,7 +338,7 @@ class DomainsForCertnameTest(BaseCertManagerTest):
         mock_match.names.return_value = domains
         mock_renewable_cert.return_value = mock_match
         from certbot import cert_manager
-        self.assertEqual(cert_manager.domains_for_certname(self.cli_config, "example.com"),
+        self.assertEqual(cert_manager.domains_for_certname(self.config, "example.com"),
                          domains)
         self.assertTrue(mock_make_or_verify_dir.called)

@@ -327,7 +348,7 @@ class DomainsForCertnameTest(BaseCertManagerTest):
                                       mock_make_or_verify_dir):
         mock_renewal_conf_file.return_value = "somefile.conf"
         from certbot import cert_manager
-        self.assertEqual(cert_manager.domains_for_certname(self.cli_config, "other.com"),
+        self.assertEqual(cert_manager.domains_for_certname(self.config, "other.com"),
                          None)
         self.assertTrue(mock_make_or_verify_dir.called)

@@ -337,15 +358,8 @@ class RenameLineageTest(BaseCertManagerTest):

     def setUp(self):
         super(RenameLineageTest, self).setUp()
-        self.mock_config = configuration.NamespaceConfig(
-            namespace=mock.MagicMock(
-                config_dir=self.tempdir,
-                work_dir=self.tempdir,
-                logs_dir=self.tempdir,
-                certname="example.org",
-                new_certname="after",
-            )
-        )
+        self.config.certname = "example.org"
+        self.config.new_certname = "after"

     def _call(self, *args, **kwargs):
         from certbot import cert_manager
@@ -354,81 +368,76 @@ class RenameLineageTest(BaseCertManagerTest):
     @mock.patch('certbot.storage.renewal_conf_files')
     @test_util.patch_get_utility()
     def test_no_certname(self, mock_get_utility, mock_renewal_conf_files):
-        mock_config = mock.Mock(certname=None, new_certname="two")
+        self.config.certname = None
+        self.config.new_certname = "two"

         # if not choices
         mock_renewal_conf_files.return_value = []
-        self.assertRaises(errors.Error, self._call, mock_config)
+        self.assertRaises(errors.Error, self._call, self.config)

         mock_renewal_conf_files.return_value = ["one.conf"]
-        util_mock = mock.Mock()
+        util_mock = mock_get_utility()
         util_mock.menu.return_value = (display_util.CANCEL, 0)
-        mock_get_utility.return_value = util_mock
-        self.assertRaises(errors.Error, self._call, mock_config)
+        self.assertRaises(errors.Error, self._call, self.config)

         util_mock.menu.return_value = (display_util.OK, -1)
-        self.assertRaises(errors.Error, self._call, mock_config)
+        self.assertRaises(errors.Error, self._call, self.config)

     @test_util.patch_get_utility()
     def test_no_new_certname(self, mock_get_utility):
-        mock_config = mock.Mock(certname="one", new_certname=None)
+        self.config.certname = "one"
+        self.config.new_certname = None

-        util_mock = mock.Mock()
+        util_mock = mock_get_utility()
         util_mock.input.return_value = (display_util.CANCEL, "name")
-        mock_get_utility.return_value = util_mock
-        self.assertRaises(errors.Error, self._call, mock_config)
+        self.assertRaises(errors.Error, self._call, self.config)

-        util_mock = mock.Mock()
         util_mock.input.return_value = (display_util.OK, None)
-        mock_get_utility.return_value = util_mock
-        self.assertRaises(errors.Error, self._call, mock_config)
+        self.assertRaises(errors.Error, self._call, self.config)

     @test_util.patch_get_utility()
     @mock.patch('certbot.cert_manager.lineage_for_certname')
     def test_no_existing_certname(self, mock_lineage_for_certname, unused_get_utility):
-        mock_config = mock.Mock(certname="one", new_certname="two")
+        self.config.certname = "one"
+        self.config.new_certname = "two"
         mock_lineage_for_certname.return_value = None
         self.assertRaises(errors.ConfigurationError,
-                          self._call, mock_config)
+                          self._call, self.config)

     @test_util.patch_get_utility()
     @mock.patch("certbot.storage.RenewableCert._check_symlinks")
     def test_rename_cert(self, mock_check, unused_get_utility):
         mock_check.return_value = True
-        mock_config = self.mock_config
-        self._call(mock_config)
+        self._call(self.config)
         from certbot import cert_manager
-        updated_lineage = cert_manager.lineage_for_certname(mock_config, mock_config.new_certname)
+        updated_lineage = cert_manager.lineage_for_certname(self.config, self.config.new_certname)
         self.assertTrue(updated_lineage is not None)
-        self.assertEqual(updated_lineage.lineagename, mock_config.new_certname)
+        self.assertEqual(updated_lineage.lineagename, self.config.new_certname)

     @test_util.patch_get_utility()
     @mock.patch("certbot.storage.RenewableCert._check_symlinks")
     def test_rename_cert_interactive_certname(self, mock_check, mock_get_utility):
         mock_check.return_value = True
-        mock_config = self.mock_config
-        mock_config.certname = None
-        util_mock = mock.Mock()
+        self.config.certname = None
+        util_mock = mock_get_utility()
         util_mock.menu.return_value = (display_util.OK, 0)
-        mock_get_utility.return_value = util_mock
-        self._call(mock_config)
+        self._call(self.config)
         from certbot import cert_manager
-        updated_lineage = cert_manager.lineage_for_certname(mock_config, mock_config.new_certname)
+        updated_lineage = cert_manager.lineage_for_certname(self.config, self.config.new_certname)
         self.assertTrue(updated_lineage is not None)
-        self.assertEqual(updated_lineage.lineagename, mock_config.new_certname)
+        self.assertEqual(updated_lineage.lineagename, self.config.new_certname)

     @test_util.patch_get_utility()
     @mock.patch("certbot.storage.RenewableCert._check_symlinks")
     def test_rename_cert_bad_new_certname(self, mock_check, unused_get_utility):
         mock_check.return_value = True
-        mock_config = self.mock_config

         # for example, don't rename to existing certname
-        mock_config.new_certname = "example.org"
-        self.assertRaises(errors.ConfigurationError, self._call, mock_config)
+        self.config.new_certname = "example.org"
+        self.assertRaises(errors.ConfigurationError, self._call, self.config)

-        mock_config.new_certname = "one{0}two".format(os.path.sep)
-        self.assertRaises(errors.ConfigurationError, self._call, mock_config)
+        self.config.new_certname = "one{0}two".format(os.path.sep)
+        self.assertRaises(errors.ConfigurationError, self._call, self.config)


 class DuplicativeCertsTest(storage_test.BaseRenewableCertTest):
@@ -436,38 +445,128 @@ class DuplicativeCertsTest(storage_test.BaseRenewableCertTest):

     def setUp(self):
         super(DuplicativeCertsTest, self).setUp()
-        self.config.write()
+        self.config_file.write()
         self._write_out_ex_kinds()

     @mock.patch('certbot.util.make_or_verify_dir')
     def test_find_duplicative_names(self, unused_makedir):
         from certbot.cert_manager import find_duplicative_certs
-        test_cert = test_util.load_vector('cert-san.pem')
+        test_cert = test_util.load_vector('cert-san_512.pem')
         with open(self.test_rc.cert, 'wb') as f:
             f.write(test_cert)

         # No overlap at all
         result = find_duplicative_certs(
-            self.cli_config, ['wow.net', 'hooray.org'])
+            self.config, ['wow.net', 'hooray.org'])
         self.assertEqual(result, (None, None))

         # Totally identical
         result = find_duplicative_certs(
-            self.cli_config, ['example.com', 'www.example.com'])
+            self.config, ['example.com', 'www.example.com'])
         self.assertTrue(result[0].configfile.filename.endswith('example.org.conf'))
         self.assertEqual(result[1], None)

         # Superset
         result = find_duplicative_certs(
-            self.cli_config, ['example.com', 'www.example.com', 'something.new'])
+            self.config, ['example.com', 'www.example.com', 'something.new'])
         self.assertEqual(result[0], None)
         self.assertTrue(result[1].configfile.filename.endswith('example.org.conf'))

         # Partial overlap doesn't count
         result = find_duplicative_certs(
-            self.cli_config, ['example.com', 'something.new'])
+            self.config, ['example.com', 'something.new'])
         self.assertEqual(result, (None, None))

+class CertPathToLineageTest(storage_test.BaseRenewableCertTest):
+    """Tests for certbot.cert_manager.cert_path_to_lineage"""
+
+    def setUp(self):
+        super(CertPathToLineageTest, self).setUp()
+        self.config_file.write()
+        self._write_out_ex_kinds()
+        self.fullchain = os.path.join(self.config.config_dir, 'live', 'example.org',
+                                      'fullchain.pem')
+        self.config.cert_path = (self.fullchain, '')
+
+    def _call(self, cli_config):
+        from certbot.cert_manager import cert_path_to_lineage
+        return cert_path_to_lineage(cli_config)
+
+    def _archive_files(self, cli_config, filetype):
+        from certbot.cert_manager import _archive_files
+        return _archive_files(cli_config, filetype)
+
+    def test_basic_match(self):
+        self.assertEqual('example.org', self._call(self.config))
+
+    def test_no_match_exists(self):
+        bad_test_config = self.config
+        bad_test_config.cert_path = os.path.join(self.config.config_dir, 'live',
+                                                 'SailorMoon', 'fullchain.pem')
+        self.assertRaises(errors.Error, self._call, bad_test_config)
+
+    @mock.patch('certbot.cert_manager._acceptable_matches')
+    def test_options_fullchain(self, mock_acceptable_matches):
+        mock_acceptable_matches.return_value = [lambda x: x.fullchain_path]
+        self.config.fullchain_path = self.fullchain
+        self.assertEqual('example.org', self._call(self.config))
+
+    @mock.patch('certbot.cert_manager._acceptable_matches')
+    def test_options_cert_path(self, mock_acceptable_matches):
+        mock_acceptable_matches.return_value = [lambda x: x.cert_path]
+        test_cert_path = os.path.join(self.config.config_dir, 'live', 'example.org',
+                                      'cert.pem')
+        self.config.cert_path = (test_cert_path, '')
+        self.assertEqual('example.org', self._call(self.config))
+
+    @mock.patch('certbot.cert_manager._acceptable_matches')
+    def test_options_archive_cert(self, mock_acceptable_matches):
+        # Also this and the next test check that the regex of _archive_files is working.
+        self.config.cert_path = (os.path.join(self.config.config_dir, 'archive', 'example.org',
+                                              'cert11.pem'), '')
+        mock_acceptable_matches.return_value = [lambda x: self._archive_files(x, 'cert')]
+        self.assertEqual('example.org', self._call(self.config))
+
+    @mock.patch('certbot.cert_manager._acceptable_matches')
+    def test_options_archive_fullchain(self, mock_acceptable_matches):
+        self.config.cert_path = (os.path.join(self.config.config_dir, 'archive',
+                                              'example.org', 'fullchain11.pem'), '')
+        mock_acceptable_matches.return_value = [lambda x:
+                                                self._archive_files(x, 'fullchain')]
+        self.assertEqual('example.org', self._call(self.config))
+
+
+class MatchAndCheckOverlaps(storage_test.BaseRenewableCertTest):
+    """Tests for certbot.cert_manager.match_and_check_overlaps w/o overlapping archive dirs."""
+    # A test with real overlapping archive dirs can be found in tests/boulder_integration.sh
+    def setUp(self):
+        super(MatchAndCheckOverlaps, self).setUp()
+        self.config_file.write()
+        self._write_out_ex_kinds()
+        self.fullchain = os.path.join(self.config.config_dir, 'live', 'example.org',
+                                      'fullchain.pem')
+        self.config.cert_path = (self.fullchain, '')
+
+    def _call(self, cli_config, acceptable_matches, match_func, rv_func):
+        from certbot.cert_manager import match_and_check_overlaps
+        return match_and_check_overlaps(cli_config, acceptable_matches, match_func, rv_func)
+
+    def test_basic_match(self):
+        from certbot.cert_manager import _acceptable_matches
+        self.assertEqual(['example.org'], self._call(self.config, _acceptable_matches(),
+                         lambda x: self.config.cert_path[0], lambda x: x.lineagename))
+
+    @mock.patch('certbot.cert_manager._search_lineages')
+    def test_no_matches(self, mock_search_lineages):
+        mock_search_lineages.return_value = []
+        self.assertRaises(errors.Error, self._call, self.config, None, None, None)
+
+    @mock.patch('certbot.cert_manager._search_lineages')
+    def test_too_many_matches(self, mock_search_lineages):
+        mock_search_lineages.return_value = ['spider', 'dance']
+        self.assertRaises(errors.OverlappingMatchFound, self._call, self.config, None, None, None)
+

 if __name__ == "__main__":
     unittest.main()  # pragma: no cover

@@ -3,6 +3,7 @@ import argparse
 import unittest
 import os
 import tempfile
+import copy

 import mock
 import six
@@ -15,6 +16,8 @@ from certbot import constants
 from certbot import errors
 from certbot.plugins import disco

+import certbot.tests.util as test_util
+
 from certbot.tests.util import TempDirTestCase

 PLUGINS = disco.PluginsRegistry.find_all()
@@ -40,7 +43,7 @@ class TestReadFile(TempDirTestCase):



-class ParseTest(unittest.TestCase):
+class ParseTest(unittest.TestCase):  # pylint: disable=too-many-public-methods
     '''Test the cli args entrypoint'''

     _multiprocess_can_split_ = True
@@ -49,24 +52,41 @@ class ParseTest(unittest.TestCase):
         reload_module(cli)

     @staticmethod
-    def parse(*args, **kwargs):
+    def _unmocked_parse(*args, **kwargs):
         """Get result of cli.prepare_and_parse_args."""
         return cli.prepare_and_parse_args(PLUGINS, *args, **kwargs)

+    @staticmethod
+    def parse(*args, **kwargs):
+        """Mocks zope.component.getUtility and calls _unmocked_parse."""
+        with test_util.patch_get_utility():
+            return ParseTest._unmocked_parse(*args, **kwargs)
+
     def _help_output(self, args):
         "Run a command, and return the output string for scrutiny"

         output = six.StringIO()
+
+        def write_msg(message, *args, **kwargs):  # pylint: disable=missing-docstring,unused-argument
+            output.write(message)
+
         with mock.patch('certbot.main.sys.stdout', new=output):
-            with mock.patch('certbot.main.sys.stderr'):
-                self.assertRaises(SystemExit, self.parse, args, output)
+            with test_util.patch_get_utility() as mock_get_utility:
+                mock_get_utility().notification.side_effect = write_msg
+                with mock.patch('certbot.main.sys.stderr'):
+                    self.assertRaises(SystemExit, self._unmocked_parse, args, output)
+
         return output.getvalue()

     @mock.patch("certbot.cli.flag_default")
     def test_cli_ini_domains(self, mock_flag_default):
         tmp_config = tempfile.NamedTemporaryFile()
         # use a shim to get ConfigArgParse to pick up tmp_config
-        shim = lambda v: constants.CLI_DEFAULTS[v] if v != "config_files" else [tmp_config.name]
+        shim = (
+            lambda v: copy.deepcopy(constants.CLI_DEFAULTS[v])
+            if v != "config_files"
+            else [tmp_config.name]
+        )
         mock_flag_default.side_effect = shim

         namespace = self.parse(["certonly"])
@@ -109,6 +129,7 @@ class ParseTest(unittest.TestCase):
         self.assertTrue("--dialog" not in out)
         self.assertTrue("%s" not in out)
         self.assertTrue("{0}" not in out)
+        self.assertTrue("--renew-hook" not in out)

         out = self._help_output(['-h', 'nginx'])
         if "nginx" in PLUGINS:
@@ -323,6 +344,76 @@ class ParseTest(unittest.TestCase):
         self.assertRaises(
             errors.Error, self.parse, "-n --force-interactive".split())

+    def test_deploy_hook_conflict(self):
+        with mock.patch("certbot.cli.sys.stderr"):
+            self.assertRaises(SystemExit, self.parse,
+                              "--renew-hook foo --deploy-hook bar".split())
+
+    def test_deploy_hook_matches_renew_hook(self):
+        value = "foo"
+        namespace = self.parse(["--renew-hook", value,
+                                "--deploy-hook", value,
+                                "--disable-hook-validation"])
+        self.assertEqual(namespace.deploy_hook, value)
+        self.assertEqual(namespace.renew_hook, value)
+
+    def test_deploy_hook_sets_renew_hook(self):
+        value = "foo"
+        namespace = self.parse(
+            ["--deploy-hook", value, "--disable-hook-validation"])
+        self.assertEqual(namespace.deploy_hook, value)
+        self.assertEqual(namespace.renew_hook, value)
+
+    def test_renew_hook_conflict(self):
+        with mock.patch("certbot.cli.sys.stderr"):
+            self.assertRaises(SystemExit, self.parse,
+                              "--deploy-hook foo --renew-hook bar".split())
+
+    def test_renew_hook_matches_deploy_hook(self):
+        value = "foo"
+        namespace = self.parse(["--deploy-hook", value,
+                                "--renew-hook", value,
+                                "--disable-hook-validation"])
+        self.assertEqual(namespace.deploy_hook, value)
+        self.assertEqual(namespace.renew_hook, value)
+
+    def test_renew_hook_does_not_set_renew_hook(self):
+        value = "foo"
+        namespace = self.parse(
+            ["--renew-hook", value, "--disable-hook-validation"])
+        self.assertEqual(namespace.deploy_hook, None)
+        self.assertEqual(namespace.renew_hook, value)
+
+    def test_max_log_backups_error(self):
+        with mock.patch('certbot.cli.sys.stderr'):
+            self.assertRaises(
+                SystemExit, self.parse, "--max-log-backups foo".split())
+            self.assertRaises(
+                SystemExit, self.parse, "--max-log-backups -42".split())
+
+    def test_max_log_backups_success(self):
+        value = "42"
+        namespace = self.parse(["--max-log-backups", value])
+        self.assertEqual(namespace.max_log_backups, int(value))
+
+    def test_unchanging_defaults(self):
+        namespace = self.parse([])
+        self.assertEqual(namespace.domains, [])
+        self.assertEqual(namespace.pref_challs, [])
+
+        namespace.pref_challs = [challenges.HTTP01.typ]
+        namespace.domains = ['example.com']
+
+        namespace = self.parse([])
+        self.assertEqual(namespace.domains, [])
+        self.assertEqual(namespace.pref_challs, [])
+
+    def test_no_directory_hooks_set(self):
+        self.assertFalse(self.parse(["--no-directory-hooks"]).directory_hooks)
+
+    def test_no_directory_hooks_unset(self):
+        self.assertTrue(self.parse([]).directory_hooks)
+

 class DefaultTest(unittest.TestCase):
     """Tests for certbot.cli._Default."""
@@ -353,6 +444,10 @@ class SetByCliTest(unittest.TestCase):
     def setUp(self):
         reload_module(cli)

+    def test_deploy_hook(self):
+        self.assertTrue(_call_set_by_cli(
+            'renew_hook', '--deploy-hook foo'.split(), 'renew'))
+
     def test_webroot_map(self):
         args = '-w /var/www/html -d example.com'.split()
         verb = 'renew'
@@ -397,9 +492,10 @@ class SetByCliTest(unittest.TestCase):

 def _call_set_by_cli(var, args, verb):
     with mock.patch('certbot.cli.helpful_parser') as mock_parser:
-        mock_parser.args = args
-        mock_parser.verb = verb
-        return cli.set_by_cli(var)
+        with test_util.patch_get_utility():
+            mock_parser.args = args
+            mock_parser.verb = verb
+            return cli.set_by_cli(var)


 if __name__ == '__main__':

@@ -18,23 +18,17 @@ import certbot.tests.util as test_util


 KEY = test_util.load_vector("rsa512_key.pem")
-CSR_SAN = test_util.load_vector("csr-san.pem")
+CSR_SAN = test_util.load_vector("csr-san_512.pem")


-class ConfigHelper(object):
-    """Creates a dummy object to imitate a namespace object
-
-    Example: cfg = ConfigHelper(redirect=True, hsts=False, uir=False)
-    will result in: cfg.redirect=True, cfg.hsts=False, etc.
-    """
-    def __init__(self, **kwds):
-        self.__dict__.update(kwds)
-
-class RegisterTest(unittest.TestCase):
+class RegisterTest(test_util.ConfigTestCase):
     """Tests for certbot.client.register."""

     def setUp(self):
-        self.config = mock.MagicMock(rsa_key_size=1024, register_unsafely_without_email=False)
+        super(RegisterTest, self).setUp()
+        self.config.rsa_key_size = 1024
+        self.config.register_unsafely_without_email = False
         self.config.email = "alias@example.com"
         self.account_storage = account.AccountMemoryStorage()
         self.tos_cb = mock.MagicMock()

@@ -82,6 +76,7 @@ class RegisterTest(unittest.TestCase):
     @mock.patch("certbot.account.report_new_account")
     def test_email_invalid_noninteractive(self, _rep):
         from acme import messages
+        self.config.noninteractive_mode = True
         msg = "DNS problem: NXDOMAIN looking up MX for example.com"
         mx_err = messages.Error.with_code('invalidContact', detail=msg)
         with mock.patch("certbot.client.acme_client.Client") as mock_client:
@@ -116,14 +111,12 @@ class RegisterTest(unittest.TestCase):
         self.assertFalse(mock_handle.called)


-class ClientTestCommon(unittest.TestCase):
+class ClientTestCommon(test_util.ConfigTestCase):
     """Common base class for certbot.client.Client tests."""
     def setUp(self):
-        self.config = mock.MagicMock(
-            no_verify_ssl=False,
-            config_dir="/etc/letsencrypt",
-            work_dir="/var/lib/letsencrypt",
-            allow_subset_of_names=False)
+        super(ClientTestCommon, self).setUp()
+        self.config.no_verify_ssl = False
+        self.config.allow_subset_of_names = False

         # pylint: disable=star-args
         self.account = mock.MagicMock(**{"key.pem": KEY})
@@ -143,7 +136,7 @@ class ClientTest(ClientTestCommon):
         super(ClientTest, self).setUp()

         self.config.allow_subset_of_names = False
-        self.config.config_dir = "/etc/letsencrypt"
         self.config.dry_run = False
+        self.eg_domains = ["example.com", "www.example.com"]

     def test_init_acme_verify_ssl(self):
@@ -241,15 +234,37 @@ class ClientTest(ClientTestCommon):
         self.assertEqual(1, mock_get_utility().notification.call_count)

     @mock.patch("certbot.client.crypto_util")
-    @test_util.patch_get_utility()
-    def test_obtain_certificate(self, unused_mock_get_utility,
-                                mock_crypto_util):
-        self._mock_obtain_certificate()
-
+    def test_obtain_certificate(self, mock_crypto_util):
         csr = util.CSR(form="pem", file=None, data=CSR_SAN)
         mock_crypto_util.init_save_csr.return_value = csr
         mock_crypto_util.init_save_key.return_value = mock.sentinel.key
-        domains = ["example.com", "www.example.com"]

+        self._test_obtain_certificate_common(mock.sentinel.key, csr)
+
+        mock_crypto_util.init_save_key.assert_called_once_with(
+            self.config.rsa_key_size, self.config.key_dir)
+        mock_crypto_util.init_save_csr.assert_called_once_with(
+            mock.sentinel.key, self.eg_domains, self.config.csr_dir)
+
+    @mock.patch("certbot.client.crypto_util")
+    @mock.patch("certbot.client.acme_crypto_util")
+    def test_obtain_certificate_dry_run(self, mock_acme_crypto, mock_crypto):
+        csr = util.CSR(form="pem", file=None, data=CSR_SAN)
+        mock_acme_crypto.make_csr.return_value = CSR_SAN
+        mock_crypto.make_key.return_value = mock.sentinel.key_pem
+        key = util.Key(file=None, pem=mock.sentinel.key_pem)
+
+        self.client.config.dry_run = True
+        self._test_obtain_certificate_common(key, csr)
+
+        mock_crypto.make_key.assert_called_once_with(self.config.rsa_key_size)
+        mock_acme_crypto.make_csr.assert_called_once_with(
+            mock.sentinel.key_pem, self.eg_domains, self.config.must_staple)
+        mock_crypto.init_save_key.assert_not_called()
+        mock_crypto.init_save_csr.assert_not_called()
+
+    def _test_obtain_certificate_common(self, key, csr):
+        self._mock_obtain_certificate()

         # return_value is essentially set to (None, None) in
         # _mock_obtain_certificate(), which breaks this test.
@@ -258,7 +273,7 @@ class ClientTest(ClientTestCommon):
         authzr = []

         # domain ordering should not be affected by authorization order
-        for domain in reversed(domains):
+        for domain in reversed(self.eg_domains):
             authzr.append(
                 mock.MagicMock(
                     body=mock.MagicMock(
@@ -267,14 +282,12 @@ class ClientTest(ClientTestCommon):

         self.client.auth_handler.get_authorizations.return_value = authzr

-        self.assertEqual(
-            self.client.obtain_certificate(domains),
-            (mock.sentinel.certr, mock.sentinel.chain, mock.sentinel.key, csr))
+        with test_util.patch_get_utility():
+            result = self.client.obtain_certificate(self.eg_domains)

-        mock_crypto_util.init_save_key.assert_called_once_with(
-            self.config.rsa_key_size, self.config.key_dir)
-        mock_crypto_util.init_save_csr.assert_called_once_with(
-            mock.sentinel.key, domains, self.config.csr_dir)
+        self.assertEqual(
+            result,
+            (mock.sentinel.certr, mock.sentinel.chain, key, csr))
         self._check_obtain_certificate()

     @mock.patch('certbot.client.Client.obtain_certificate')
@@ -301,14 +314,14 @@ class ClientTest(ClientTestCommon):
     @mock.patch("certbot.cli.helpful_parser")
     def test_save_certificate(self, mock_parser):
         # pylint: disable=too-many-locals
-        certs = ["matching_cert.pem", "cert.pem", "cert-san.pem"]
+        certs = ["cert_512.pem", "cert-san_512.pem"]
         tmp_path = tempfile.mkdtemp()
         os.chmod(tmp_path, 0o755)  # TODO: really??

         certr = mock.MagicMock(body=test_util.load_comparable_cert(certs[0]))
-        chain_cert = [test_util.load_comparable_cert(certs[1]),
-                      test_util.load_comparable_cert(certs[2])]
-        candidate_cert_path = os.path.join(tmp_path, "certs", "cert.pem")
+        chain_cert = [test_util.load_comparable_cert(certs[0]),
+                      test_util.load_comparable_cert(certs[1])]
+        candidate_cert_path = os.path.join(tmp_path, "certs", "cert_512.pem")
         candidate_chain_path = os.path.join(tmp_path, "chains", "chain.pem")
         candidate_fullchain_path = os.path.join(tmp_path, "chains", "fullchain.pem")
         mock_parser.verb = "certonly"
@@ -333,8 +346,8 @@ class ClientTest(ClientTestCommon):

         with open(chain_path, "rb") as chain_file:
             chain_contents = chain_file.read()
-        self.assertEqual(chain_contents, test_util.load_vector(certs[1]) +
-                         test_util.load_vector(certs[2]))
+        self.assertEqual(chain_contents, test_util.load_vector(certs[0]) +
+                         test_util.load_vector(certs[1]))

         shutil.rmtree(tmp_path)

@@ -4,56 +4,62 @@ import unittest

 import mock

+from certbot import constants
 from certbot import errors

+from certbot.tests import util as test_util

-class NamespaceConfigTest(unittest.TestCase):
+class NamespaceConfigTest(test_util.ConfigTestCase):
     """Tests for certbot.configuration.NamespaceConfig."""

     def setUp(self):
-        self.namespace = mock.MagicMock(
-            config_dir='/tmp/config', work_dir='/tmp/foo',
-            logs_dir="/tmp/bar", foo='bar',
-            server='https://acme-server.org:443/new',
-            tls_sni_01_port=1234, http01_port=4321)
-        from certbot.configuration import NamespaceConfig
-        self.config = NamespaceConfig(self.namespace)
+        super(NamespaceConfigTest, self).setUp()
+        self.config.foo = 'bar'
+        self.config.server = 'https://acme-server.org:443/new'
+        self.config.tls_sni_01_port = 1234
+        self.config.http01_port = 4321

     def test_init_same_ports(self):
-        self.namespace.tls_sni_01_port = 4321
+        self.config.namespace.tls_sni_01_port = 4321
         from certbot.configuration import NamespaceConfig
-        self.assertRaises(errors.Error, NamespaceConfig, self.namespace)
+        self.assertRaises(errors.Error, NamespaceConfig, self.config.namespace)

     def test_proxy_getattr(self):
         self.assertEqual(self.config.foo, 'bar')
-        self.assertEqual(self.config.work_dir, '/tmp/foo')
+        self.assertEqual(self.config.work_dir, os.path.join(self.tempdir, 'work'))

     def test_server_path(self):
         self.assertEqual(['acme-server.org:443', 'new'],
                          self.config.server_path.split(os.path.sep))

-        self.namespace.server = ('http://user:pass@acme.server:443'
+        self.config.namespace.server = ('http://user:pass@acme.server:443'
                                  '/p/a/t/h;parameters?query#fragment')
         self.assertEqual(['user:pass@acme.server:443', 'p', 'a', 't', 'h'],
                          self.config.server_path.split(os.path.sep))

     @mock.patch('certbot.configuration.constants')
-    def test_dynamic_dirs(self, constants):
-        constants.ACCOUNTS_DIR = 'acc'
-        constants.BACKUP_DIR = 'backups'
-        constants.CSR_DIR = 'csr'
+    def test_dynamic_dirs(self, mock_constants):
+        mock_constants.ACCOUNTS_DIR = 'acc'
+        mock_constants.BACKUP_DIR = 'backups'
+        mock_constants.CSR_DIR = 'csr'

-        constants.IN_PROGRESS_DIR = '../p'
-        constants.KEY_DIR = 'keys'
-        constants.TEMP_CHECKPOINT_DIR = 't'
+        mock_constants.IN_PROGRESS_DIR = '../p'
+        mock_constants.KEY_DIR = 'keys'
+        mock_constants.TEMP_CHECKPOINT_DIR = 't'

         self.assertEqual(
-            self.config.accounts_dir, '/tmp/config/acc/acme-server.org:443/new')
-        self.assertEqual(self.config.backup_dir, '/tmp/foo/backups')
-        self.assertEqual(self.config.csr_dir, '/tmp/config/csr')
-        self.assertEqual(self.config.in_progress_dir, '/tmp/foo/../p')
-        self.assertEqual(self.config.key_dir, '/tmp/config/keys')
-        self.assertEqual(self.config.temp_checkpoint_dir, '/tmp/foo/t')
+            self.config.accounts_dir, os.path.join(
+                self.config.config_dir, 'acc/acme-server.org:443/new'))
+        self.assertEqual(
+            self.config.backup_dir, os.path.join(self.config.work_dir, 'backups'))
+        self.assertEqual(
+            self.config.csr_dir, os.path.join(self.config.config_dir, 'csr'))
+        self.assertEqual(
+            self.config.in_progress_dir, os.path.join(self.config.work_dir, '../p'))
+        self.assertEqual(
+            self.config.key_dir, os.path.join(self.config.config_dir, 'keys'))
+        self.assertEqual(
+            self.config.temp_checkpoint_dir, os.path.join(self.config.work_dir, 't'))

     def test_absolute_paths(self):
         from certbot.configuration import NamespaceConfig
@@ -90,15 +96,18 @@ class NamespaceConfigTest(unittest.TestCase):
         self.assertTrue(os.path.isabs(config.temp_checkpoint_dir))

     @mock.patch('certbot.configuration.constants')
-    def test_renewal_dynamic_dirs(self, constants):
-        constants.ARCHIVE_DIR = 'a'
-        constants.LIVE_DIR = 'l'
-        constants.RENEWAL_CONFIGS_DIR = 'renewal_configs'
+    def test_renewal_dynamic_dirs(self, mock_constants):
+        mock_constants.ARCHIVE_DIR = 'a'
+        mock_constants.LIVE_DIR = 'l'
+        mock_constants.RENEWAL_CONFIGS_DIR = 'renewal_configs'

-        self.assertEqual(self.config.default_archive_dir, '/tmp/config/a')
-        self.assertEqual(self.config.live_dir, '/tmp/config/l')
         self.assertEqual(
-            self.config.renewal_configs_dir, '/tmp/config/renewal_configs')
+            self.config.default_archive_dir, os.path.join(self.config.config_dir, 'a'))
+        self.assertEqual(
+            self.config.live_dir, os.path.join(self.config.config_dir, 'l'))
+        self.assertEqual(
+            self.config.renewal_configs_dir, os.path.join(
+                self.config.config_dir, 'renewal_configs'))

     def test_renewal_absolute_paths(self):
         from certbot.configuration import NamespaceConfig
@@ -126,6 +135,20 @@ class NamespaceConfigTest(unittest.TestCase):
         self.config.namespace.bar = 1337
         self.assertEqual(self.config.bar, 1337)

+    def test_hook_directories(self):
+        self.assertEqual(self.config.renewal_hooks_dir,
+                         os.path.join(self.config.config_dir,
+                                      constants.RENEWAL_HOOKS_DIR))
+        self.assertEqual(self.config.renewal_pre_hooks_dir,
+                         os.path.join(self.config.renewal_hooks_dir,
+                                      constants.RENEWAL_PRE_HOOKS_DIR))
+        self.assertEqual(self.config.renewal_deploy_hooks_dir,
+                         os.path.join(self.config.renewal_hooks_dir,
+                                      constants.RENEWAL_DEPLOY_HOOKS_DIR))
+        self.assertEqual(self.config.renewal_post_hooks_dir,
+                         os.path.join(self.config.renewal_hooks_dir,
+                                      constants.RENEWAL_POST_HOOKS_DIR))
+

 if __name__ == '__main__':
     unittest.main()  # pragma: no cover

@@ -17,11 +17,10 @@ RSA256_KEY = test_util.load_vector('rsa256_key.pem')
 RSA256_KEY_PATH = test_util.vector_path('rsa256_key.pem')
 RSA512_KEY = test_util.load_vector('rsa512_key.pem')
 RSA2048_KEY_PATH = test_util.vector_path('rsa2048_key.pem')
-CERT_PATH = test_util.vector_path('cert.pem')
-CERT = test_util.load_vector('cert.pem')
-SAN_CERT = test_util.load_vector('cert-san.pem')
-SS_CERT_PATH = test_util.vector_path('self_signed_cert.pem')
-SS_CERT = test_util.load_vector('self_signed_cert.pem')
+CERT_PATH = test_util.vector_path('cert_512.pem')
+CERT = test_util.load_vector('cert_512.pem')
+SS_CERT_PATH = test_util.vector_path('cert_2048.pem')
+SS_CERT = test_util.load_vector('cert_2048.pem')

 class InitSaveKeyTest(test_util.TempDirTestCase):
     """Tests for certbot.crypto_util.init_save_key."""
@@ -30,8 +29,7 @@ class InitSaveKeyTest(test_util.TempDirTestCase):

         logging.disable(logging.CRITICAL)
         zope.component.provideUtility(
-            mock.Mock(strict_permissions=True, dry_run=False),
-            interfaces.IConfig)
+            mock.Mock(strict_permissions=True), interfaces.IConfig)

     def tearDown(self):
         super(InitSaveKeyTest, self).tearDown()
@@ -51,16 +49,6 @@ class InitSaveKeyTest(test_util.TempDirTestCase):
         self.assertTrue('key-certbot.pem' in key.file)
         self.assertTrue(os.path.exists(os.path.join(self.tempdir, key.file)))

-    @mock.patch('certbot.crypto_util.make_key')
-    def test_success_dry_run(self, mock_make):
-        zope.component.provideUtility(
-            mock.Mock(strict_permissions=True, dry_run=True),
-            interfaces.IConfig)
-        mock_make.return_value = b'key_pem'
-        key = self._call(1024, self.tempdir)
-        self.assertEqual(key.pem, b'key_pem')
-        self.assertTrue(key.file is None)
-
     @mock.patch('certbot.crypto_util.make_key')
     def test_key_failure(self, mock_make):
         mock_make.side_effect = ValueError
@@ -74,12 +62,11 @@ class InitSaveCSRTest(test_util.TempDirTestCase):
         super(InitSaveCSRTest, self).setUp()

         zope.component.provideUtility(
-            mock.Mock(strict_permissions=True, dry_run=False),
-            interfaces.IConfig)
+            mock.Mock(strict_permissions=True), interfaces.IConfig)

     @mock.patch('acme.crypto_util.make_csr')
     @mock.patch('certbot.crypto_util.util.make_or_verify_dir')
-    def test_success(self, unused_mock_verify, mock_csr):
+    def test_it(self, unused_mock_verify, mock_csr):
         from certbot.crypto_util import init_save_csr

         mock_csr.return_value = b'csr_pem'
@@ -90,22 +77,6 @@ class InitSaveCSRTest(test_util.TempDirTestCase):
         self.assertEqual(csr.data, b'csr_pem')
         self.assertTrue('csr-certbot.pem' in csr.file)

-    @mock.patch('acme.crypto_util.make_csr')
-    @mock.patch('certbot.crypto_util.util.make_or_verify_dir')
-    def test_success_dry_run(self, unused_mock_verify, mock_csr):
-        from certbot.crypto_util import init_save_csr
-
-        zope.component.provideUtility(
-            mock.Mock(strict_permissions=True, dry_run=True),
-            interfaces.IConfig)
-        mock_csr.return_value = b'csr_pem'
-
-        csr = init_save_csr(
-            mock.Mock(pem='dummy_key'), 'example.com', self.tempdir)
-
-        self.assertEqual(csr.data, b'csr_pem')
-        self.assertTrue(csr.file is None)
-

 class ValidCSRTest(unittest.TestCase):
     """Tests for certbot.crypto_util.valid_csr."""
@@ -116,13 +87,13 @@ class ValidCSRTest(unittest.TestCase):
         return valid_csr(csr)

     def test_valid_pem_true(self):
-        self.assertTrue(self._call(test_util.load_vector('csr.pem')))
+        self.assertTrue(self._call(test_util.load_vector('csr_512.pem')))

     def test_valid_pem_san_true(self):
-        self.assertTrue(self._call(test_util.load_vector('csr-san.pem')))
+        self.assertTrue(self._call(test_util.load_vector('csr-san_512.pem')))

     def test_valid_der_false(self):
-        self.assertFalse(self._call(test_util.load_vector('csr.der')))
+        self.assertFalse(self._call(test_util.load_vector('csr_512.der')))

     def test_empty_false(self):
         self.assertFalse(self._call(''))
@@ -141,11 +112,11 @@ class CSRMatchesPubkeyTest(unittest.TestCase):

     def test_valid_true(self):
         self.assertTrue(self._call(
-            test_util.load_vector('csr.pem'), RSA512_KEY))
+            test_util.load_vector('csr_512.pem'), RSA512_KEY))

     def test_invalid_false(self):
         self.assertFalse(self._call(
-            test_util.load_vector('csr.pem'), RSA256_KEY))
+            test_util.load_vector('csr_512.pem'), RSA256_KEY))


 class ImportCSRFileTest(unittest.TestCase):
@@ -157,9 +128,9 @@ class ImportCSRFileTest(unittest.TestCase):
         return import_csr_file(*args, **kwargs)

     def test_der_csr(self):
-        csrfile = test_util.vector_path('csr.der')
-        data = test_util.load_vector('csr.der')
-        data_pem = test_util.load_vector('csr.pem')
+        csrfile = test_util.vector_path('csr_512.der')
+        data = test_util.load_vector('csr_512.der')
+        data_pem = test_util.load_vector('csr_512.pem')

         self.assertEqual(
             (OpenSSL.crypto.FILETYPE_PEM,
@@ -170,8 +141,8 @@ class ImportCSRFileTest(unittest.TestCase):
             self._call(csrfile, data))

     def test_pem_csr(self):
-        csrfile = test_util.vector_path('csr.pem')
-        data = test_util.load_vector('csr.pem')
+        csrfile = test_util.vector_path('csr_512.pem')
+        data = test_util.load_vector('csr_512.pem')

         self.assertEqual(
             (OpenSSL.crypto.FILETYPE_PEM,
@@ -183,8 +154,8 @@ class ImportCSRFileTest(unittest.TestCase):

     def test_bad_csr(self):
         self.assertRaises(errors.Error, self._call,
-                          test_util.vector_path('cert.pem'),
-                          test_util.load_vector('cert.pem'))
+                          test_util.vector_path('cert_512.pem'),
+                          test_util.load_vector('cert_512.pem'))


 class MakeKeyTest(unittest.TestCase):  # pylint: disable=too-few-public-methods
@@ -207,7 +178,7 @@ class VerifyCertSetup(unittest.TestCase):
         self.renewable_cert.cert = SS_CERT_PATH
         self.renewable_cert.chain = SS_CERT_PATH
         self.renewable_cert.privkey = RSA2048_KEY_PATH
-        self.renewable_cert.fullchain = test_util.vector_path('self_signed_fullchain.pem')
+        self.renewable_cert.fullchain = test_util.vector_path('cert_fullchain_2048.pem')

         self.bad_renewable_cert = mock.MagicMock()
         self.bad_renewable_cert.chain = SS_CERT_PATH
@@ -247,7 +218,7 @@ class VerifyRenewableCertSigTest(VerifyCertSetup):
         self.assertEqual(None, self._call(self.renewable_cert))

     def test_cert_sig_mismatch(self):
-        self.bad_renewable_cert.cert = test_util.vector_path('self_signed_cert_bad.pem')
+        self.bad_renewable_cert.cert = test_util.vector_path('cert_512_bad.pem')
         self.assertRaises(errors.Error, self._call, self.bad_renewable_cert)


@@ -280,9 +251,11 @@ class VerifyCertMatchesPrivKeyTest(VerifyCertSetup):

     def _call(self, renewable_cert):
         from certbot.crypto_util import verify_cert_matches_priv_key
-        return verify_cert_matches_priv_key(renewable_cert)
+        return verify_cert_matches_priv_key(renewable_cert.cert, renewable_cert.privkey)

     def test_cert_priv_key_match(self):
+        self.renewable_cert.cert = SS_CERT_PATH
+        self.renewable_cert.privkey = RSA2048_KEY_PATH
         self.assertEqual(None, self._call(self.renewable_cert))

     def test_cert_priv_key_mismatch(self):
@@ -301,7 +274,7 @@ class ValidPrivkeyTest(unittest.TestCase):
         return valid_privkey(privkey)

     def test_valid_true(self):
-        self.assertTrue(self._call(RSA256_KEY))
+        self.assertTrue(self._call(RSA512_KEY))

     def test_empty_false(self):
         self.assertFalse(self._call(''))
@@ -319,12 +292,12 @@ class GetSANsFromCertTest(unittest.TestCase):
         return get_sans_from_cert(*args, **kwargs)

     def test_single(self):
-        self.assertEqual([], self._call(test_util.load_vector('cert.pem')))
+        self.assertEqual([], self._call(test_util.load_vector('cert_512.pem')))

     def test_san(self):
         self.assertEqual(
             ['example.com', 'www.example.com'],
-            self._call(test_util.load_vector('cert-san.pem')))
+            self._call(test_util.load_vector('cert-san_512.pem')))


 class GetNamesFromCertTest(unittest.TestCase):
@@ -338,19 +311,19 @@ class GetNamesFromCertTest(unittest.TestCase):
     def test_single(self):
         self.assertEqual(
             ['example.com'],
-            self._call(test_util.load_vector('cert.pem')))
+            self._call(test_util.load_vector('cert_512.pem')))

     def test_san(self):
         self.assertEqual(
             ['example.com', 'www.example.com'],
-            self._call(test_util.load_vector('cert-san.pem')))
+            self._call(test_util.load_vector('cert-san_512.pem')))

     def test_common_name_sans_order(self):
         # Tests that the common name comes first
         # followed by the SANS in alphabetical order
         self.assertEqual(
             ['example.com'] + ['{0}.example.com'.format(c) for c in 'abcd'],
-            self._call(test_util.load_vector('cert-5sans.pem')))
+            self._call(test_util.load_vector('cert-5sans_512.pem')))

     def test_parse_non_cert(self):
         self.assertRaises(OpenSSL.crypto.Error, self._call, "hello there")
@@ -310,10 +310,11 @@ class ChooseNamesTest(unittest.TestCase):
     @test_util.patch_get_utility("certbot.display.ops.z_util")
     def test_choose_manually(self, mock_util):
         from certbot.display.ops import _choose_names_manually
+        utility_mock = mock_util()
         # No retry
-        mock_util().yesno.return_value = False
+        utility_mock.yesno.return_value = False
         # IDN and no retry
-        mock_util().input.return_value = (display_util.OK,
+        utility_mock.input.return_value = (display_util.OK,
                                           "uniçodé.com")
         self.assertEqual(_choose_names_manually(), [])
         # IDN exception with previous mocks
@@ -324,7 +325,7 @@ class ChooseNamesTest(unittest.TestCase):
         mock_sli.side_effect = unicode_error
         self.assertEqual(_choose_names_manually(), [])
         # Valid domains
-        mock_util().input.return_value = (display_util.OK,
+        utility_mock.input.return_value = (display_util.OK,
                                           ("example.com,"
                                            "under_score.example.com,"
                                            "justtld,"
@@ -332,14 +333,17 @@ class ChooseNamesTest(unittest.TestCase):
         self.assertEqual(_choose_names_manually(),
                          ["example.com", "under_score.example.com",
                           "justtld", "valid.example.com"])

     @test_util.patch_get_utility("certbot.display.ops.z_util")
     def test_choose_manually_retry(self, mock_util):
         from certbot.display.ops import _choose_names_manually
+        utility_mock = mock_util()
         # Three iterations
-        mock_util().input.return_value = (display_util.OK,
+        utility_mock.input.return_value = (display_util.OK,
                                           "uniçodé.com")
-        yn = mock.MagicMock()
-        yn.side_effect = [True, True, False]
-        mock_util().yesno = yn
+        utility_mock.yesno.side_effect = [True, True, False]
         _choose_names_manually()
-        self.assertEqual(mock_util().yesno.call_count, 3)
+        self.assertEqual(utility_mock.yesno.call_count, 3)


 class SuccessInstallationTest(unittest.TestCase):