Compare commits: nginx-reve...py3_metacl
254 commits
Commit SHA1s:

50fdb44f6a 552bfa5eb7 bf30226c69 f510f4bddf 9c15fd354f 726f3ce8b3 f40e04401f 398bd4a2cd
a024aaf59d 261d063b10 a9e01ade4c 5c7fc07ccf 6dc8b66760 590ec375ec 523cdc578d e0a5b1229f
6253acf335 a708504d5b 2d31598484 6b29d159a2 88ceaa38d5 e7db97df87 4a8e35289c 58626c3197
56fb667e15 0153c04af3 db938dcc0e 0e30621355 16b2539f72 b6afba0d64 b24d9dddc3 9996730fb1
2c502e6f8b bdaccb645b f5ad08047b 8fd3f6c64c 4d706ac77e 8231b1a19c 5ff7f2211e 7630550ac4
336950c0b9 a779e06d47 669312d248 4d082e22e6 af2cce4ca8 804fd4b78a 8cdb213a61 e9707ebc26
8d0d42a739 693cb1d162 8e9a4447ff bca0aa48c2 afb6260c34 3f291e51c6 fe8e0c98c5 cbd827382e
f01aa1295f c0dc31fd88 41ce108881 41ed6367b4 a26a78e84e 3077b51500 d4834da0f4 ba6bdb5099
79d90d6745 5ecb68f2ed b3e73bd2ab 065e923bc9 e405aaa4c1 9ea14d2e2b 1d0e3b1bfa d310ad18c7
53c6b9a08f 64d647774e f13fdccf04 2e6d65d9ec cc24b4e40a cc18da926e f4bac423fb 7a495f2656
77fdb4d7d6 e0ae356aa3 6357e051f4 d62c56f9c9 cee9ac586e a643877f88 7bc45121a1 fe682e779b
441625c610 cc344bfd1e e1878593d5 31805c5a5f 8bc9cd67f0 d8a54dc444 8121acf2c1 f0b337532c
559220c2ef 38d5144fff 78735fa2c3 e9bc4a319b a39d2fe55b b18696b6a0 6f86267a26 57bdc590df
43ba9cbf33 f3a0deba84 1e46d26ac3 990b211a76 457269b005 c3659c300b f3b23662f1 f1b7017c0c
ea3b78e3c9 02b56bd7f3 d13a4ed18d df50f2d5fa dea43e90b6 a7eadf8862 65d0b9674c 26bcaff85c
d5a90c5a6e 051664a142 7c073dbcaf d29c637bf9 d6af978472 3dfeb483ee 76a0cbf9c2 a0e84e65ce
11f2f1e576 d6b4e2001b 59a1387764 9c84fe1144 68e24a8ea7 20d0b91c71 ea2022588b eaf739184c
73bd801f35 42638afc75 e95e963ad6 2a142aa932 adec7a8fed dba6990f70 70a75ebe9d e48898a8c8
d467a4ae95 d5efefd979 09b5927e6a 7e6d2f1efe 608875cd65 99aec1394d fbace69b5e ac464a58e5
9277710f6f ad0a99a1f5 49edf17cb7 932ecbb9c2 90664f196f 789be8f9bc abc4a27613 1f45832460
a58c875b2a d6b247c002 4f0aeb12fa 530a9590e6 0416382633 9baf75d6c8 e085ff06a1 72b63ca5ac
45613fd31c b05be7fa65 43bbaadd11 a166396358 a2239baa45 a1aba5842e 8a9f21cdd3 0a4f926b16
c0068791ce b0aa8b7c0b a67a917eca 103039ca40 aa01b7d0c0 325a97c1ed bf695d048d 1bb2cfadf7
f43a95e9c1 522532dc30 6dd724e1f4 63136be2e5 bd231a3855 e9b57e1783 2c379cd363 b8f288a372
f420b19492 314c5f19e5 7e463bccad 368ca0c109 60dd67a60e 2cb9d9e2aa 5d58a3d847 28dad825af
f0f5defb6f fa97877cfb 2ba334a182 9e95208101 39472f88de 3acf5d1ef9 00634394f2 6eb459354f
f5a02714cd 887a6bcfce 288c4d956c 62ffcf5373 d557475bb6 e02adec26b 24ddc65cd4 8585cdd861
18f6deada8 a1713c0b79 a3a66cd25d a7d00ee21b 5388842e5b ed2168aaa8 d6b11fea72 a1aea021e7
1b6005cc61 0e92d4ea98 2abc94661a 8bc785ed46 0046428382 5d0888809f 8096b91496 e696766ed1
8b5d6879cc d039106b68 abed73a8e4 3951baf6c0 716f25743c b3ca6bb2b1 78d97ca023 f1554324da
840c943711 abdde886fa 20bca19420 e795a79547 02126c0961 0b843bb851 4d60f32865 069ce1c55f
eb26e0aacf 1173acfaf0 0aa9322280 89485f7463 4e73d7ce00 0137055c24
.gitignore (vendored, 7 changed lines)

@@ -35,3 +35,10 @@ tests/letstest/*.pem
tests/letstest/venv/

.venv

# pytest cache
.cache
.mypy_cache/

# docker files
.docker
.travis.yml (68 changed lines)

@@ -5,27 +5,42 @@ cache:
- $HOME/.cache/pip

before_install:
- '([ $TRAVIS_OS_NAME == linux ] && dpkg -s libaugeas0) || (brew update && brew install augeas python3)'
- '([ $TRAVIS_OS_NAME == linux ] && dpkg -s libaugeas0) || (brew update && brew install augeas python3 && brew upgrade python && brew link python)'

before_script:
- 'if [ $TRAVIS_OS_NAME = osx ] ; then ulimit -n 1024 ; fi'

matrix:
include:
- python: "2.7"
env: TOXENV=py27_install BOULDER_INTEGRATION=v1
sudo: required
services: docker
- python: "2.7"
env: TOXENV=py27_install BOULDER_INTEGRATION=v2
sudo: required
services: docker
- python: "2.7"
env: TOXENV=cover FYI="this also tests py27"
- sudo: required
env: TOXENV=nginx_compat
services: docker
before_install:
addons:
- python: "2.7"
env: TOXENV=lint
- python: "3.5"
env: TOXENV=mypy
- python: "2.7"
env: TOXENV=py27-oldest
env: TOXENV='py27-{acme,apache,certbot,dns,nginx}-oldest'
sudo: required
services: docker
- python: "2.6"
env: TOXENV=py26
- python: "3.4"
env: TOXENV=py34
sudo: required
services: docker
- python: "2.7"
env: TOXENV=py27_install BOULDER_INTEGRATION=1
- python: "3.6"
env: TOXENV=py36
sudo: required
services: docker
- sudo: required
@@ -33,55 +48,14 @@ matrix:
services: docker
before_install:
addons:
- sudo: required
env: TOXENV=nginx_compat
services: docker
before_install:
addons:
- sudo: required
env: TOXENV=le_auto_precise
services: docker
before_install:
addons:
- sudo: required
env: TOXENV=le_auto_trusty
services: docker
before_install:
addons:
- sudo: required
env: TOXENV=le_auto_wheezy
services: docker
before_install:
addons:
- sudo: required
env: TOXENV=le_auto_centos6
services: docker
before_install:
addons:
- sudo: required
env: TOXENV=docker_dev
services: docker
before_install:
addons:
- python: "2.7"
env: TOXENV=apacheconftest
sudo: required
- python: "3.3"
env: TOXENV=py33
sudo: required
services: docker
- python: "3.4"
env: TOXENV=py34
sudo: required
services: docker
- python: "3.5"
env: TOXENV=py35
sudo: required
services: docker
- python: "3.6"
env: TOXENV=py36
sudo: required
services: docker
- python: "2.7"
env: TOXENV=nginxroundtrip
CHANGELOG.md (246 changed lines)
@@ -2,6 +2,252 @@
|
||||
|
||||
Certbot adheres to [Semantic Versioning](http://semver.org/).
|
||||
|
||||
## 0.23.0 - 2018-04-04
|
||||
|
||||
### Added
|
||||
|
||||
* Support for OpenResty was added to the Nginx plugin.
|
||||
|
||||
### Changed
|
||||
|
||||
* The timestamps in Certbot's logfiles now use the system's local time zone
|
||||
rather than UTC.
|
||||
* Certbot's DNS plugins that use Lexicon now rely on Lexicon>=2.2.1 to be able
|
||||
to create and delete multiple TXT records on a single domain.
|
||||
* certbot-dns-google's test suite now works without an internet connection.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Removed a small window during which, if an error occurred, Certbot
  wouldn't clean up performed challenges.
|
||||
* The parameters `default` and `ipv6only` are now removed from `listen`
|
||||
directives when creating a new server block in the Nginx plugin.
|
||||
* `server_name` directives enclosed in quotation marks in Nginx are now properly
|
||||
supported.
|
||||
* Resolved an issue preventing the Apache plugin from starting Apache when it's
|
||||
not currently running on RHEL and Gentoo based systems.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
packages with changes other than their version number were:
|
||||
|
||||
* certbot
|
||||
* certbot-apache
|
||||
* certbot-dns-cloudxns
|
||||
* certbot-dns-dnsimple
|
||||
* certbot-dns-dnsmadeeasy
|
||||
* certbot-dns-google
|
||||
* certbot-dns-luadns
|
||||
* certbot-dns-nsone
|
||||
* certbot-dns-rfc2136
|
||||
* certbot-nginx
|
||||
|
||||
More details about these changes can be found on our GitHub repo:
|
||||
https://github.com/certbot/certbot/milestone/50?closed=1
|
||||
|
||||
## 0.22.2 - 2018-03-19
|
||||
|
||||
### Fixed
|
||||
|
||||
* A type error introduced in 0.22.1 that would occur during challenge cleanup
|
||||
when a Certbot plugin raises an exception while trying to complete the
|
||||
challenge was fixed.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
packages with changes other than their version number were:
|
||||
|
||||
* certbot
|
||||
|
||||
More details about these changes can be found on our GitHub repo:
|
||||
https://github.com/certbot/certbot/milestone/53?closed=1
|
||||
|
||||
## 0.22.1 - 2018-03-19
|
||||
|
||||
### Changed
|
||||
|
||||
* The ACME server used with Certbot's --dry-run and --staging flags is now
|
||||
Let's Encrypt's ACMEv2 staging server which allows people to also test ACMEv2
|
||||
features with these flags.
|
||||
|
||||
### Fixed
|
||||
|
||||
* The HTTP Content-Type header is now set to the correct value during
|
||||
certificate revocation with new versions of the ACME protocol.
|
||||
* When using Certbot with Let's Encrypt's ACMEv2 server, it would add a blank
|
||||
line to the top of chain.pem and between the certificates in fullchain.pem
|
||||
for each lineage. These blank lines have been removed.
|
||||
* Resolved a bug that caused Certbot's --allow-subset-of-names flag not to
|
||||
work.
|
||||
* Fixed a regression in acme.client.Client that caused the class not to work
  when it was initialized without a ClientNetwork, which is done by some of the
  other projects using our ACME library. A minimal sketch of that construction
  path follows.
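The sketch below is illustrative only: it assumes a freshly generated RSA
account key and uses an ACMEv1-style staging directory URL as a placeholder.

```python
import josepy as jose
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa

from acme import client

# Wrap a new RSA private key for use as the ACME account key.
account_key = jose.JWKRSA(key=rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()))

# No ClientNetwork is passed in, so Client builds one from the key --
# the code path restored by this fix.
acme_client = client.Client(
    directory='https://acme-staging.api.letsencrypt.org/directory',
    key=account_key)
```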
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
packages with changes other than their version number were:
|
||||
|
||||
* acme
|
||||
* certbot
|
||||
|
||||
More details about these changes can be found on our GitHub repo:
|
||||
https://github.com/certbot/certbot/milestone/51?closed=1
|
||||
|
||||
## 0.22.0 - 2018-03-07
|
||||
|
||||
### Added
|
||||
|
||||
* Support for obtaining wildcard certificates and a newer version of the ACME
|
||||
protocol such as the one implemented by Let's Encrypt's upcoming ACMEv2
|
||||
endpoint was added to Certbot and its ACME library. Certbot still works with
|
||||
older ACME versions and will automatically change the version of the protocol
|
||||
used based on the version the ACME CA implements.
|
||||
* The Apache and Nginx plugins are now able to automatically install a wildcard
|
||||
certificate to multiple virtual hosts that you select from your server
|
||||
configuration.
|
||||
* The `certbot install` command now accepts the `--cert-name` flag for
|
||||
selecting a certificate.
|
||||
* `acme.client.BackwardsCompatibleClientV2` was added to Certbot's ACME library
  which automatically handles most of the differences between new and old ACME
  versions. `acme.client.ClientV2` is also available for people who only want
  to support one version of the protocol or want to handle the differences
  between versions themselves. A usage sketch follows at the end of this list.
|
||||
* certbot-auto now supports the flag --install-only which has the script
|
||||
install Certbot and its dependencies and exit without invoking Certbot.
|
||||
* Support for issuing a single certificate for a wildcard and base domain was
|
||||
added to our Google Cloud DNS plugin. To do this, we now require your API
|
||||
credentials have additional permissions, however, your credentials will
|
||||
already have these permissions unless you defined a custom role with fewer
|
||||
permissions than the standard DNS administrator role provided by Google.
|
||||
These permissions are also only needed for the case described above so it
|
||||
will continue to work for existing users. For more information about the
|
||||
permissions changes, see the documentation in the plugin.
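As referenced in the `BackwardsCompatibleClientV2` entry above, here is a rough
usage sketch. It is a sketch only: the directory URL, contact address, and the
pre-existing `csr.pem` file are placeholders, challenge solving is omitted, and
the account key is created the same way as in the earlier sketch.

```python
import datetime

import josepy as jose
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa

from acme import client
from acme import messages

account_key = jose.JWKRSA(key=rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()))
net = client.ClientNetwork(account_key, user_agent='acme-example')

# The wrapper downloads the directory and picks Client (ACMEv1) or
# ClientV2 (ACMEv2) based on what the server advertises.
compat_client = client.BackwardsCompatibleClientV2(
    net=net, key=account_key,
    server='https://acme-staging-v02.api.letsencrypt.org/directory')

compat_client.new_account_and_tos(
    messages.NewRegistration.from_data(email='admin@example.com'))

with open('csr.pem', 'rb') as csr_file:
    csr_pem = csr_file.read()

orderr = compat_client.new_order(csr_pem)
# ... solve the challenges in orderr.authorizations here ...
deadline = datetime.datetime.now() + datetime.timedelta(seconds=90)
orderr = compat_client.finalize_order(orderr, deadline)
fullchain_pem = orderr.fullchain_pem
```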
### Changed
|
||||
|
||||
* We have broken lockstep between our ACME library, Certbot, and its plugins.
|
||||
This means that the different components do not need to be the same version
|
||||
to work together like they did previously. This makes packaging easier
|
||||
because not every piece of Certbot needs to be repackaged to ship a change to
|
||||
a subset of its components.
|
||||
* Support for Python 2.6 and Python 3.3 has been removed from ACME, Certbot,
|
||||
Certbot's plugins, and certbot-auto. If you are using certbot-auto on a RHEL
|
||||
6 based system, it will walk you through the process of installing Certbot
|
||||
with Python 3 and refuse to upgrade to a newer version of Certbot until you
|
||||
have done so.
|
||||
* Certbot's components now work with older versions of setuptools to simplify
|
||||
packaging for EPEL 7.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Issues caused by Certbot's Nginx plugin adding multiple ipv6only directives
  have been resolved.
|
||||
* A problem where Certbot's Apache plugin would add redundant include
|
||||
directives for the TLS configuration managed by Certbot has been fixed.
|
||||
* Certbot's webroot plugin now properly deletes any directories it creates.
|
||||
|
||||
More details about these changes can be found on our GitHub repo:
|
||||
https://github.com/certbot/certbot/milestone/48?closed=1
|
||||
|
||||
## 0.21.1 - 2018-01-25
|
||||
|
||||
### Fixed
|
||||
|
||||
* When creating an HTTP to HTTPS redirect in Nginx, we now ensure the Host
|
||||
header of the request is set to an expected value before redirecting users to
|
||||
the domain found in the header. The previous way Certbot configured Nginx
|
||||
redirects was a potential security issue which you can read more about at
|
||||
https://community.letsencrypt.org/t/security-issue-with-redirects-added-by-certbots-nginx-plugin/51493.
|
||||
* Fixed a problem where Certbot's Apache plugin could fail HTTP-01 challenges
|
||||
if basic authentication is configured for the domain you request a
|
||||
certificate for.
|
||||
* certbot-auto --no-bootstrap now properly tries to use Python 3.4 on RHEL 6
|
||||
based systems rather than Python 2.6.
|
||||
|
||||
More details about these changes can be found on our GitHub repo:
|
||||
https://github.com/certbot/certbot/milestone/49?closed=1
|
||||
|
||||
## 0.21.0 - 2018-01-17
|
||||
|
||||
### Added
|
||||
|
||||
* Support for the HTTP-01 challenge type was added to our Apache and Nginx
|
||||
plugins. For those not aware, Let's Encrypt disabled the TLS-SNI-01 challenge
|
||||
type which was what was previously being used by our Apache and Nginx plugins
|
||||
last week due to a security issue. For more information about Let's Encrypt's
|
||||
change, click
|
||||
[here](https://community.letsencrypt.org/t/2018-01-11-update-regarding-acme-tls-sni-and-shared-hosting-infrastructure/50188).
|
||||
Our Apache and Nginx plugins will automatically switch to use HTTP-01 so no
|
||||
changes need to be made to your Certbot configuration, however, you should
|
||||
make sure your server is accessible on port 80 and isn't behind an external
|
||||
proxy doing things like redirecting all traffic from HTTP to HTTPS. HTTP to
|
||||
HTTPS redirects inside Apache and Nginx are fine.
|
||||
* IPv6 support was added to the Nginx plugin.
|
||||
* Support for automatically creating server blocks based on the default server
|
||||
block was added to the Nginx plugin.
|
||||
* The flags --delete-after-revoke and --no-delete-after-revoke were added
|
||||
allowing users to control whether the revoke subcommand also deletes the
|
||||
certificates it is revoking.
|
||||
|
||||
### Changed
|
||||
|
||||
* We deprecated support for Python 2.6 and Python 3.3 in Certbot and its ACME
|
||||
library. Support for these versions of Python will be removed in the next
|
||||
major release of Certbot. If you are using certbot-auto on a RHEL 6 based
|
||||
system, it will guide you through the process of installing Python 3.
|
||||
* We split our implementation of JOSE (Javascript Object Signing and
|
||||
Encryption) out of our ACME library and into a separate package named josepy.
|
||||
This package is available on [PyPI](https://pypi.python.org/pypi/josepy) and
|
||||
on [GitHub](https://github.com/certbot/josepy).
|
||||
* We updated the ciphersuites used in Apache to the new [values recommended by
|
||||
Mozilla](https://wiki.mozilla.org/Security/Server_Side_TLS#Intermediate_compatibility_.28default.29).
|
||||
The major change here is adding ChaCha20 to the list of supported
|
||||
ciphersuites.
|
||||
|
||||
### Fixed
|
||||
|
||||
* An issue with our Apache plugin on Gentoo due to differences in their
  apache2ctl command has been resolved.
|
||||
|
||||
More details about these changes can be found on our GitHub repo:
|
||||
https://github.com/certbot/certbot/milestone/47?closed=1
|
||||
|
||||
## 0.20.0 - 2017-12-06
|
||||
|
||||
### Added
|
||||
|
||||
* Certbot's ACME library now recognizes URL fields in challenge objects in
|
||||
preparation for Let's Encrypt's new ACME endpoint. The value is still
|
||||
accessible in our ACME library through the name "uri".
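  A small illustration of that naming, assuming the v1 `acme_client` from the
  sketch further up; `example.com` is a placeholder domain:

```python
# Request challenges for a domain. Whether the server's JSON carried the
# challenge's address under the old "uri" name or the new "url" name, the
# library exposes it uniformly as the .uri attribute.
authzr = acme_client.request_domain_challenges('example.com')
for challb in authzr.body.challenges:
    print(challb.chall.typ, challb.uri)
```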
### Changed
|
||||
|
||||
* The Apache plugin now parses some distro specific Apache configuration files
|
||||
on non-Debian systems allowing it to get a clearer picture on the running
|
||||
configuration. Internally, these changes were structured so that external
|
||||
contributors can easily write patches to make the plugin work in new Apache
|
||||
configurations.
|
||||
* Certbot better reports network failures by removing information about
|
||||
connection retries from the error output.
|
||||
* An unnecessary question when using Certbot's webroot plugin interactively has
|
||||
been removed.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Certbot's NGINX plugin no longer sometimes incorrectly reports that it was
|
||||
unable to deploy a HTTP->HTTPS redirect when requesting Certbot to enable a
|
||||
redirect for multiple domains.
|
||||
* Problems where the Apache plugin was failing to find directives and
|
||||
duplicating existing directives on openSUSE have been resolved.
|
||||
* An issue running the tests shipped with Certbot and some of our DNS plugins
  with older versions of mock has been resolved.
|
||||
* On some systems, users reported strangely interleaved output depending on
|
||||
when stdout and stderr were flushed. This problem was resolved by having
|
||||
Certbot regularly flush these streams.
|
||||
|
||||
More details about these changes can be found on our GitHub repo:
|
||||
https://github.com/certbot/certbot/milestone/44?closed=1
|
||||
|
||||
## 0.19.0 - 2017-10-04
|
||||
|
||||
### Added
|
||||
|
||||
@@ -1,4 +1,4 @@
FROM python:2-alpine
FROM python:2-alpine3.7

ENTRYPOINT [ "certbot" ]
EXPOSE 80 443
@@ -1,70 +1,21 @@
|
||||
# This Dockerfile builds an image for development.
|
||||
FROM ubuntu:trusty
|
||||
MAINTAINER Jakub Warmuz <jakub@warmuz.org>
|
||||
MAINTAINER William Budington <bill@eff.org>
|
||||
MAINTAINER Yan <yan@eff.org>
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Note: this only exposes the port to other docker containers. You
|
||||
# still have to bind to 443@host at runtime, as per the ACME spec.
|
||||
EXPOSE 443
|
||||
|
||||
# TODO: make sure --config-dir and --work-dir cannot be changed
|
||||
# through the CLI (certbot-docker wrapper that uses standalone
|
||||
# authenticator and text mode only?)
|
||||
VOLUME /etc/letsencrypt /var/lib/letsencrypt
|
||||
# Note: this only exposes the port to other docker containers.
|
||||
EXPOSE 80 443
|
||||
|
||||
WORKDIR /opt/certbot/src
|
||||
|
||||
# no need to mkdir anything:
|
||||
# https://docs.docker.com/reference/builder/#copy
|
||||
# If <dest> doesn't exist, it is created along with all missing
|
||||
# directories in its path.
|
||||
|
||||
# TODO: Install Apache/Nginx for plugin development.
|
||||
COPY letsencrypt-auto-source/letsencrypt-auto /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto
|
||||
RUN /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto --os-packages-only && \
|
||||
apt-get install python3-dev git -y && \
|
||||
COPY . .
|
||||
RUN apt-get update && \
|
||||
apt-get install apache2 git nginx-light -y && \
|
||||
letsencrypt-auto-source/letsencrypt-auto --os-packages-only && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/* \
|
||||
/tmp/* \
|
||||
/var/tmp/*
|
||||
|
||||
# the above is not likely to change, so by putting it further up the
|
||||
# Dockerfile we make sure we cache as much as possible
|
||||
|
||||
COPY setup.py README.rst CHANGES.rst MANIFEST.in linter_plugin.py tox.cover.sh tox.ini .pylintrc /opt/certbot/src/
|
||||
|
||||
# all above files are necessary for setup.py, however, package source
|
||||
# code directory has to be copied separately to a subdirectory...
|
||||
# https://docs.docker.com/reference/builder/#copy: "If <src> is a
|
||||
# directory, the entire contents of the directory are copied,
|
||||
# including filesystem metadata. Note: The directory itself is not
|
||||
# copied, just its contents." Order again matters, three files are far
|
||||
# more likely to be cached than the whole project directory
|
||||
|
||||
COPY certbot /opt/certbot/src/certbot/
|
||||
COPY acme /opt/certbot/src/acme/
|
||||
COPY certbot-apache /opt/certbot/src/certbot-apache/
|
||||
COPY certbot-nginx /opt/certbot/src/certbot-nginx/
|
||||
COPY letshelp-certbot /opt/certbot/src/letshelp-certbot/
|
||||
COPY certbot-compatibility-test /opt/certbot/src/certbot-compatibility-test/
|
||||
COPY tests /opt/certbot/src/tests/
|
||||
|
||||
RUN virtualenv --no-site-packages -p python2 /opt/certbot/venv && \
|
||||
/opt/certbot/venv/bin/pip install -U pip && \
|
||||
/opt/certbot/venv/bin/pip install -U setuptools && \
|
||||
/opt/certbot/venv/bin/pip install \
|
||||
-e /opt/certbot/src/acme \
|
||||
-e /opt/certbot/src \
|
||||
-e /opt/certbot/src/certbot-apache \
|
||||
-e /opt/certbot/src/certbot-nginx \
|
||||
-e /opt/certbot/src/letshelp-certbot \
|
||||
-e /opt/certbot/src/certbot-compatibility-test \
|
||||
-e /opt/certbot/src[dev,docs]
|
||||
|
||||
# install in editable mode (-e) to save space: it's not possible to
|
||||
# "rm -rf /opt/certbot/src" (it's stays in the underlaying image);
|
||||
# this might also help in debugging: you can "docker run --entrypoint
|
||||
# bash" and investigate, apply patches, etc.
|
||||
RUN VENV_NAME="../venv" tools/venv.sh
|
||||
|
||||
ENV PATH /opt/certbot/venv/bin:$PATH
|
||||
|
||||
@@ -1,3 +1,9 @@
If you're having trouble using Certbot and aren't sure you've found a bug or
request for a new feature, please first try asking for help at
https://community.letsencrypt.org/. There is a much larger community there of
people familiar with the project who will be able to more quickly answer your
questions.

## My operating system is (include version):
@@ -6,13 +6,13 @@ import logging
|
||||
import socket
|
||||
|
||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
import requests
|
||||
|
||||
from acme import errors
|
||||
from acme import crypto_util
|
||||
from acme import fields
|
||||
from acme import jose
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Tests for acme.challenges."""
|
||||
import unittest
|
||||
|
||||
import josepy as jose
|
||||
import mock
|
||||
import OpenSSL
|
||||
import requests
|
||||
@@ -8,7 +9,6 @@ import requests
|
||||
from six.moves.urllib import parse as urllib_parse # pylint: disable=import-error
|
||||
|
||||
from acme import errors
|
||||
from acme import jose
|
||||
from acme import test_util
|
||||
|
||||
CERT = test_util.load_comparable_cert('cert.pem')
|
||||
|
||||
@@ -10,13 +10,14 @@ import time
|
||||
import six
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
import re
|
||||
import requests
|
||||
import sys
|
||||
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import jose
|
||||
from acme import jws
|
||||
from acme import messages
|
||||
|
||||
@@ -39,39 +40,24 @@ DEFAULT_NETWORK_TIMEOUT = 45
|
||||
DER_CONTENT_TYPE = 'application/pkix-cert'
|
||||
|
||||
|
||||
class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
"""ACME client.
|
||||
|
||||
.. todo::
|
||||
Clean up raised error types hierarchy, document, and handle (wrap)
|
||||
instances of `.DeserializationError` raised in `from_json()`.
|
||||
class ClientBase(object): # pylint: disable=too-many-instance-attributes
|
||||
"""ACME client base object.
|
||||
|
||||
:ivar messages.Directory directory:
|
||||
:ivar key: `.JWK` (private)
|
||||
:ivar alg: `.JWASignature`
|
||||
:ivar bool verify_ssl: Verify SSL certificates?
|
||||
:ivar .ClientNetwork net: Client network. Useful for testing. If not
|
||||
supplied, it will be initialized using `key`, `alg` and
|
||||
`verify_ssl`.
|
||||
|
||||
:ivar .ClientNetwork net: Client network.
|
||||
:ivar int acme_version: ACME protocol version. 1 or 2.
|
||||
"""
|
||||
|
||||
def __init__(self, directory, key, alg=jose.RS256, verify_ssl=True,
|
||||
net=None):
|
||||
def __init__(self, directory, net, acme_version):
|
||||
"""Initialize.
|
||||
|
||||
:param directory: Directory Resource (`.messages.Directory`) or
|
||||
URI from which the resource will be downloaded.
|
||||
|
||||
:param .messages.Directory directory: Directory Resource
|
||||
:param .ClientNetwork net: Client network.
|
||||
:param int acme_version: ACME protocol version. 1 or 2.
|
||||
"""
|
||||
self.key = key
|
||||
self.net = ClientNetwork(key, alg, verify_ssl) if net is None else net
|
||||
|
||||
if isinstance(directory, six.string_types):
|
||||
self.directory = messages.Directory.from_json(
|
||||
self.net.get(directory).json())
|
||||
else:
|
||||
self.directory = directory
|
||||
self.directory = directory
|
||||
self.net = net
|
||||
self.acme_version = acme_version
|
||||
|
||||
@classmethod
|
||||
def _regr_from_response(cls, response, uri=None, terms_of_service=None):
|
||||
@@ -83,28 +69,8 @@ class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
uri=response.headers.get('Location', uri),
|
||||
terms_of_service=terms_of_service)
|
||||
|
||||
def register(self, new_reg=None):
|
||||
"""Register.
|
||||
|
||||
:param .NewRegistration new_reg:
|
||||
|
||||
:returns: Registration Resource.
|
||||
:rtype: `.RegistrationResource`
|
||||
|
||||
"""
|
||||
new_reg = messages.NewRegistration() if new_reg is None else new_reg
|
||||
assert isinstance(new_reg, messages.NewRegistration)
|
||||
|
||||
response = self.net.post(self.directory[new_reg], new_reg)
|
||||
# TODO: handle errors
|
||||
assert response.status_code == http_client.CREATED
|
||||
|
||||
# "Instance of 'Field' has no key/contact member" bug:
|
||||
# pylint: disable=no-member
|
||||
return self._regr_from_response(response)
|
||||
|
||||
def _send_recv_regr(self, regr, body):
|
||||
response = self.net.post(regr.uri, body)
|
||||
response = self._post(regr.uri, body)
|
||||
|
||||
# TODO: Boulder returns httplib.ACCEPTED
|
||||
#assert response.status_code == httplib.OK
|
||||
@@ -116,6 +82,13 @@ class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
response, uri=regr.uri,
|
||||
terms_of_service=regr.terms_of_service)
|
||||
|
||||
def _post(self, *args, **kwargs):
|
||||
"""Wrapper around self.net.post that adds the acme_version.
|
||||
|
||||
"""
|
||||
kwargs.setdefault('acme_version', self.acme_version)
|
||||
return self.net.post(*args, **kwargs)
|
||||
|
||||
def update_registration(self, regr, update=None):
|
||||
"""Update registration.
|
||||
|
||||
@@ -130,6 +103,7 @@ class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
update = regr.body if update is None else update
|
||||
body = messages.UpdateRegistration(**dict(update))
|
||||
updated_regr = self._send_recv_regr(regr, body=body)
|
||||
self.net.account = updated_regr
|
||||
return updated_regr
|
||||
|
||||
def deactivate_registration(self, regr):
|
||||
@@ -153,65 +127,14 @@ class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
"""
|
||||
return self._send_recv_regr(regr, messages.UpdateRegistration())
|
||||
|
||||
def agree_to_tos(self, regr):
|
||||
"""Agree to the terms-of-service.
|
||||
|
||||
Agree to the terms-of-service in a Registration Resource.
|
||||
|
||||
:param regr: Registration Resource.
|
||||
:type regr: `.RegistrationResource`
|
||||
|
||||
:returns: Updated Registration Resource.
|
||||
:rtype: `.RegistrationResource`
|
||||
|
||||
"""
|
||||
return self.update_registration(
|
||||
regr.update(body=regr.body.update(agreement=regr.terms_of_service)))
|
||||
|
||||
def _authzr_from_response(self, response, identifier, uri=None):
|
||||
def _authzr_from_response(self, response, identifier=None, uri=None):
|
||||
authzr = messages.AuthorizationResource(
|
||||
body=messages.Authorization.from_json(response.json()),
|
||||
uri=response.headers.get('Location', uri))
|
||||
if authzr.body.identifier != identifier:
|
||||
if identifier is not None and authzr.body.identifier != identifier:
|
||||
raise errors.UnexpectedUpdate(authzr)
|
||||
return authzr
|
||||
|
||||
def request_challenges(self, identifier, new_authzr_uri=None):
|
||||
"""Request challenges.
|
||||
|
||||
:param .messages.Identifier identifier: Identifier to be challenged.
|
||||
:param str new_authzr_uri: Deprecated. Do not use.
|
||||
|
||||
:returns: Authorization Resource.
|
||||
:rtype: `.AuthorizationResource`
|
||||
|
||||
"""
|
||||
if new_authzr_uri is not None:
|
||||
logger.debug("request_challenges with new_authzr_uri deprecated.")
|
||||
new_authz = messages.NewAuthorization(identifier=identifier)
|
||||
response = self.net.post(self.directory.new_authz, new_authz)
|
||||
# TODO: handle errors
|
||||
assert response.status_code == http_client.CREATED
|
||||
return self._authzr_from_response(response, identifier)
|
||||
|
||||
def request_domain_challenges(self, domain, new_authzr_uri=None):
|
||||
"""Request challenges for domain names.
|
||||
|
||||
This is simply a convenience function that wraps around
|
||||
`request_challenges`, but works with domain names instead of
|
||||
generic identifiers. See ``request_challenges`` for more
|
||||
documentation.
|
||||
|
||||
:param str domain: Domain name to be challenged.
|
||||
:param str new_authzr_uri: Deprecated. Do not use.
|
||||
|
||||
:returns: Authorization Resource.
|
||||
:rtype: `.AuthorizationResource`
|
||||
|
||||
"""
|
||||
return self.request_challenges(messages.Identifier(
|
||||
typ=messages.IDENTIFIER_FQDN, value=domain), new_authzr_uri)
|
||||
|
||||
def answer_challenge(self, challb, response):
|
||||
"""Answer challenge.
|
||||
|
||||
@@ -227,7 +150,7 @@ class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
:raises .UnexpectedUpdate:
|
||||
|
||||
"""
|
||||
response = self.net.post(challb.uri, response)
|
||||
response = self._post(challb.uri, response)
|
||||
try:
|
||||
authzr_uri = response.links['up']['url']
|
||||
except KeyError:
|
||||
@@ -288,6 +211,142 @@ class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
response, authzr.body.identifier, authzr.uri)
|
||||
return updated_authzr, response
|
||||
|
||||
def _revoke(self, cert, rsn, url):
|
||||
"""Revoke certificate.
|
||||
|
||||
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
|
||||
`.ComparableX509`
|
||||
|
||||
:param int rsn: Reason code for certificate revocation.
|
||||
|
||||
:param str url: ACME URL to post to
|
||||
|
||||
:raises .ClientError: If revocation is unsuccessful.
|
||||
|
||||
"""
|
||||
response = self._post(url,
|
||||
messages.Revocation(
|
||||
certificate=cert,
|
||||
reason=rsn))
|
||||
if response.status_code != http_client.OK:
|
||||
raise errors.ClientError(
|
||||
'Successful revocation must return HTTP OK status')
|
||||
|
||||
class Client(ClientBase):
|
||||
"""ACME client for a v1 API.
|
||||
|
||||
.. todo::
|
||||
Clean up raised error types hierarchy, document, and handle (wrap)
|
||||
instances of `.DeserializationError` raised in `from_json()`.
|
||||
|
||||
:ivar messages.Directory directory:
|
||||
:ivar key: `josepy.JWK` (private)
|
||||
:ivar alg: `josepy.JWASignature`
|
||||
:ivar bool verify_ssl: Verify SSL certificates?
|
||||
:ivar .ClientNetwork net: Client network. Useful for testing. If not
|
||||
supplied, it will be initialized using `key`, `alg` and
|
||||
`verify_ssl`.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, directory, key, alg=jose.RS256, verify_ssl=True,
|
||||
net=None):
|
||||
"""Initialize.
|
||||
|
||||
:param directory: Directory Resource (`.messages.Directory`) or
|
||||
URI from which the resource will be downloaded.
|
||||
|
||||
"""
|
||||
# pylint: disable=too-many-arguments
|
||||
self.key = key
|
||||
if net is None:
|
||||
net = ClientNetwork(key, alg=alg, verify_ssl=verify_ssl)
|
||||
|
||||
if isinstance(directory, six.string_types):
|
||||
directory = messages.Directory.from_json(
|
||||
net.get(directory).json())
|
||||
super(Client, self).__init__(directory=directory,
|
||||
net=net, acme_version=1)
|
||||
|
||||
def register(self, new_reg=None):
|
||||
"""Register.
|
||||
|
||||
:param .NewRegistration new_reg:
|
||||
|
||||
:returns: Registration Resource.
|
||||
:rtype: `.RegistrationResource`
|
||||
|
||||
"""
|
||||
new_reg = messages.NewRegistration() if new_reg is None else new_reg
|
||||
response = self._post(self.directory[new_reg], new_reg)
|
||||
# TODO: handle errors
|
||||
assert response.status_code == http_client.CREATED
|
||||
|
||||
# "Instance of 'Field' has no key/contact member" bug:
|
||||
# pylint: disable=no-member
|
||||
return self._regr_from_response(response)
|
||||
|
||||
def agree_to_tos(self, regr):
|
||||
"""Agree to the terms-of-service.
|
||||
|
||||
Agree to the terms-of-service in a Registration Resource.
|
||||
|
||||
:param regr: Registration Resource.
|
||||
:type regr: `.RegistrationResource`
|
||||
|
||||
:returns: Updated Registration Resource.
|
||||
:rtype: `.RegistrationResource`
|
||||
|
||||
"""
|
||||
return self.update_registration(
|
||||
regr.update(body=regr.body.update(agreement=regr.terms_of_service)))
|
||||
|
||||
def request_challenges(self, identifier, new_authzr_uri=None):
|
||||
"""Request challenges.
|
||||
|
||||
:param .messages.Identifier identifier: Identifier to be challenged.
|
||||
:param str new_authzr_uri: Deprecated. Do not use.
|
||||
|
||||
:returns: Authorization Resource.
|
||||
:rtype: `.AuthorizationResource`
|
||||
|
||||
:raises errors.WildcardUnsupportedError: if a wildcard is requested
|
||||
|
||||
"""
|
||||
if new_authzr_uri is not None:
|
||||
logger.debug("request_challenges with new_authzr_uri deprecated.")
|
||||
|
||||
if identifier.value.startswith("*"):
|
||||
raise errors.WildcardUnsupportedError(
|
||||
"Requesting an authorization for a wildcard name is"
|
||||
" forbidden by this version of the ACME protocol.")
|
||||
|
||||
new_authz = messages.NewAuthorization(identifier=identifier)
|
||||
response = self._post(self.directory.new_authz, new_authz)
|
||||
# TODO: handle errors
|
||||
assert response.status_code == http_client.CREATED
|
||||
return self._authzr_from_response(response, identifier)
|
||||
|
||||
def request_domain_challenges(self, domain, new_authzr_uri=None):
|
||||
"""Request challenges for domain names.
|
||||
|
||||
This is simply a convenience function that wraps around
|
||||
`request_challenges`, but works with domain names instead of
|
||||
generic identifiers. See ``request_challenges`` for more
|
||||
documentation.
|
||||
|
||||
:param str domain: Domain name to be challenged.
|
||||
:param str new_authzr_uri: Deprecated. Do not use.
|
||||
|
||||
:returns: Authorization Resource.
|
||||
:rtype: `.AuthorizationResource`
|
||||
|
||||
:raises errors.WildcardUnsupportedError: if a wildcard is requested
|
||||
|
||||
"""
|
||||
return self.request_challenges(messages.Identifier(
|
||||
typ=messages.IDENTIFIER_FQDN, value=domain), new_authzr_uri)
|
||||
|
||||
def request_issuance(self, csr, authzrs):
|
||||
"""Request issuance.
|
||||
|
||||
@@ -307,7 +366,7 @@ class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
req = messages.CertificateRequest(csr=csr)
|
||||
|
||||
content_type = DER_CONTENT_TYPE # TODO: add 'cert_type 'argument
|
||||
response = self.net.post(
|
||||
response = self._post(
|
||||
self.directory.new_cert,
|
||||
req,
|
||||
content_type=content_type,
|
||||
@@ -408,7 +467,7 @@ class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
:param str uri: URI of certificate
|
||||
|
||||
:returns: tuple of the form
|
||||
(response, :class:`acme.jose.ComparableX509`)
|
||||
(response, :class:`josepy.util.ComparableX509`)
|
||||
:rtype: tuple
|
||||
|
||||
"""
|
||||
@@ -492,26 +551,317 @@ class Client(object): # pylint: disable=too-many-instance-attributes
|
||||
:raises .ClientError: If revocation is unsuccessful.
|
||||
|
||||
"""
|
||||
response = self.net.post(self.directory[messages.Revocation],
|
||||
messages.Revocation(
|
||||
certificate=cert,
|
||||
reason=rsn),
|
||||
content_type=None)
|
||||
if response.status_code != http_client.OK:
|
||||
raise errors.ClientError(
|
||||
'Successful revocation must return HTTP OK status')
|
||||
return self._revoke(cert, rsn, self.directory[messages.Revocation])
|
||||
|
||||
|
||||
class ClientV2(ClientBase):
|
||||
"""ACME client for a v2 API.
|
||||
|
||||
:ivar messages.Directory directory:
|
||||
:ivar .ClientNetwork net: Client network.
|
||||
"""
|
||||
|
||||
def __init__(self, directory, net):
|
||||
"""Initialize.
|
||||
|
||||
:param .messages.Directory directory: Directory Resource
|
||||
:param .ClientNetwork net: Client network.
|
||||
"""
|
||||
super(ClientV2, self).__init__(directory=directory,
|
||||
net=net, acme_version=2)
|
||||
|
||||
def new_account(self, new_account):
|
||||
"""Register.
|
||||
|
||||
:param .NewRegistration new_account:
|
||||
|
||||
:returns: Registration Resource.
|
||||
:rtype: `.RegistrationResource`
|
||||
"""
|
||||
response = self._post(self.directory['newAccount'], new_account)
|
||||
# "Instance of 'Field' has no key/contact member" bug:
|
||||
# pylint: disable=no-member
|
||||
regr = self._regr_from_response(response)
|
||||
self.net.account = regr
|
||||
return regr
|
||||
|
||||
def new_order(self, csr_pem):
|
||||
"""Request a new Order object from the server.
|
||||
|
||||
:param str csr_pem: A CSR in PEM format.
|
||||
|
||||
:returns: The newly created order.
|
||||
:rtype: OrderResource
|
||||
"""
|
||||
csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# pylint: disable=protected-access
|
||||
dnsNames = crypto_util._pyopenssl_cert_or_req_all_names(csr)
|
||||
|
||||
identifiers = []
|
||||
for name in dnsNames:
|
||||
identifiers.append(messages.Identifier(typ=messages.IDENTIFIER_FQDN,
|
||||
value=name))
|
||||
order = messages.NewOrder(identifiers=identifiers)
|
||||
response = self._post(self.directory['newOrder'], order)
|
||||
body = messages.Order.from_json(response.json())
|
||||
authorizations = []
|
||||
for url in body.authorizations:
|
||||
authorizations.append(self._authzr_from_response(self.net.get(url), uri=url))
|
||||
return messages.OrderResource(
|
||||
body=body,
|
||||
uri=response.headers.get('Location'),
|
||||
authorizations=authorizations,
|
||||
csr_pem=csr_pem)
|
||||
|
||||
def poll_and_finalize(self, orderr, deadline=None):
|
||||
"""Poll authorizations and finalize the order.
|
||||
|
||||
If no deadline is provided, this method will timeout after 90
|
||||
seconds.
|
||||
|
||||
:param messages.OrderResource orderr: order to finalize
|
||||
:param datetime.datetime deadline: when to stop polling and timeout
|
||||
|
||||
:returns: finalized order
|
||||
:rtype: messages.OrderResource
|
||||
|
||||
"""
|
||||
if deadline is None:
|
||||
deadline = datetime.datetime.now() + datetime.timedelta(seconds=90)
|
||||
orderr = self.poll_authorizations(orderr, deadline)
|
||||
return self.finalize_order(orderr, deadline)
|
||||
|
||||
def poll_authorizations(self, orderr, deadline):
|
||||
"""Poll Order Resource for status."""
|
||||
responses = []
|
||||
for url in orderr.body.authorizations:
|
||||
while datetime.datetime.now() < deadline:
|
||||
authzr = self._authzr_from_response(self.net.get(url), uri=url)
|
||||
if authzr.body.status != messages.STATUS_PENDING:
|
||||
responses.append(authzr)
|
||||
break
|
||||
time.sleep(1)
|
||||
# If we didn't get a response for every authorization, we fell through
|
||||
# the bottom of the loop due to hitting the deadline.
|
||||
if len(responses) < len(orderr.body.authorizations):
|
||||
raise errors.TimeoutError()
|
||||
failed = []
|
||||
for authzr in responses:
|
||||
if authzr.body.status != messages.STATUS_VALID:
|
||||
for chall in authzr.body.challenges:
|
||||
if chall.error != None:
|
||||
failed.append(authzr)
|
||||
if len(failed) > 0:
|
||||
raise errors.ValidationError(failed)
|
||||
return orderr.update(authorizations=responses)
|
||||
|
||||
def finalize_order(self, orderr, deadline):
|
||||
"""Finalize an order and obtain a certificate.
|
||||
|
||||
:param messages.OrderResource orderr: order to finalize
|
||||
:param datetime.datetime deadline: when to stop polling and timeout
|
||||
|
||||
:returns: finalized order
|
||||
:rtype: messages.OrderResource
|
||||
|
||||
"""
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, orderr.csr_pem)
|
||||
wrapped_csr = messages.CertificateRequest(csr=jose.ComparableX509(csr))
|
||||
self._post(orderr.body.finalize, wrapped_csr)
|
||||
while datetime.datetime.now() < deadline:
|
||||
time.sleep(1)
|
||||
response = self.net.get(orderr.uri)
|
||||
body = messages.Order.from_json(response.json())
|
||||
if body.error is not None:
|
||||
raise errors.IssuanceError(body.error)
|
||||
if body.certificate is not None:
|
||||
certificate_response = self.net.get(body.certificate,
|
||||
content_type=DER_CONTENT_TYPE).text
|
||||
return orderr.update(body=body, fullchain_pem=certificate_response)
|
||||
raise errors.TimeoutError()
|
||||
|
||||
def revoke(self, cert, rsn):
|
||||
"""Revoke certificate.
|
||||
|
||||
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
|
||||
`.ComparableX509`
|
||||
|
||||
:param int rsn: Reason code for certificate revocation.
|
||||
|
||||
:raises .ClientError: If revocation is unsuccessful.
|
||||
|
||||
"""
|
||||
return self._revoke(cert, rsn, self.directory['revokeCert'])
|
||||
|
||||
|
||||
class BackwardsCompatibleClientV2(object):
|
||||
"""ACME client wrapper that tends towards V2-style calls, but
|
||||
supports V1 servers.
|
||||
|
||||
.. note:: While this class handles the majority of the differences
|
||||
between versions of the ACME protocol, if you need to support an
|
||||
ACME server based on version 3 or older of the IETF ACME draft
|
||||
that uses combinations in authorizations (or lack thereof) to
|
||||
signal that the client needs to complete something other than
|
||||
any single challenge in the authorization to make it valid, the
|
||||
user of this class needs to understand and handle these
|
||||
differences themselves. This does not apply to either of Let's
|
||||
Encrypt's endpoints where successfully completing any challenge
|
||||
in an authorization will make it valid.
|
||||
|
||||
:ivar int acme_version: 1 or 2, corresponding to the Let's Encrypt endpoint
|
||||
:ivar .ClientBase client: either Client or ClientV2
|
||||
"""
|
||||
|
||||
def __init__(self, net, key, server):
|
||||
directory = messages.Directory.from_json(net.get(server).json())
|
||||
self.acme_version = self._acme_version_from_directory(directory)
|
||||
if self.acme_version == 1:
|
||||
self.client = Client(directory, key=key, net=net)
|
||||
else:
|
||||
self.client = ClientV2(directory, net=net)
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name in vars(self.client):
|
||||
return getattr(self.client, name)
|
||||
elif name in dir(ClientBase):
|
||||
return getattr(self.client, name)
|
||||
else:
|
||||
raise AttributeError()
|
||||
|
||||
def new_account_and_tos(self, regr, check_tos_cb=None):
|
||||
"""Combined register and agree_tos for V1, new_account for V2
|
||||
|
||||
:param .NewRegistration regr:
|
||||
:param callable check_tos_cb: callback that raises an error if
|
||||
the check does not work
|
||||
"""
|
||||
def _assess_tos(tos):
|
||||
if check_tos_cb is not None:
|
||||
check_tos_cb(tos)
|
||||
if self.acme_version == 1:
|
||||
regr = self.client.register(regr)
|
||||
if regr.terms_of_service is not None:
|
||||
_assess_tos(regr.terms_of_service)
|
||||
return self.client.agree_to_tos(regr)
|
||||
return regr
|
||||
else:
|
||||
if "terms_of_service" in self.client.directory.meta:
|
||||
_assess_tos(self.client.directory.meta.terms_of_service)
|
||||
regr = regr.update(terms_of_service_agreed=True)
|
||||
return self.client.new_account(regr)
|
||||
|
||||
def new_order(self, csr_pem):
|
||||
"""Request a new Order object from the server.
|
||||
|
||||
If using ACMEv1, returns a dummy OrderResource with only
|
||||
the authorizations field filled in.
|
||||
|
||||
:param str csr_pem: A CSR in PEM format.
|
||||
|
||||
:returns: The newly created order.
|
||||
:rtype: OrderResource
|
||||
|
||||
:raises errors.WildcardUnsupportedError: if a wildcard domain is
|
||||
requested but unsupported by the ACME version
|
||||
|
||||
"""
|
||||
if self.acme_version == 1:
|
||||
csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# pylint: disable=protected-access
|
||||
dnsNames = crypto_util._pyopenssl_cert_or_req_all_names(csr)
|
||||
authorizations = []
|
||||
for domain in dnsNames:
|
||||
authorizations.append(self.client.request_domain_challenges(domain))
|
||||
return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
|
||||
else:
|
||||
return self.client.new_order(csr_pem)
|
||||
|
||||
def finalize_order(self, orderr, deadline):
|
||||
"""Finalize an order and obtain a certificate.
|
||||
|
||||
:param messages.OrderResource orderr: order to finalize
|
||||
:param datetime.datetime deadline: when to stop polling and timeout
|
||||
|
||||
:returns: finalized order
|
||||
:rtype: messages.OrderResource
|
||||
|
||||
"""
|
||||
if self.acme_version == 1:
|
||||
csr_pem = orderr.csr_pem
|
||||
certr = self.client.request_issuance(
|
||||
jose.ComparableX509(
|
||||
OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)),
|
||||
orderr.authorizations)
|
||||
|
||||
chain = None
|
||||
while datetime.datetime.now() < deadline:
|
||||
try:
|
||||
chain = self.client.fetch_chain(certr)
|
||||
break
|
||||
except errors.Error:
|
||||
time.sleep(1)
|
||||
|
||||
if chain is None:
|
||||
raise errors.TimeoutError(
|
||||
'Failed to fetch chain. You should not deploy the generated '
|
||||
'certificate, please rerun the command for a new one.')
|
||||
|
||||
cert = OpenSSL.crypto.dump_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM, certr.body.wrapped).decode()
|
||||
chain = crypto_util.dump_pyopenssl_chain(chain).decode()
|
||||
|
||||
return orderr.update(fullchain_pem=(cert + chain))
|
||||
else:
|
||||
return self.client.finalize_order(orderr, deadline)
|
||||
|
||||
def revoke(self, cert, rsn):
|
||||
"""Revoke certificate.
|
||||
|
||||
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
|
||||
`.ComparableX509`
|
||||
|
||||
:param int rsn: Reason code for certificate revocation.
|
||||
|
||||
:raises .ClientError: If revocation is unsuccessful.
|
||||
|
||||
"""
|
||||
return self.client.revoke(cert, rsn)
|
||||
|
||||
def _acme_version_from_directory(self, directory):
|
||||
if hasattr(directory, 'newNonce'):
|
||||
return 2
|
||||
else:
|
||||
return 1
|
||||
|
||||
|
||||
class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
"""Client network."""
|
||||
"""Wrapper around requests that signs POSTs for authentication.
|
||||
|
||||
Also adds user agent, and handles Content-Type.
|
||||
"""
|
||||
JSON_CONTENT_TYPE = 'application/json'
|
||||
JOSE_CONTENT_TYPE = 'application/jose+json'
|
||||
JSON_ERROR_CONTENT_TYPE = 'application/problem+json'
|
||||
REPLAY_NONCE_HEADER = 'Replay-Nonce'
|
||||
|
||||
def __init__(self, key, alg=jose.RS256, verify_ssl=True,
|
||||
"""Initialize.
|
||||
|
||||
:param josepy.JWK key: Account private key
|
||||
:param messages.RegistrationResource account: Account object. Required if you are
|
||||
planning to use .post() with acme_version=2 for anything other than
|
||||
creating a new account; may be set later after registering.
|
||||
:param josepy.JWASignature alg: Algorithm to use in signing JWS.
|
||||
:param bool verify_ssl: Whether to verify certificates on SSL connections.
|
||||
:param str user_agent: String to send as User-Agent header.
|
||||
:param float timeout: Timeout for requests.
|
||||
"""
|
||||
def __init__(self, key, account=None, alg=jose.RS256, verify_ssl=True,
|
||||
user_agent='acme-python', timeout=DEFAULT_NETWORK_TIMEOUT):
|
||||
# pylint: disable=too-many-arguments
|
||||
self.key = key
|
||||
self.account = account
|
||||
self.alg = alg
|
||||
self.verify_ssl = verify_ssl
|
||||
self._nonces = set()
|
||||
@@ -527,21 +877,31 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
|
||||
def _wrap_in_jws(self, obj, nonce):
|
||||
def _wrap_in_jws(self, obj, nonce, url, acme_version):
|
||||
"""Wrap `JSONDeSerializable` object in JWS.
|
||||
|
||||
.. todo:: Implement ``acmePath``.
|
||||
|
||||
:param .JSONDeSerializable obj:
|
||||
:param josepy.JSONDeSerializable obj:
|
||||
:param str url: The URL to which this object will be POSTed
|
||||
:param bytes nonce:
|
||||
:rtype: `.JWS`
|
||||
:rtype: `josepy.JWS`
|
||||
|
||||
"""
|
||||
jobj = obj.json_dumps(indent=2).encode()
|
||||
logger.debug('JWS payload:\n%s', jobj)
|
||||
return jws.JWS.sign(
|
||||
payload=jobj, key=self.key, alg=self.alg,
|
||||
nonce=nonce).json_dumps(indent=2)
|
||||
kwargs = {
|
||||
"alg": self.alg,
|
||||
"nonce": nonce
|
||||
}
|
||||
if acme_version == 2:
|
||||
kwargs["url"] = url
|
||||
# newAccount and revokeCert work without the kid
|
||||
if self.account is not None:
|
||||
kwargs["kid"] = self.account["uri"]
|
||||
kwargs["key"] = self.key
|
||||
# pylint: disable=star-args
|
||||
return jws.JWS.sign(jobj, **kwargs).json_dumps(indent=2)
|
||||
|
||||
@classmethod
|
||||
def _check_response(cls, response, content_type=None):
|
||||
@@ -714,8 +1074,9 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
else:
|
||||
raise
|
||||
|
||||
def _post_once(self, url, obj, content_type=JOSE_CONTENT_TYPE, **kwargs):
|
||||
data = self._wrap_in_jws(obj, self._get_nonce(url))
|
||||
def _post_once(self, url, obj, content_type=JOSE_CONTENT_TYPE,
|
||||
acme_version=1, **kwargs):
|
||||
data = self._wrap_in_jws(obj, self._get_nonce(url), url, acme_version)
|
||||
kwargs.setdefault('headers', {'Content-Type': content_type})
|
||||
response = self._send_request('POST', url, data=data, **kwargs)
|
||||
self._add_nonce(response)
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
"""Tests for acme.client."""
|
||||
import copy
|
||||
import datetime
|
||||
import json
|
||||
import unittest
|
||||
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
|
||||
import josepy as jose
|
||||
import mock
|
||||
import OpenSSL
|
||||
import requests
|
||||
|
||||
from acme import challenges
|
||||
from acme import errors
|
||||
from acme import jose
|
||||
from acme import jws as acme_jws
|
||||
from acme import messages
|
||||
from acme import messages_test
|
||||
@@ -18,13 +20,32 @@ from acme import test_util
|
||||
|
||||
|
||||
CERT_DER = test_util.load_vector('cert.der')
|
||||
CERT_SAN_PEM = test_util.load_vector('cert-san.pem')
|
||||
CSR_SAN_PEM = test_util.load_vector('csr-san.pem')
|
||||
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
|
||||
KEY2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
|
||||
|
||||
DIRECTORY_V1 = messages.Directory({
|
||||
messages.NewRegistration:
|
||||
'https://www.letsencrypt-demo.org/acme/new-reg',
|
||||
messages.Revocation:
|
||||
'https://www.letsencrypt-demo.org/acme/revoke-cert',
|
||||
messages.NewAuthorization:
|
||||
'https://www.letsencrypt-demo.org/acme/new-authz',
|
||||
messages.CertificateRequest:
|
||||
'https://www.letsencrypt-demo.org/acme/new-cert',
|
||||
})
|
||||
|
||||
class ClientTest(unittest.TestCase):
|
||||
"""Tests for acme.client.Client."""
|
||||
# pylint: disable=too-many-instance-attributes,too-many-public-methods
|
||||
DIRECTORY_V2 = messages.Directory({
|
||||
'newAccount': 'https://www.letsencrypt-demo.org/acme/new-account',
|
||||
'newNonce': 'https://www.letsencrypt-demo.org/acme/new-nonce',
|
||||
'newOrder': 'https://www.letsencrypt-demo.org/acme/new-order',
|
||||
'revokeCert': 'https://www.letsencrypt-demo.org/acme/revoke-cert',
|
||||
})
|
||||
|
||||
|
||||
class ClientTestBase(unittest.TestCase):
|
||||
"""Base for tests in acme.client."""
|
||||
|
||||
def setUp(self):
|
||||
self.response = mock.MagicMock(
|
||||
@@ -33,21 +54,6 @@ class ClientTest(unittest.TestCase):
|
||||
self.net.post.return_value = self.response
|
||||
self.net.get.return_value = self.response
|
||||
|
||||
self.directory = messages.Directory({
|
||||
messages.NewRegistration:
|
||||
'https://www.letsencrypt-demo.org/acme/new-reg',
|
||||
messages.Revocation:
|
||||
'https://www.letsencrypt-demo.org/acme/revoke-cert',
|
||||
messages.NewAuthorization:
|
||||
'https://www.letsencrypt-demo.org/acme/new-authz',
|
||||
messages.CertificateRequest:
|
||||
'https://www.letsencrypt-demo.org/acme/new-cert',
|
||||
})
|
||||
|
||||
from acme.client import Client
|
||||
self.client = Client(
|
||||
directory=self.directory, key=KEY, alg=jose.RS256, net=self.net)
|
||||
|
||||
self.identifier = messages.Identifier(
|
||||
typ=messages.IDENTIFIER_FQDN, value='example.com')
|
||||
|
||||
@@ -57,8 +63,7 @@ class ClientTest(unittest.TestCase):
|
||||
contact=self.contact, key=KEY.public_key())
|
||||
self.new_reg = messages.NewRegistration(**dict(reg))
|
||||
self.regr = messages.RegistrationResource(
|
||||
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1',
|
||||
terms_of_service='https://www.letsencrypt-demo.org/tos')
|
||||
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
|
||||
|
||||
# Authorization
|
||||
authzr_uri = 'https://www.letsencrypt-demo.org/acme/authz/1'
|
||||
@@ -75,14 +80,217 @@ class ClientTest(unittest.TestCase):
|
||||
self.authzr = messages.AuthorizationResource(
|
||||
body=self.authz, uri=authzr_uri)
|
||||
|
||||
# Reason code for revocation
|
||||
self.rsn = 1
|
||||
|
||||
|
||||
class BackwardsCompatibleClientV2Test(ClientTestBase):
|
||||
"""Tests for acme.client.BackwardsCompatibleClientV2."""
|
||||
|
||||
def setUp(self):
|
||||
super(BackwardsCompatibleClientV2Test, self).setUp()
|
||||
# contains a loaded cert
|
||||
self.certr = messages.CertificateResource(
|
||||
body=messages_test.CERT)
|
||||
|
||||
loaded = OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM, CERT_SAN_PEM)
|
||||
wrapped = jose.ComparableX509(loaded)
|
||||
self.chain = [wrapped, wrapped]
|
||||
|
||||
self.cert_pem = OpenSSL.crypto.dump_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM, messages_test.CERT.wrapped).decode()
|
||||
|
||||
single_chain = OpenSSL.crypto.dump_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM, loaded).decode()
|
||||
self.chain_pem = single_chain + single_chain
|
||||
|
||||
self.fullchain_pem = self.cert_pem + self.chain_pem
|
||||
|
||||
self.orderr = messages.OrderResource(
|
||||
csr_pem=CSR_SAN_PEM)
|
||||
|
||||
def _init(self):
|
||||
uri = 'http://www.letsencrypt-demo.org/directory'
|
||||
from acme.client import BackwardsCompatibleClientV2
|
||||
return BackwardsCompatibleClientV2(net=self.net,
|
||||
key=KEY, server=uri)
|
||||
|
||||
def test_init_downloads_directory(self):
|
||||
uri = 'http://www.letsencrypt-demo.org/directory'
|
||||
from acme.client import BackwardsCompatibleClientV2
|
||||
BackwardsCompatibleClientV2(net=self.net,
|
||||
key=KEY, server=uri)
|
||||
self.net.get.assert_called_once_with(uri)
|
||||
|
||||
def test_init_acme_version(self):
|
||||
self.response.json.return_value = DIRECTORY_V1.to_json()
|
||||
client = self._init()
|
||||
self.assertEqual(client.acme_version, 1)
|
||||
|
||||
self.response.json.return_value = DIRECTORY_V2.to_json()
|
||||
client = self._init()
|
||||
self.assertEqual(client.acme_version, 2)
|
||||
|
||||
def test_forwarding(self):
|
||||
self.response.json.return_value = DIRECTORY_V1.to_json()
|
||||
client = self._init()
|
||||
self.assertEqual(client.directory, client.client.directory)
|
||||
self.assertEqual(client.key, KEY)
|
||||
self.assertEqual(client.update_registration, client.client.update_registration)
|
||||
self.assertRaises(AttributeError, client.__getattr__, 'nonexistent')
|
||||
self.assertRaises(AttributeError, client.__getattr__, 'new_account_and_tos')
|
||||
self.assertRaises(AttributeError, client.__getattr__, 'new_account')
|
||||
|
||||
def test_new_account_and_tos(self):
|
||||
# v2 no tos
|
||||
self.response.json.return_value = DIRECTORY_V2.to_json()
|
||||
with mock.patch('acme.client.ClientV2') as mock_client:
|
||||
client = self._init()
|
||||
client.new_account_and_tos(self.new_reg)
|
||||
mock_client().new_account.assert_called_with(self.new_reg)
|
||||
|
||||
# v2 tos good
|
||||
with mock.patch('acme.client.ClientV2') as mock_client:
|
||||
mock_client().directory.meta.__contains__.return_value = True
|
||||
client = self._init()
|
||||
client.new_account_and_tos(self.new_reg, lambda x: True)
|
||||
mock_client().new_account.assert_called_with(
|
||||
self.new_reg.update(terms_of_service_agreed=True))
|
||||
|
||||
# v2 tos bad
|
||||
with mock.patch('acme.client.ClientV2') as mock_client:
|
||||
mock_client().directory.meta.__contains__.return_value = True
|
||||
client = self._init()
|
||||
def _tos_cb(tos):
|
||||
raise errors.Error
|
||||
self.assertRaises(errors.Error, client.new_account_and_tos,
|
||||
self.new_reg, _tos_cb)
|
||||
mock_client().new_account.assert_not_called()
|
||||
|
||||
# v1 yes tos
|
||||
self.response.json.return_value = DIRECTORY_V1.to_json()
|
||||
with mock.patch('acme.client.Client') as mock_client:
|
||||
regr = mock.MagicMock(terms_of_service="TOS")
|
||||
mock_client().register.return_value = regr
|
||||
client = self._init()
|
||||
client.new_account_and_tos(self.new_reg)
|
||||
mock_client().register.assert_called_once_with(self.new_reg)
|
||||
mock_client().agree_to_tos.assert_called_once_with(regr)
|
||||
|
||||
# v1 no tos
|
||||
with mock.patch('acme.client.Client') as mock_client:
|
||||
regr = mock.MagicMock(terms_of_service=None)
|
||||
mock_client().register.return_value = regr
|
||||
client = self._init()
|
||||
client.new_account_and_tos(self.new_reg)
|
||||
mock_client().register.assert_called_once_with(self.new_reg)
|
||||
mock_client().agree_to_tos.assert_not_called()
|
||||
|
||||
@mock.patch('OpenSSL.crypto.load_certificate_request')
|
||||
@mock.patch('acme.crypto_util._pyopenssl_cert_or_req_all_names')
|
||||
def test_new_order_v1(self, mock__pyopenssl_cert_or_req_all_names,
|
||||
unused_mock_load_certificate_request):
|
||||
self.response.json.return_value = DIRECTORY_V1.to_json()
|
||||
mock__pyopenssl_cert_or_req_all_names.return_value = ['example.com', 'www.example.com']
|
||||
mock_csr_pem = mock.MagicMock()
|
||||
with mock.patch('acme.client.Client') as mock_client:
|
||||
mock_client().request_domain_challenges.return_value = mock.sentinel.auth
|
||||
client = self._init()
|
||||
orderr = client.new_order(mock_csr_pem)
|
||||
self.assertEqual(orderr.authorizations, [mock.sentinel.auth, mock.sentinel.auth])
|
||||
|
||||
def test_new_order_v2(self):
|
||||
self.response.json.return_value = DIRECTORY_V2.to_json()
|
||||
mock_csr_pem = mock.MagicMock()
|
||||
with mock.patch('acme.client.ClientV2') as mock_client:
|
||||
client = self._init()
|
||||
client.new_order(mock_csr_pem)
|
||||
mock_client().new_order.assert_called_once_with(mock_csr_pem)
|
||||
|
||||
@mock.patch('acme.client.Client')
|
||||
def test_finalize_order_v1_success(self, mock_client):
|
||||
self.response.json.return_value = DIRECTORY_V1.to_json()
|
||||
|
||||
mock_client().request_issuance.return_value = self.certr
|
||||
mock_client().fetch_chain.return_value = self.chain
|
||||
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
client = self._init()
|
||||
result = client.finalize_order(self.orderr, deadline)
|
||||
self.assertEqual(result.fullchain_pem, self.fullchain_pem)
|
||||
mock_client().fetch_chain.assert_called_once_with(self.certr)
|
||||
|
||||
@mock.patch('acme.client.Client')
|
||||
def test_finalize_order_v1_fetch_chain_error(self, mock_client):
|
||||
self.response.json.return_value = DIRECTORY_V1.to_json()
|
||||
|
||||
mock_client().request_issuance.return_value = self.certr
|
||||
mock_client().fetch_chain.return_value = self.chain
|
||||
mock_client().fetch_chain.side_effect = [errors.Error, self.chain]
|
||||
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
client = self._init()
|
||||
result = client.finalize_order(self.orderr, deadline)
|
||||
self.assertEqual(result.fullchain_pem, self.fullchain_pem)
|
||||
self.assertEqual(mock_client().fetch_chain.call_count, 2)
|
||||
|
||||
@mock.patch('acme.client.Client')
|
||||
def test_finalize_order_v1_timeout(self, mock_client):
|
||||
self.response.json.return_value = DIRECTORY_V1.to_json()
|
||||
|
||||
mock_client().request_issuance.return_value = self.certr
|
||||
|
||||
deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
|
||||
client = self._init()
|
||||
self.assertRaises(errors.TimeoutError, client.finalize_order,
|
||||
self.orderr, deadline)
|
||||
|
||||
def test_finalize_order_v2(self):
|
||||
self.response.json.return_value = DIRECTORY_V2.to_json()
|
||||
mock_orderr = mock.MagicMock()
|
||||
mock_deadline = mock.MagicMock()
|
||||
with mock.patch('acme.client.ClientV2') as mock_client:
|
||||
client = self._init()
|
||||
client.finalize_order(mock_orderr, mock_deadline)
|
||||
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline)
|
||||
|
||||
def test_revoke(self):
|
||||
self.response.json.return_value = DIRECTORY_V1.to_json()
|
||||
with mock.patch('acme.client.Client') as mock_client:
|
||||
client = self._init()
|
||||
client.revoke(messages_test.CERT, self.rsn)
|
||||
mock_client().revoke.assert_called_once_with(messages_test.CERT, self.rsn)
|
||||
|
||||
self.response.json.return_value = DIRECTORY_V2.to_json()
|
||||
with mock.patch('acme.client.ClientV2') as mock_client:
|
||||
client = self._init()
|
||||
client.revoke(messages_test.CERT, self.rsn)
|
||||
mock_client().revoke.assert_called_once_with(messages_test.CERT, self.rsn)
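Read together, the BackwardsCompatibleClientV2 tests above describe the intended call sequence for a caller that does not care which protocol version the directory advertises: build the wrapper from a network object, an account key and a directory URL, register with new_account_and_tos, then drive new_order and finalize_order. A hedged end-to-end sketch under those assumptions (the directory URL, e-mail address and 90-second deadline are placeholders; the method names all come from the tests)::

    import datetime

    import josepy as jose
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    from acme import client, crypto_util, messages

    DIRECTORY_URL = 'https://www.letsencrypt-demo.org/directory'  # placeholder

    account_key = jose.JWKRSA(key=jose.ComparableRSAKey(rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend())))
    net = client.ClientNetwork(account_key)
    acme = client.BackwardsCompatibleClientV2(net=net, key=account_key,
                                              server=DIRECTORY_URL)

    # Registration: dispatches to register()/agree_to_tos() on v1 or to
    # new_account() with terms_of_service_agreed on v2, as tested above.
    acme.new_account_and_tos(
        messages.NewRegistration.from_data(email='admin@example.com'),
        lambda tos: True)

    # CSR generation via the make_csr() helper shown later in this diff.
    cert_key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                                        backend=default_backend())
    key_pem = cert_key.private_bytes(serialization.Encoding.PEM,
                                     serialization.PrivateFormat.TraditionalOpenSSL,
                                     serialization.NoEncryption())
    csr_pem = crypto_util.make_csr(key_pem, ['example.com', 'www.example.com'])

    orderr = acme.new_order(csr_pem)
    orderr = acme.finalize_order(
        orderr, datetime.datetime.now() + datetime.timedelta(seconds=90))
    print(orderr.fullchain_pem)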
class ClientTest(ClientTestBase):
|
||||
"""Tests for acme.client.Client."""
|
||||
# pylint: disable=too-many-instance-attributes,too-many-public-methods
|
||||
|
||||
def setUp(self):
|
||||
super(ClientTest, self).setUp()
|
||||
|
||||
self.directory = DIRECTORY_V1
|
||||
|
||||
# Registration
|
||||
self.regr = self.regr.update(
|
||||
terms_of_service='https://www.letsencrypt-demo.org/tos')
|
||||
|
||||
# Request issuance
|
||||
self.certr = messages.CertificateResource(
|
||||
body=messages_test.CERT, authzrs=(self.authzr,),
|
||||
uri='https://www.letsencrypt-demo.org/acme/cert/1',
|
||||
cert_chain_uri='https://www.letsencrypt-demo.org/ca')
|
||||
|
||||
# Reason code for revocation
|
||||
self.rsn = 1
|
||||
from acme.client import Client
|
||||
self.client = Client(
|
||||
directory=self.directory, key=KEY, alg=jose.RS256, net=self.net)
|
||||
|
||||
def test_init_downloads_directory(self):
|
||||
uri = 'http://www.letsencrypt-demo.org/directory'
|
||||
@@ -91,6 +299,16 @@ class ClientTest(unittest.TestCase):
|
||||
directory=uri, key=KEY, alg=jose.RS256, net=self.net)
|
||||
self.net.get.assert_called_once_with(uri)
|
||||
|
||||
@mock.patch('acme.client.ClientNetwork')
|
||||
def test_init_without_net(self, mock_net):
|
||||
mock_net.return_value = mock.sentinel.net
|
||||
alg = jose.RS256
|
||||
from acme.client import Client
|
||||
self.client = Client(
|
||||
directory=self.directory, key=KEY, alg=alg)
|
||||
mock_net.assert_called_once_with(KEY, alg=alg, verify_ssl=True)
|
||||
self.assertEqual(self.client.net, mock.sentinel.net)
|
||||
|
||||
def test_register(self):
|
||||
# "Instance of 'Field' has no to_json/update member" bug:
|
||||
# pylint: disable=no-member
|
||||
@@ -142,20 +360,23 @@ class ClientTest(unittest.TestCase):
|
||||
self.client.request_challenges(self.identifier)
|
||||
self.net.post.assert_called_once_with(
|
||||
self.directory.new_authz,
|
||||
messages.NewAuthorization(identifier=self.identifier))
|
||||
messages.NewAuthorization(identifier=self.identifier),
|
||||
acme_version=1)
|
||||
|
||||
def test_request_challenges_deprecated_arg(self):
|
||||
self._prepare_response_for_request_challenges()
|
||||
self.client.request_challenges(self.identifier, new_authzr_uri="hi")
|
||||
self.net.post.assert_called_once_with(
|
||||
self.directory.new_authz,
|
||||
messages.NewAuthorization(identifier=self.identifier))
|
||||
messages.NewAuthorization(identifier=self.identifier),
|
||||
acme_version=1)
|
||||
|
||||
def test_request_challenges_custom_uri(self):
|
||||
self._prepare_response_for_request_challenges()
|
||||
self.client.request_challenges(self.identifier)
|
||||
self.net.post.assert_called_once_with(
|
||||
'https://www.letsencrypt-demo.org/acme/new-authz', mock.ANY)
|
||||
'https://www.letsencrypt-demo.org/acme/new-authz', mock.ANY,
|
||||
acme_version=1)
|
||||
|
||||
def test_request_challenges_unexpected_update(self):
|
||||
self._prepare_response_for_request_challenges()
|
||||
@@ -165,6 +386,13 @@ class ClientTest(unittest.TestCase):
|
||||
errors.UnexpectedUpdate, self.client.request_challenges,
|
||||
self.identifier)
|
||||
|
||||
def test_request_challenges_wildcard(self):
|
||||
wildcard_identifier = messages.Identifier(
|
||||
typ=messages.IDENTIFIER_FQDN, value='*.example.org')
|
||||
self.assertRaises(
|
||||
errors.WildcardUnsupportedError, self.client.request_challenges,
|
||||
wildcard_identifier)
|
||||
|
||||
def test_request_domain_challenges(self):
|
||||
self.client.request_challenges = mock.MagicMock()
|
||||
self.assertEqual(
|
||||
@@ -417,7 +645,7 @@ class ClientTest(unittest.TestCase):
|
||||
def test_revoke(self):
|
||||
self.client.revoke(self.certr.body, self.rsn)
|
||||
self.net.post.assert_called_once_with(
|
||||
self.directory[messages.Revocation], mock.ANY, content_type=None)
|
||||
self.directory[messages.Revocation], mock.ANY, acme_version=1)
|
||||
|
||||
def test_revocation_payload(self):
|
||||
obj = messages.Revocation(certificate=self.certr.body, reason=self.rsn)
|
||||
@@ -432,9 +660,150 @@ class ClientTest(unittest.TestCase):
|
||||
self.certr,
|
||||
self.rsn)
|
||||
|
||||
class ClientV2Test(ClientTestBase):
|
||||
"""Tests for acme.client.ClientV2."""
|
||||
|
||||
def setUp(self):
|
||||
super(ClientV2Test, self).setUp()
|
||||
|
||||
self.directory = DIRECTORY_V2
|
||||
|
||||
from acme.client import ClientV2
|
||||
self.client = ClientV2(self.directory, self.net)
|
||||
|
||||
self.new_reg = self.new_reg.update(terms_of_service_agreed=True)
|
||||
|
||||
self.authzr_uri2 = 'https://www.letsencrypt-demo.org/acme/authz/2'
|
||||
self.authz2 = self.authz.update(identifier=messages.Identifier(
|
||||
typ=messages.IDENTIFIER_FQDN, value='www.example.com'),
|
||||
status=messages.STATUS_PENDING)
|
||||
self.authzr2 = messages.AuthorizationResource(
|
||||
body=self.authz2, uri=self.authzr_uri2)
|
||||
|
||||
self.order = messages.Order(
|
||||
identifiers=(self.authz.identifier, self.authz2.identifier),
|
||||
status=messages.STATUS_PENDING,
|
||||
authorizations=(self.authzr.uri, self.authzr_uri2),
|
||||
finalize='https://www.letsencrypt-demo.org/acme/acct/1/order/1/finalize')
|
||||
self.orderr = messages.OrderResource(
|
||||
body=self.order,
|
||||
uri='https://www.letsencrypt-demo.org/acme/acct/1/order/1',
|
||||
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_SAN_PEM)
|
||||
|
||||
def test_new_account(self):
|
||||
self.response.status_code = http_client.CREATED
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
|
||||
self.assertEqual(self.regr, self.client.new_account(self.new_reg))
|
||||
|
||||
def test_new_order(self):
|
||||
order_response = copy.deepcopy(self.response)
|
||||
order_response.status_code = http_client.CREATED
|
||||
order_response.json.return_value = self.order.to_json()
|
||||
order_response.headers['Location'] = self.orderr.uri
|
||||
self.net.post.return_value = order_response
|
||||
|
||||
authz_response = copy.deepcopy(self.response)
|
||||
authz_response.json.return_value = self.authz.to_json()
|
||||
authz_response.headers['Location'] = self.authzr.uri
|
||||
authz_response2 = self.response
|
||||
authz_response2.json.return_value = self.authz2.to_json()
|
||||
authz_response2.headers['Location'] = self.authzr2.uri
|
||||
self.net.get.side_effect = (authz_response, authz_response2)
|
||||
|
||||
self.assertEqual(self.client.new_order(CSR_SAN_PEM), self.orderr)
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_poll_and_finalize(self, mock_datetime):
|
||||
mock_datetime.datetime.now.return_value = datetime.datetime(2018, 2, 15)
|
||||
mock_datetime.timedelta = datetime.timedelta
|
||||
expected_deadline = mock_datetime.datetime.now() + datetime.timedelta(seconds=90)
|
||||
|
||||
self.client.poll_authorizations = mock.Mock(return_value=self.orderr)
|
||||
self.client.finalize_order = mock.Mock(return_value=self.orderr)
|
||||
|
||||
self.assertEqual(self.client.poll_and_finalize(self.orderr), self.orderr)
|
||||
self.client.poll_authorizations.assert_called_once_with(self.orderr, expected_deadline)
|
||||
self.client.finalize_order.assert_called_once_with(self.orderr, expected_deadline)
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_poll_authorizations_timeout(self, mock_datetime):
|
||||
now_side_effect = [datetime.datetime(2018, 2, 15),
|
||||
datetime.datetime(2018, 2, 16),
|
||||
datetime.datetime(2018, 2, 17)]
|
||||
mock_datetime.datetime.now.side_effect = now_side_effect
|
||||
self.response.json.side_effect = [
|
||||
self.authz.to_json(), self.authz2.to_json(), self.authz2.to_json()]
|
||||
|
||||
self.assertRaises(
|
||||
errors.TimeoutError, self.client.poll_authorizations, self.orderr, now_side_effect[1])
|
||||
|
||||
def test_poll_authorizations_failure(self):
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
challb = self.challr.body.update(status=messages.STATUS_INVALID,
|
||||
error=messages.Error.with_code('unauthorized'))
|
||||
authz = self.authz.update(status=messages.STATUS_INVALID, challenges=(challb,))
|
||||
self.response.json.return_value = authz.to_json()
|
||||
|
||||
self.assertRaises(
|
||||
errors.ValidationError, self.client.poll_authorizations, self.orderr, deadline)
|
||||
|
||||
def test_poll_authorizations_success(self):
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
updated_authz2 = self.authz2.update(status=messages.STATUS_VALID)
|
||||
updated_authzr2 = messages.AuthorizationResource(
|
||||
body=updated_authz2, uri=self.authzr_uri2)
|
||||
updated_orderr = self.orderr.update(authorizations=[self.authzr, updated_authzr2])
|
||||
|
||||
self.response.json.side_effect = (
|
||||
self.authz.to_json(), self.authz2.to_json(), updated_authz2.to_json())
|
||||
self.assertEqual(self.client.poll_authorizations(self.orderr, deadline), updated_orderr)
|
||||
|
||||
def test_finalize_order_success(self):
|
||||
updated_order = self.order.update(
|
||||
certificate='https://www.letsencrypt-demo.org/acme/cert/')
|
||||
updated_orderr = self.orderr.update(body=updated_order, fullchain_pem=CERT_SAN_PEM)
|
||||
|
||||
self.response.json.return_value = updated_order.to_json()
|
||||
self.response.text = CERT_SAN_PEM
|
||||
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
self.assertEqual(self.client.finalize_order(self.orderr, deadline), updated_orderr)
|
||||
|
||||
def test_finalize_order_error(self):
|
||||
updated_order = self.order.update(error=messages.Error.with_code('unauthorized'))
|
||||
self.response.json.return_value = updated_order.to_json()
|
||||
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
self.assertRaises(errors.IssuanceError, self.client.finalize_order, self.orderr, deadline)
|
||||
|
||||
def test_finalize_order_timeout(self):
|
||||
deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
|
||||
self.assertRaises(errors.TimeoutError, self.client.finalize_order, self.orderr, deadline)
|
||||
|
||||
def test_revoke(self):
|
||||
self.client.revoke(messages_test.CERT, self.rsn)
|
||||
self.net.post.assert_called_once_with(
|
||||
self.directory["revokeCert"], mock.ANY, acme_version=2)
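The ClientV2 tests above split the v2 flow into its individual network interactions; written as straight-line code the same flow looks roughly as follows. This is a sketch only: the account key path, CSR file and e-mail address are hypothetical, and in real use the directory would normally be fetched from the server (as BackwardsCompatibleClientV2 does) rather than hard-coded::

    import datetime

    import josepy as jose
    from acme import client, messages

    directory = messages.Directory({
        'newAccount': 'https://www.letsencrypt-demo.org/acme/new-account',
        'newNonce': 'https://www.letsencrypt-demo.org/acme/new-nonce',
        'newOrder': 'https://www.letsencrypt-demo.org/acme/new-order',
        'revokeCert': 'https://www.letsencrypt-demo.org/acme/revoke-cert',
    })
    key = jose.JWKRSA.load(open('account_key.pem', 'rb').read())  # hypothetical path
    acme_v2 = client.ClientV2(directory, client.ClientNetwork(key))

    new_reg = messages.NewRegistration.from_data(email='admin@example.com')
    regr = acme_v2.new_account(new_reg.update(terms_of_service_agreed=True))

    orderr = acme_v2.new_order(open('csr.pem', 'rb').read())  # hypothetical CSR (PEM)
    deadline = datetime.datetime.now() + datetime.timedelta(seconds=90)
    orderr = acme_v2.poll_authorizations(orderr, deadline)  # ValidationError/TimeoutError on failure
    orderr = acme_v2.finalize_order(orderr, deadline)       # IssuanceError/TimeoutError on failure
    # poll_and_finalize() wraps the last two calls with a default 90-second deadline.
    print(orderr.fullchain_pem)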
class MockJSONDeSerializable(jose.JSONDeSerializable):
|
||||
# pylint: disable=missing-docstring
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def to_partial_json(self):
|
||||
return {'foo': self.value}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, value):
|
||||
pass # pragma: no cover
|
||||
|
||||
|
||||
class ClientNetworkTest(unittest.TestCase):
|
||||
"""Tests for acme.client.ClientNetwork."""
|
||||
# pylint: disable=too-many-public-methods
|
||||
|
||||
def setUp(self):
|
||||
self.verify_ssl = mock.MagicMock()
|
||||
@@ -453,25 +822,27 @@ class ClientNetworkTest(unittest.TestCase):
|
||||
self.assertTrue(self.net.verify_ssl is self.verify_ssl)
|
||||
|
||||
def test_wrap_in_jws(self):
|
||||
class MockJSONDeSerializable(jose.JSONDeSerializable):
|
||||
# pylint: disable=missing-docstring
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def to_partial_json(self):
|
||||
return {'foo': self.value}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, value):
|
||||
pass # pragma: no cover
|
||||
|
||||
# pylint: disable=protected-access
|
||||
jws_dump = self.net._wrap_in_jws(
|
||||
MockJSONDeSerializable('foo'), nonce=b'Tg')
|
||||
MockJSONDeSerializable('foo'), nonce=b'Tg', url="url",
|
||||
acme_version=1)
|
||||
jws = acme_jws.JWS.json_loads(jws_dump)
|
||||
self.assertEqual(json.loads(jws.payload.decode()), {'foo': 'foo'})
|
||||
self.assertEqual(jws.signature.combined.nonce, b'Tg')
|
||||
|
||||
def test_wrap_in_jws_v2(self):
|
||||
self.net.account = {'uri': 'acct-uri'}
|
||||
# pylint: disable=protected-access
|
||||
jws_dump = self.net._wrap_in_jws(
|
||||
MockJSONDeSerializable('foo'), nonce=b'Tg', url="url",
|
||||
acme_version=2)
|
||||
jws = acme_jws.JWS.json_loads(jws_dump)
|
||||
self.assertEqual(json.loads(jws.payload.decode()), {'foo': 'foo'})
|
||||
self.assertEqual(jws.signature.combined.nonce, b'Tg')
|
||||
self.assertEqual(jws.signature.combined.kid, u'acct-uri')
|
||||
self.assertEqual(jws.signature.combined.url, u'url')
|
||||
|
||||
|
||||
def test_check_response_not_ok_jobj_no_error(self):
|
||||
self.response.ok = False
|
||||
self.response.json.return_value = {}
|
||||
@@ -701,13 +1072,13 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
|
||||
self.assertEqual(self.checked_response, self.net.post(
|
||||
'uri', self.obj, content_type=self.content_type))
|
||||
self.net._wrap_in_jws.assert_called_once_with(
|
||||
self.obj, jose.b64decode(self.all_nonces.pop()))
|
||||
self.obj, jose.b64decode(self.all_nonces.pop()), "uri", 1)
|
||||
|
||||
self.available_nonces = []
|
||||
self.assertRaises(errors.MissingNonce, self.net.post,
|
||||
'uri', self.obj, content_type=self.content_type)
|
||||
self.net._wrap_in_jws.assert_called_with(
|
||||
self.obj, jose.b64decode(self.all_nonces.pop()))
|
||||
self.obj, jose.b64decode(self.all_nonces.pop()), "uri", 1)
|
||||
|
||||
def test_post_wrong_initial_nonce(self): # HEAD
|
||||
self.available_nonces = [b'f', jose.b64encode(b'good')]
|
||||
|
||||
@@ -5,9 +5,10 @@ import logging
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
|
||||
import OpenSSL
|
||||
import josepy as jose
|
||||
|
||||
|
||||
from acme import errors
|
||||
|
||||
@@ -130,8 +131,7 @@ def probe_sni(name, host, port=443, timeout=300,
|
||||
context = OpenSSL.SSL.Context(method)
|
||||
context.set_timeout(timeout)
|
||||
|
||||
socket_kwargs = {} if sys.version_info < (2, 7) else {
|
||||
'source_address': source_address}
|
||||
socket_kwargs = {'source_address': source_address}
|
||||
|
||||
host_protocol_agnostic = None if host == '::' or host == '0' else host
|
||||
|
||||
@@ -186,6 +186,15 @@ def make_csr(private_key_pem, domains, must_staple=False):
|
||||
return OpenSSL.crypto.dump_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr)
|
||||
|
||||
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
|
||||
common_name = loaded_cert_or_req.get_subject().CN
|
||||
sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)
|
||||
|
||||
if common_name is None:
|
||||
return sans
|
||||
else:
|
||||
return [common_name] + [d for d in sans if d != common_name]
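The new _pyopenssl_cert_or_req_all_names helper returns the subject common name first (when present) followed by the remaining SANs, which is what lets BackwardsCompatibleClientV2.new_order recover the domain list from a CSR on ACME v1. A small self-contained check of that ordering, building a throwaway CSR with pyOpenSSL; the helper is private API, so this is illustrative only::

    import OpenSSL

    from acme.crypto_util import _pyopenssl_cert_or_req_all_names  # private helper

    key = OpenSSL.crypto.PKey()
    key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)

    req = OpenSSL.crypto.X509Req()
    req.get_subject().CN = 'example.com'
    req.add_extensions([OpenSSL.crypto.X509Extension(
        b'subjectAltName', False, b'DNS:www.example.com, DNS:example.com')])
    req.set_pubkey(key)
    req.sign(key, 'sha256')

    # Common name first, then the SANs with the CN itself de-duplicated.
    assert _pyopenssl_cert_or_req_all_names(req) == ['example.com', 'www.example.com']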
def _pyopenssl_cert_or_req_san(cert_or_req):
|
||||
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
|
||||
|
||||
@@ -271,3 +280,26 @@ def gen_ss_cert(key, domains, not_before=None,
|
||||
cert.set_pubkey(key)
|
||||
cert.sign(key, "sha256")
|
||||
return cert
|
||||
|
||||
def dump_pyopenssl_chain(chain, filetype=OpenSSL.crypto.FILETYPE_PEM):
|
||||
"""Dump certificate chain into a bundle.
|
||||
|
||||
:param list chain: List of `OpenSSL.crypto.X509` (or wrapped in
|
||||
:class:`josepy.util.ComparableX509`).
|
||||
|
||||
:returns: certificate chain bundle
|
||||
:rtype: bytes
|
||||
|
||||
"""
|
||||
# XXX: returns empty string when no chain is available, which
|
||||
# shuts up RenewableCert, but might not be the best solution...
|
||||
|
||||
def _dump_cert(cert):
|
||||
if isinstance(cert, jose.ComparableX509):
|
||||
# pylint: disable=protected-access
|
||||
cert = cert.wrapped
|
||||
return OpenSSL.crypto.dump_certificate(filetype, cert)
|
||||
|
||||
# assumes that OpenSSL.crypto.dump_certificate includes ending
|
||||
# newline character
|
||||
return b"".join(_dump_cert(cert) for cert in chain)
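dump_pyopenssl_chain just concatenates the PEM dump of each element, unwrapping josepy ComparableX509 objects on the way, as the new DumpPyopensslChainTest further down also checks. A minimal round trip with a throwaway self-signed certificate (the certificate content is irrelevant; only the concatenation behaviour is being shown)::

    import josepy as jose
    import OpenSSL

    from acme.crypto_util import dump_pyopenssl_chain

    key = OpenSSL.crypto.PKey()
    key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
    cert = OpenSSL.crypto.X509()
    cert.set_serial_number(1)
    cert.get_subject().CN = 'example.com'
    cert.set_issuer(cert.get_subject())
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(3600)
    cert.set_pubkey(key)
    cert.sign(key, 'sha256')

    # Wrapped and unwrapped certificates may be mixed in the same chain.
    bundle = dump_pyopenssl_chain([cert, jose.ComparableX509(cert)])
    assert bundle == 2 * OpenSSL.crypto.dump_certificate(
        OpenSSL.crypto.FILETYPE_PEM, cert)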
@@ -8,10 +8,10 @@ import unittest
|
||||
import six
|
||||
from six.moves import socketserver  # type: ignore  # pylint: disable=import-error
|
||||
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
|
||||
from acme import errors
|
||||
from acme import jose
|
||||
from acme import test_util
|
||||
|
||||
|
||||
@@ -65,6 +65,30 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
# self.assertRaises(errors.Error, self._probe, b'bar')
|
||||
|
||||
|
||||
class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
|
||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_all_names."""
|
||||
|
||||
@classmethod
|
||||
def _call(cls, loader, name):
|
||||
# pylint: disable=protected-access
|
||||
from acme.crypto_util import _pyopenssl_cert_or_req_all_names
|
||||
return _pyopenssl_cert_or_req_all_names(loader(name))
|
||||
|
||||
def _call_cert(self, name):
|
||||
return self._call(test_util.load_cert, name)
|
||||
|
||||
def test_cert_one_san_no_common(self):
|
||||
self.assertEqual(self._call_cert('cert-nocn.der'),
|
||||
['no-common-name.badssl.com'])
|
||||
|
||||
def test_cert_no_sans_yes_common(self):
|
||||
self.assertEqual(self._call_cert('cert.pem'), ['example.com'])
|
||||
|
||||
def test_cert_two_sans_yes_common(self):
|
||||
self.assertEqual(self._call_cert('cert-san.pem'),
|
||||
['example.com', 'www.example.com'])
|
||||
|
||||
|
||||
class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
|
||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san."""
|
||||
|
||||
@@ -170,9 +194,9 @@ class MakeCSRTest(unittest.TestCase):
|
||||
self.assertTrue(b'--END CERTIFICATE REQUEST--' in csr_pem)
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# In pyopenssl 0.13 (used with TOXENV=py26-oldest and py27-oldest), csr
|
||||
# objects don't have a get_extensions() method, so we skip this test if
|
||||
# the method isn't available.
|
||||
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
|
||||
# have a get_extensions() method, so we skip this test if the method
|
||||
# isn't available.
|
||||
if hasattr(csr, 'get_extensions'):
|
||||
self.assertEquals(len(csr.get_extensions()), 1)
|
||||
self.assertEquals(csr.get_extensions()[0].get_data(),
|
||||
@@ -188,9 +212,9 @@ class MakeCSRTest(unittest.TestCase):
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
|
||||
# In pyopenssl 0.13 (used with TOXENV=py26-oldest and py27-oldest), csr
|
||||
# objects don't have a get_extensions() method, so we skip this test if
|
||||
# the method isn't available.
|
||||
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
|
||||
# have a get_extensions() method, so we skip this test if the method
|
||||
# isn't available.
|
||||
if hasattr(csr, 'get_extensions'):
|
||||
self.assertEquals(len(csr.get_extensions()), 2)
|
||||
# NOTE: Ideally we would filter by the TLS Feature OID, but
|
||||
@@ -201,5 +225,33 @@ class MakeCSRTest(unittest.TestCase):
|
||||
self.assertEqual(len(must_staple_exts), 1,
|
||||
"Expected exactly one Must Staple extension")
|
||||
|
||||
|
||||
class DumpPyopensslChainTest(unittest.TestCase):
|
||||
"""Test for dump_pyopenssl_chain."""
|
||||
|
||||
@classmethod
|
||||
def _call(cls, loaded):
|
||||
# pylint: disable=protected-access
|
||||
from acme.crypto_util import dump_pyopenssl_chain
|
||||
return dump_pyopenssl_chain(loaded)
|
||||
|
||||
def test_dump_pyopenssl_chain(self):
|
||||
names = ['cert.pem', 'cert-san.pem', 'cert-idnsans.pem']
|
||||
loaded = [test_util.load_cert(name) for name in names]
|
||||
length = sum(
|
||||
len(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert))
|
||||
for cert in loaded)
|
||||
self.assertEqual(len(self._call(loaded)), length)
|
||||
|
||||
def test_dump_pyopenssl_chain_wrapped(self):
|
||||
names = ['cert.pem', 'cert-san.pem', 'cert-idnsans.pem']
|
||||
loaded = [test_util.load_cert(name) for name in names]
|
||||
wrap_func = jose.ComparableX509
|
||||
wrapped = [wrap_func(cert) for cert in loaded]
|
||||
dump_func = OpenSSL.crypto.dump_certificate
|
||||
length = sum(len(dump_func(OpenSSL.crypto.FILETYPE_PEM, cert)) for cert in loaded)
|
||||
self.assertEqual(len(self._call(wrapped)), length)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
"""ACME errors."""
|
||||
from acme.jose import errors as jose_errors
|
||||
from josepy import errors as jose_errors
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
@@ -83,6 +83,28 @@ class PollError(ClientError):
|
||||
return '{0}(exhausted={1!r}, updated={2!r})'.format(
|
||||
self.__class__.__name__, self.exhausted, self.updated)
|
||||
|
||||
class ValidationError(Error):
|
||||
"""Error for authorization failures. Contains a list of authorization
|
||||
resources, each of which is invalid and should have an error field.
|
||||
"""
|
||||
def __init__(self, failed_authzrs):
|
||||
self.failed_authzrs = failed_authzrs
|
||||
super(ValidationError, self).__init__()
|
||||
|
||||
class TimeoutError(Error):
|
||||
"""Error for when polling an authorization or an order times out."""
|
||||
|
||||
class IssuanceError(Error):
|
||||
"""Error sent by the server after requesting issuance of a certificate."""
|
||||
|
||||
def __init__(self, error):
|
||||
"""Initialize.
|
||||
|
||||
:param messages.Error error: The error provided by the server.
|
||||
"""
|
||||
self.error = error
|
||||
super(IssuanceError, self).__init__()
|
|
||||
"""Error for when the server returns a 409 (Conflict) HTTP status.
|
||||
|
||||
@@ -93,3 +115,6 @@ class ConflictError(ClientError):
|
||||
self.location = location
|
||||
super(ConflictError, self).__init__()
|
||||
|
||||
|
||||
class WildcardUnsupportedError(Error):
|
||||
"""Error for when a wildcard is requested but is unsupported by ACME CA."""
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
"""ACME JSON fields."""
|
||||
import logging
|
||||
|
||||
import josepy as jose
|
||||
import pyrfc3339
|
||||
|
||||
from acme import jose
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -2,10 +2,9 @@
|
||||
import datetime
|
||||
import unittest
|
||||
|
||||
import josepy as jose
|
||||
import pytz
|
||||
|
||||
from acme import jose
|
||||
|
||||
|
||||
class FixedTest(unittest.TestCase):
|
||||
"""Tests for acme.fields.Fixed."""
|
||||
|
||||
@@ -1,82 +0,0 @@
|
||||
"""Javascript Object Signing and Encryption (jose).
|
||||
|
||||
This package is a Python implementation of the standards developed by
|
||||
IETF `Javascript Object Signing and Encryption (Active WG)`_, in
|
||||
particular the following RFCs:
|
||||
|
||||
- `JSON Web Algorithms (JWA)`_
|
||||
- `JSON Web Key (JWK)`_
|
||||
- `JSON Web Signature (JWS)`_
|
||||
|
||||
|
||||
.. _`Javascript Object Signing and Encryption (Active WG)`:
|
||||
https://tools.ietf.org/wg/jose/
|
||||
|
||||
.. _`JSON Web Algorithms (JWA)`:
|
||||
https://datatracker.ietf.org/doc/draft-ietf-jose-json-web-algorithms/
|
||||
|
||||
.. _`JSON Web Key (JWK)`:
|
||||
https://datatracker.ietf.org/doc/draft-ietf-jose-json-web-key/
|
||||
|
||||
.. _`JSON Web Signature (JWS)`:
|
||||
https://datatracker.ietf.org/doc/draft-ietf-jose-json-web-signature/
|
||||
|
||||
"""
|
||||
from acme.jose.b64 import (
|
||||
b64decode,
|
||||
b64encode,
|
||||
)
|
||||
|
||||
from acme.jose.errors import (
|
||||
DeserializationError,
|
||||
SerializationError,
|
||||
Error,
|
||||
UnrecognizedTypeError,
|
||||
)
|
||||
|
||||
from acme.jose.interfaces import JSONDeSerializable
|
||||
|
||||
from acme.jose.json_util import (
|
||||
Field,
|
||||
JSONObjectWithFields,
|
||||
TypedJSONObjectWithFields,
|
||||
decode_b64jose,
|
||||
decode_cert,
|
||||
decode_csr,
|
||||
decode_hex16,
|
||||
encode_b64jose,
|
||||
encode_cert,
|
||||
encode_csr,
|
||||
encode_hex16,
|
||||
)
|
||||
|
||||
from acme.jose.jwa import (
|
||||
HS256,
|
||||
HS384,
|
||||
HS512,
|
||||
JWASignature,
|
||||
PS256,
|
||||
PS384,
|
||||
PS512,
|
||||
RS256,
|
||||
RS384,
|
||||
RS512,
|
||||
)
|
||||
|
||||
from acme.jose.jwk import (
|
||||
JWK,
|
||||
JWKRSA,
|
||||
)
|
||||
|
||||
from acme.jose.jws import (
|
||||
Header,
|
||||
JWS,
|
||||
Signature,
|
||||
)
|
||||
|
||||
from acme.jose.util import (
|
||||
ComparableX509,
|
||||
ComparableKey,
|
||||
ComparableRSAKey,
|
||||
ImmutableMap,
|
||||
)
|
||||
@@ -1,61 +0,0 @@
|
||||
"""JOSE Base64.
|
||||
|
||||
`JOSE Base64`_ is defined as:
|
||||
|
||||
- URL-safe Base64
|
||||
- padding stripped
|
||||
|
||||
|
||||
.. _`JOSE Base64`:
|
||||
https://tools.ietf.org/html/draft-ietf-jose-json-web-signature-37#appendix-C
|
||||
|
||||
.. Do NOT try to call this module "base64", as it will "shadow" the
|
||||
standard library.
|
||||
|
||||
"""
|
||||
import base64
|
||||
|
||||
import six
|
||||
|
||||
|
||||
def b64encode(data):
|
||||
"""JOSE Base64 encode.
|
||||
|
||||
:param data: Data to be encoded.
|
||||
:type data: `bytes`
|
||||
|
||||
:returns: JOSE Base64 string.
|
||||
:rtype: bytes
|
||||
|
||||
:raises TypeError: if `data` is of incorrect type
|
||||
|
||||
"""
|
||||
if not isinstance(data, six.binary_type):
|
||||
raise TypeError('argument should be {0}'.format(six.binary_type))
|
||||
return base64.urlsafe_b64encode(data).rstrip(b'=')
|
||||
|
||||
|
||||
def b64decode(data):
|
||||
"""JOSE Base64 decode.
|
||||
|
||||
:param data: Base64 string to be decoded. If it's unicode, then
|
||||
only ASCII characters are allowed.
|
||||
:type data: `bytes` or `unicode`
|
||||
|
||||
:returns: Decoded data.
|
||||
:rtype: bytes
|
||||
|
||||
:raises TypeError: if input is of incorrect type
|
||||
:raises ValueError: if input is unicode with non-ASCII characters
|
||||
|
||||
"""
|
||||
if isinstance(data, six.string_types):
|
||||
try:
|
||||
data = data.encode('ascii')
|
||||
except UnicodeEncodeError:
|
||||
raise ValueError(
|
||||
'unicode argument should contain only ASCII characters')
|
||||
elif not isinstance(data, six.binary_type):
|
||||
raise TypeError('argument should be a str or unicode')
|
||||
|
||||
return base64.urlsafe_b64decode(data + b'=' * (4 - (len(data) % 4)))
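JOSE Base64 is just URL-safe Base64 with the '=' padding stripped on encode and re-added on decode, which is exactly what the two helpers above implement. The module is removed in this change in favour of josepy, which exposes the same b64encode/b64decode names (the acme.client test file above already calls jose.b64encode through ``import josepy as jose``); an illustrative round trip using the example strings from the test vectors below::

    import josepy as jose

    encoded = jose.b64encode(b'any carnal pleasure')  # plain base64 would end in '=='
    assert encoded == b'YW55IGNhcm5hbCBwbGVhc3VyZQ'   # padding stripped
    assert jose.b64decode(encoded) == b'any carnal pleasure'
    assert jose.b64decode(u'YQ') == b'a'              # ASCII-only unicode input is accepted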
@@ -1,77 +0,0 @@
|
||||
"""Tests for acme.jose.b64."""
|
||||
import unittest
|
||||
|
||||
import six
|
||||
|
||||
|
||||
# https://en.wikipedia.org/wiki/Base64#Examples
|
||||
B64_PADDING_EXAMPLES = {
|
||||
b'any carnal pleasure.': (b'YW55IGNhcm5hbCBwbGVhc3VyZS4', b'='),
|
||||
b'any carnal pleasure': (b'YW55IGNhcm5hbCBwbGVhc3VyZQ', b'=='),
|
||||
b'any carnal pleasur': (b'YW55IGNhcm5hbCBwbGVhc3Vy', b''),
|
||||
b'any carnal pleasu': (b'YW55IGNhcm5hbCBwbGVhc3U', b'='),
|
||||
b'any carnal pleas': (b'YW55IGNhcm5hbCBwbGVhcw', b'=='),
|
||||
}
|
||||
|
||||
|
||||
B64_URL_UNSAFE_EXAMPLES = {
|
||||
six.int2byte(251) + six.int2byte(239): b'--8',
|
||||
six.int2byte(255) * 2: b'__8',
|
||||
}
|
||||
|
||||
|
||||
class B64EncodeTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.b64.b64encode."""
|
||||
|
||||
@classmethod
|
||||
def _call(cls, data):
|
||||
from acme.jose.b64 import b64encode
|
||||
return b64encode(data)
|
||||
|
||||
def test_empty(self):
|
||||
self.assertEqual(self._call(b''), b'')
|
||||
|
||||
def test_unsafe_url(self):
|
||||
for text, b64 in six.iteritems(B64_URL_UNSAFE_EXAMPLES):
|
||||
self.assertEqual(self._call(text), b64)
|
||||
|
||||
def test_different_paddings(self):
|
||||
for text, (b64, _) in six.iteritems(B64_PADDING_EXAMPLES):
|
||||
self.assertEqual(self._call(text), b64)
|
||||
|
||||
def test_unicode_fails_with_type_error(self):
|
||||
self.assertRaises(TypeError, self._call, u'some unicode')
|
||||
|
||||
|
||||
class B64DecodeTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.b64.b64decode."""
|
||||
|
||||
@classmethod
|
||||
def _call(cls, data):
|
||||
from acme.jose.b64 import b64decode
|
||||
return b64decode(data)
|
||||
|
||||
def test_unsafe_url(self):
|
||||
for text, b64 in six.iteritems(B64_URL_UNSAFE_EXAMPLES):
|
||||
self.assertEqual(self._call(b64), text)
|
||||
|
||||
def test_input_without_padding(self):
|
||||
for text, (b64, _) in six.iteritems(B64_PADDING_EXAMPLES):
|
||||
self.assertEqual(self._call(b64), text)
|
||||
|
||||
def test_input_with_padding(self):
|
||||
for text, (b64, pad) in six.iteritems(B64_PADDING_EXAMPLES):
|
||||
self.assertEqual(self._call(b64 + pad), text)
|
||||
|
||||
def test_unicode_with_ascii(self):
|
||||
self.assertEqual(self._call(u'YQ'), b'a')
|
||||
|
||||
def test_non_ascii_unicode_fails(self):
|
||||
self.assertRaises(ValueError, self._call, u'\u0105')
|
||||
|
||||
def test_type_error_no_unicode_or_bytes(self):
|
||||
self.assertRaises(TypeError, self._call, object())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,35 +0,0 @@
|
||||
"""JOSE errors."""
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
"""Generic JOSE Error."""
|
||||
|
||||
|
||||
class DeserializationError(Error):
|
||||
"""JSON deserialization error."""
|
||||
|
||||
def __str__(self):
|
||||
return "Deserialization error: {0}".format(
|
||||
super(DeserializationError, self).__str__())
|
||||
|
||||
|
||||
class SerializationError(Error):
|
||||
"""JSON serialization error."""
|
||||
|
||||
|
||||
class UnrecognizedTypeError(DeserializationError):
|
||||
"""Unrecognized type error.
|
||||
|
||||
:ivar str typ: The unrecognized type of the JSON object.
|
||||
:ivar jobj: Full JSON object.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, typ, jobj):
|
||||
self.typ = typ
|
||||
self.jobj = jobj
|
||||
super(UnrecognizedTypeError, self).__init__(str(self))
|
||||
|
||||
def __str__(self):
|
||||
return '{0} was not recognized, full message: {1}'.format(
|
||||
self.typ, self.jobj)
|
||||
@@ -1,17 +0,0 @@
|
||||
"""Tests for acme.jose.errors."""
|
||||
import unittest
|
||||
|
||||
|
||||
class UnrecognizedTypeErrorTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
from acme.jose.errors import UnrecognizedTypeError
|
||||
self.error = UnrecognizedTypeError('foo', {'type': 'foo'})
|
||||
|
||||
def test_str(self):
|
||||
self.assertEqual(
|
||||
"foo was not recognized, full message: {'type': 'foo'}",
|
||||
str(self.error))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,216 +0,0 @@
|
||||
"""JOSE interfaces."""
|
||||
import abc
|
||||
import collections
|
||||
import json
|
||||
|
||||
import six
|
||||
|
||||
from acme.jose import errors
|
||||
from acme.jose import util
|
||||
|
||||
# pylint: disable=no-self-argument,no-method-argument,no-init,inherit-non-class
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class JSONDeSerializable(object):
|
||||
# pylint: disable=too-few-public-methods
|
||||
"""Interface for (de)serializable JSON objects.
|
||||
|
||||
Please recall that the standard Python library implements
|
||||
:class:`json.JSONEncoder` and :class:`json.JSONDecoder` that perform
|
||||
translations based on respective :ref:`conversion tables
|
||||
<conversion-table>` that look pretty much like the one below (for
|
||||
complete tables see relevant Python documentation):
|
||||
|
||||
.. _conversion-table:
|
||||
|
||||
====== ======
|
||||
JSON Python
|
||||
====== ======
|
||||
object dict
|
||||
... ...
|
||||
====== ======
|
||||
|
||||
While the above **conversion table** is about translation of JSON
|
||||
documents to/from the basic Python types only,
|
||||
:class:`JSONDeSerializable` introduces the following two concepts:
|
||||
|
||||
serialization
|
||||
Turning an arbitrary Python object into Python object that can
|
||||
be encoded into a JSON document. **Full serialization** produces
|
||||
a Python object composed of only basic types as required by the
|
||||
:ref:`conversion table <conversion-table>`. **Partial
|
||||
serialization** (accomplished by :meth:`to_partial_json`)
|
||||
produces a Python object that might also be built from other
|
||||
:class:`JSONDeSerializable` objects.
|
||||
|
||||
deserialization
|
||||
Turning a decoded Python object (necessarily one of the basic
|
||||
types as required by the :ref:`conversion table
|
||||
<conversion-table>`) into an arbitrary Python object.
|
||||
|
||||
Serialization produces **serialized object** ("partially serialized
|
||||
object" or "fully serialized object" for partial and full
|
||||
serialization respectively) and deserialization produces
|
||||
**deserialized object**, both usually denoted in the source code as
|
||||
``jobj``.
|
||||
|
||||
Wording in the official Python documentation might be confusing
|
||||
after reading the above, but in the light of those definitions, one
|
||||
can view :meth:`json.JSONDecoder.decode` as decoder and
|
||||
deserializer of basic types, :meth:`json.JSONEncoder.default` as
|
||||
serializer of basic types, :meth:`json.JSONEncoder.encode` as
|
||||
serializer and encoder of basic types.
|
||||
|
||||
One could extend :mod:`json` to support arbitrary object
|
||||
(de)serialization either by:
|
||||
|
||||
- overriding :meth:`json.JSONDecoder.decode` and
|
||||
:meth:`json.JSONEncoder.default` in subclasses
|
||||
|
||||
- or passing ``object_hook`` argument (or ``object_hook_pairs``)
|
||||
to :func:`json.load`/:func:`json.loads` or ``default`` argument
|
||||
for :func:`json.dump`/:func:`json.dumps`.
|
||||
|
||||
Interestingly, ``default`` is required to perform only partial
|
||||
serialization, as :func:`json.dumps` applies ``default``
|
||||
recursively. This is the idea behind making :meth:`to_partial_json`
|
||||
produce only partial serialization, while providing custom
|
||||
:meth:`json_dumps` that dumps with ``default`` set to
|
||||
:meth:`json_dump_default`.
|
||||
|
||||
To make further documentation a bit more concrete, please, consider
|
||||
the following imaginatory implementation example::
|
||||
|
||||
class Foo(JSONDeSerializable):
|
||||
def to_partial_json(self):
|
||||
return 'foo'
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
return Foo()
|
||||
|
||||
class Bar(JSONDeSerializable):
|
||||
def to_partial_json(self):
|
||||
return [Foo(), Foo()]
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
return Bar()
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def to_partial_json(self): # pragma: no cover
|
||||
"""Partially serialize.
|
||||
|
||||
Following the example, **partial serialization** means the following::
|
||||
|
||||
assert isinstance(Bar().to_partial_json()[0], Foo)
|
||||
assert isinstance(Bar().to_partial_json()[1], Foo)
|
||||
|
||||
# in particular...
|
||||
assert Bar().to_partial_json() != ['foo', 'foo']
|
||||
|
||||
:raises acme.jose.errors.SerializationError:
|
||||
in case of any serialization error.
|
||||
:returns: Partially serializable object.
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def to_json(self):
|
||||
"""Fully serialize.
|
||||
|
||||
Again, following the example from before, **full serialization**
|
||||
means the following::
|
||||
|
||||
assert Bar().to_json() == ['foo', 'foo']
|
||||
|
||||
:raises acme.jose.errors.SerializationError:
|
||||
in case of any serialization error.
|
||||
:returns: Fully serialized object.
|
||||
|
||||
"""
|
||||
def _serialize(obj):
|
||||
if isinstance(obj, JSONDeSerializable):
|
||||
return _serialize(obj.to_partial_json())
|
||||
if isinstance(obj, six.string_types): # strings are Sequence
|
||||
return obj
|
||||
elif isinstance(obj, list):
|
||||
return [_serialize(subobj) for subobj in obj]
|
||||
elif isinstance(obj, collections.Sequence):
|
||||
# default to tuple, otherwise Mapping could get
|
||||
# unhashable list
|
||||
return tuple(_serialize(subobj) for subobj in obj)
|
||||
elif isinstance(obj, collections.Mapping):
|
||||
return dict((_serialize(key), _serialize(value))
|
||||
for key, value in six.iteritems(obj))
|
||||
else:
|
||||
return obj
|
||||
|
||||
return _serialize(self)
|
||||
|
||||
@util.abstractclassmethod
|
||||
def from_json(cls, jobj): # pylint: disable=unused-argument
|
||||
"""Deserialize a decoded JSON document.
|
||||
|
||||
:param jobj: Python object, composed of only other basic data
|
||||
types, as decoded from JSON document. Not necessarily
|
||||
:class:`dict` (as decoded from "JSON object" document).
|
||||
|
||||
:raises acme.jose.errors.DeserializationError:
|
||||
if decoding was unsuccessful, e.g. in case of unparseable
|
||||
X509 certificate, or wrong padding in JOSE base64 encoded
|
||||
string, etc.
|
||||
|
||||
"""
|
||||
# TypeError: Can't instantiate abstract class <cls> with
|
||||
# abstract methods from_json, to_partial_json
|
||||
return cls() # pylint: disable=abstract-class-instantiated
|
||||
|
||||
@classmethod
|
||||
def json_loads(cls, json_string):
|
||||
"""Deserialize from JSON document string."""
|
||||
try:
|
||||
loads = json.loads(json_string)
|
||||
except ValueError as error:
|
||||
raise errors.DeserializationError(error)
|
||||
return cls.from_json(loads)
|
||||
|
||||
def json_dumps(self, **kwargs):
|
||||
"""Dump to JSON string using proper serializer.
|
||||
|
||||
:returns: JSON document string.
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
return json.dumps(self, default=self.json_dump_default, **kwargs)
|
||||
|
||||
def json_dumps_pretty(self):
|
||||
"""Dump the object to pretty JSON document string.
|
||||
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
return self.json_dumps(sort_keys=True, indent=4, separators=(',', ': '))
|
||||
|
||||
@classmethod
|
||||
def json_dump_default(cls, python_object):
|
||||
"""Serialize Python object.
|
||||
|
||||
This function is meant to be passed as ``default`` to
|
||||
:func:`json.dump` or :func:`json.dumps`. They call
|
||||
``default(python_object)`` only for non-basic Python types, so
|
||||
this function necessarily raises :class:`TypeError` if
|
||||
``python_object`` is not an instance of
|
||||
:class:`JSONDeSerializable`.
|
||||
|
||||
Please read the class docstring for more information.
|
||||
|
||||
"""
|
||||
if isinstance(python_object, JSONDeSerializable):
|
||||
return python_object.to_partial_json()
|
||||
else: # this branch is necessary, cannot just "return"
|
||||
raise TypeError(repr(python_object) + ' is not JSON serializable')
|
||||
@@ -1,114 +0,0 @@
|
||||
"""Tests for acme.jose.interfaces."""
|
||||
import unittest
|
||||
|
||||
|
||||
class JSONDeSerializableTest(unittest.TestCase):
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
def setUp(self):
|
||||
from acme.jose.interfaces import JSONDeSerializable
|
||||
|
||||
# pylint: disable=missing-docstring,invalid-name
|
||||
|
||||
class Basic(JSONDeSerializable):
|
||||
def __init__(self, v):
|
||||
self.v = v
|
||||
|
||||
def to_partial_json(self):
|
||||
return self.v
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
return cls(jobj)
|
||||
|
||||
class Sequence(JSONDeSerializable):
|
||||
def __init__(self, x, y):
|
||||
self.x = x
|
||||
self.y = y
|
||||
|
||||
def to_partial_json(self):
|
||||
return [self.x, self.y]
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
return cls(
|
||||
Basic.from_json(jobj[0]), Basic.from_json(jobj[1]))
|
||||
|
||||
class Mapping(JSONDeSerializable):
|
||||
def __init__(self, x, y):
|
||||
self.x = x
|
||||
self.y = y
|
||||
|
||||
def to_partial_json(self):
|
||||
return {self.x: self.y}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
pass # pragma: no cover
|
||||
|
||||
self.basic1 = Basic('foo1')
|
||||
self.basic2 = Basic('foo2')
|
||||
self.seq = Sequence(self.basic1, self.basic2)
|
||||
self.mapping = Mapping(self.basic1, self.basic2)
|
||||
self.nested = Basic([[self.basic1]])
|
||||
self.tuple = Basic(('foo',))
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
self.Basic = Basic
|
||||
self.Sequence = Sequence
|
||||
self.Mapping = Mapping
|
||||
|
||||
def test_to_json_sequence(self):
|
||||
self.assertEqual(self.seq.to_json(), ['foo1', 'foo2'])
|
||||
|
||||
def test_to_json_mapping(self):
|
||||
self.assertEqual(self.mapping.to_json(), {'foo1': 'foo2'})
|
||||
|
||||
def test_to_json_other(self):
|
||||
mock_value = object()
|
||||
self.assertTrue(self.Basic(mock_value).to_json() is mock_value)
|
||||
|
||||
def test_to_json_nested(self):
|
||||
self.assertEqual(self.nested.to_json(), [['foo1']])
|
||||
|
||||
def test_to_json(self):
|
||||
self.assertEqual(self.tuple.to_json(), (('foo', )))
|
||||
|
||||
def test_from_json_not_implemented(self):
|
||||
from acme.jose.interfaces import JSONDeSerializable
|
||||
self.assertRaises(TypeError, JSONDeSerializable.from_json, 'xxx')
|
||||
|
||||
def test_json_loads(self):
|
||||
seq = self.Sequence.json_loads('["foo1", "foo2"]')
|
||||
self.assertTrue(isinstance(seq, self.Sequence))
|
||||
self.assertTrue(isinstance(seq.x, self.Basic))
|
||||
self.assertTrue(isinstance(seq.y, self.Basic))
|
||||
self.assertEqual(seq.x.v, 'foo1')
|
||||
self.assertEqual(seq.y.v, 'foo2')
|
||||
|
||||
def test_json_dumps(self):
|
||||
self.assertEqual('["foo1", "foo2"]', self.seq.json_dumps())
|
||||
|
||||
def test_json_dumps_pretty(self):
|
||||
self.assertEqual(self.seq.json_dumps_pretty(),
|
||||
'[\n "foo1",\n "foo2"\n]')
|
||||
|
||||
def test_json_dump_default(self):
|
||||
from acme.jose.interfaces import JSONDeSerializable
|
||||
|
||||
self.assertEqual(
|
||||
'foo1', JSONDeSerializable.json_dump_default(self.basic1))
|
||||
|
||||
jobj = JSONDeSerializable.json_dump_default(self.seq)
|
||||
self.assertEqual(len(jobj), 2)
|
||||
self.assertTrue(jobj[0] is self.basic1)
|
||||
self.assertTrue(jobj[1] is self.basic2)
|
||||
|
||||
def test_json_dump_default_type_error(self):
|
||||
from acme.jose.interfaces import JSONDeSerializable
|
||||
self.assertRaises(
|
||||
TypeError, JSONDeSerializable.json_dump_default, object())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,485 +0,0 @@
|
||||
"""JSON (de)serialization framework.
|
||||
|
||||
The framework presented here is somewhat based on `Go's "json" package`_
|
||||
(especially the ``omitempty`` functionality).
|
||||
|
||||
.. _`Go's "json" package`: http://golang.org/pkg/encoding/json/
|
||||
|
||||
"""
|
||||
import abc
|
||||
import binascii
|
||||
import logging
|
||||
|
||||
import OpenSSL
|
||||
import six
|
||||
|
||||
from acme.jose import b64
|
||||
from acme.jose import errors
|
||||
from acme.jose import interfaces
|
||||
from acme.jose import util
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Field(object):
|
||||
"""JSON object field.
|
||||
|
||||
:class:`Field` is meant to be used together with
|
||||
:class:`JSONObjectWithFields`.
|
||||
|
||||
``encoder`` (``decoder``) is a callable that accepts a single
|
||||
parameter, i.e. a value to be encoded (decoded), and returns the
|
||||
serialized (deserialized) value. In case of errors it should raise
|
||||
:class:`~acme.jose.errors.SerializationError`
|
||||
(:class:`~acme.jose.errors.DeserializationError`).
|
||||
|
||||
Note that ``encoder`` should perform partial serialization only.
|
||||
|
||||
:ivar str json_name: Name of the field when encoded to JSON.
|
||||
:ivar default: Default value (used when not present in JSON object).
|
||||
:ivar bool omitempty: If ``True`` and the field value is empty, then
|
||||
it will not be included in the serialized JSON object, and
|
||||
``default`` will be used for deserialization. Otherwise, if ``False``,
|
||||
field is considered as required, value will always be included in the
|
||||
serialized JSON object, and it must also be present when
|
||||
deserializing.
|
||||
|
||||
"""
|
||||
__slots__ = ('json_name', 'default', 'omitempty', 'fdec', 'fenc')
|
||||
|
||||
def __init__(self, json_name, default=None, omitempty=False,
|
||||
decoder=None, encoder=None):
|
||||
# pylint: disable=too-many-arguments
|
||||
self.json_name = json_name
|
||||
self.default = default
|
||||
self.omitempty = omitempty
|
||||
|
||||
self.fdec = self.default_decoder if decoder is None else decoder
|
||||
self.fenc = self.default_encoder if encoder is None else encoder
|
||||
|
||||
@classmethod
|
||||
def _empty(cls, value):
|
||||
"""Is the provided value considered "empty" for this field?
|
||||
|
||||
This is useful for subclasses that might want to override the
|
||||
definition of being empty, e.g. for some more exotic data types.
|
||||
|
||||
"""
|
||||
return not isinstance(value, bool) and not value
|
||||
|
||||
def omit(self, value):
|
||||
"""Omit the value in output?"""
|
||||
return self._empty(value) and self.omitempty
|
||||
|
||||
def _update_params(self, **kwargs):
|
||||
current = dict(json_name=self.json_name, default=self.default,
|
||||
omitempty=self.omitempty,
|
||||
decoder=self.fdec, encoder=self.fenc)
|
||||
current.update(kwargs)
|
||||
return type(self)(**current) # pylint: disable=star-args
|
||||
|
||||
def decoder(self, fdec):
|
||||
"""Descriptor to change the decoder on JSON object field."""
|
||||
return self._update_params(decoder=fdec)
|
||||
|
||||
def encoder(self, fenc):
|
||||
"""Descriptor to change the encoder on JSON object field."""
|
||||
return self._update_params(encoder=fenc)
|
||||
|
||||
def decode(self, value):
|
||||
"""Decode a value, optionally with context JSON object."""
|
||||
return self.fdec(value)
|
||||
|
||||
def encode(self, value):
|
||||
"""Encode a value, optionally with context JSON object."""
|
||||
return self.fenc(value)
|
||||
|
||||
@classmethod
|
||||
def default_decoder(cls, value):
|
||||
"""Default decoder.
|
||||
|
||||
Recursively deserialize into immutable types (
|
||||
:class:`acme.jose.util.frozendict` instead of
|
||||
:func:`dict`, :func:`tuple` instead of :func:`list`).
|
||||
|
||||
"""
|
||||
# base cases for different types returned by json.loads
|
||||
if isinstance(value, list):
|
||||
return tuple(cls.default_decoder(subvalue) for subvalue in value)
|
||||
elif isinstance(value, dict):
|
||||
return util.frozendict(
|
||||
dict((cls.default_decoder(key), cls.default_decoder(value))
|
||||
for key, value in six.iteritems(value)))
|
||||
else: # integer or string
|
||||
return value
|
||||
|
||||
@classmethod
|
||||
def default_encoder(cls, value):
|
||||
"""Default (passthrough) encoder."""
|
||||
# field.to_partial_json() is no good as encoder has to do partial
|
||||
# serialization only
|
||||
return value
|
||||
|
||||
|
||||
class JSONObjectWithFieldsMeta(abc.ABCMeta):
|
||||
"""Metaclass for :class:`JSONObjectWithFields` and its subclasses.
|
||||
|
||||
It makes sure that, for any class ``cls`` with ``__metaclass__``
|
||||
set to ``JSONObjectWithFieldsMeta``:
|
||||
|
||||
1. All fields (attributes of type :class:`Field`) in the class
|
||||
definition are moved to the ``cls._fields`` dictionary, where
|
||||
keys are field attribute names and values are fields themselves.
|
||||
|
||||
2. ``cls.__slots__`` is extended by all field attribute names
|
||||
(i.e. not :attr:`Field.json_name`). Original ``cls.__slots__``
|
||||
are stored in ``cls._orig_slots``.
|
||||
|
||||
In a consequence, for a field attribute name ``some_field``,
|
||||
``cls.some_field`` will be a slot descriptor and not an instance
|
||||
of :class:`Field`. For example::
|
||||
|
||||
some_field = Field('someField', default=())
|
||||
|
||||
class Foo(object):
|
||||
__metaclass__ = JSONObjectWithFieldsMeta
|
||||
__slots__ = ('baz',)
|
||||
some_field = some_field
|
||||
|
||||
assert Foo.__slots__ == ('some_field', 'baz')
|
||||
assert Foo._orig_slots == ()
|
||||
assert Foo.some_field is not Field
|
||||
|
||||
assert Foo._fields.keys() == ['some_field']
|
||||
assert Foo._fields['some_field'] is some_field
|
||||
|
||||
As an implementation note, this metaclass inherits from
|
||||
:class:`abc.ABCMeta` (and not the usual :class:`type`) to mitigate
|
||||
the metaclass conflict (:class:`ImmutableMap` and
|
||||
:class:`JSONDeSerializable`, parents of :class:`JSONObjectWithFields`,
|
||||
use :class:`abc.ABCMeta` as its metaclass).
|
||||
|
||||
"""
|
||||
|
||||
def __new__(mcs, name, bases, dikt):
|
||||
fields = {}
|
||||
|
||||
for base in bases:
|
||||
fields.update(getattr(base, '_fields', {}))
|
||||
# Do not reorder, this class might override fields from base classes!
|
||||
for key, value in tuple(six.iteritems(dikt)):
|
||||
# not six.iterkeys() (in-place edit!)
|
||||
if isinstance(value, Field):
|
||||
fields[key] = dikt.pop(key)
|
||||
|
||||
dikt['_orig_slots'] = dikt.get('__slots__', ())
|
||||
dikt['__slots__'] = tuple(
|
||||
list(dikt['_orig_slots']) + list(six.iterkeys(fields)))
|
||||
dikt['_fields'] = fields
|
||||
|
||||
return abc.ABCMeta.__new__(mcs, name, bases, dikt)
|
||||
|
||||
|
||||
@six.add_metaclass(JSONObjectWithFieldsMeta)
|
||||
class JSONObjectWithFields(util.ImmutableMap, interfaces.JSONDeSerializable):
|
||||
# pylint: disable=too-few-public-methods
|
||||
"""JSON object with fields.
|
||||
|
||||
Example::
|
||||
|
||||
class Foo(JSONObjectWithFields):
|
||||
bar = Field('Bar')
|
||||
empty = Field('Empty', omitempty=True)
|
||||
|
||||
@bar.encoder
|
||||
def bar(value):
|
||||
return value + 'bar'
|
||||
|
||||
@bar.decoder
|
||||
def bar(value):
|
||||
if not value.endswith('bar'):
|
||||
raise errors.DeserializationError('No bar suffix!')
|
||||
return value[:-3]
|
||||
|
||||
assert Foo(bar='baz').to_partial_json() == {'Bar': 'bazbar'}
|
||||
assert Foo.from_json({'Bar': 'bazbar'}) == Foo(bar='baz')
|
||||
assert (Foo.from_json({'Bar': 'bazbar', 'Empty': '!'})
|
||||
== Foo(bar='baz', empty='!'))
|
||||
assert Foo(bar='baz').bar == 'baz'
|
||||
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def _defaults(cls):
|
||||
"""Get default fields values."""
|
||||
return dict([(slot, field.default) for slot, field
|
||||
in six.iteritems(cls._fields)])
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
# pylint: disable=star-args
|
||||
super(JSONObjectWithFields, self).__init__(
|
||||
**(dict(self._defaults(), **kwargs)))
|
||||
|
||||
def encode(self, name):
|
||||
"""Encode a single field.
|
||||
|
||||
:param str name: Name of the field to be encoded.
|
||||
|
||||
:raises errors.SerializationError: if field cannot be serialized
|
||||
:raises errors.Error: if field could not be found
|
||||
|
||||
"""
|
||||
try:
|
||||
field = self._fields[name]
|
||||
except KeyError:
|
||||
raise errors.Error("Field not found: {0}".format(name))
|
||||
|
||||
return field.encode(getattr(self, name))
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
"""Serialize fields to JSON."""
|
||||
jobj = {}
|
||||
omitted = set()
|
||||
for slot, field in six.iteritems(self._fields):
|
||||
value = getattr(self, slot)
|
||||
|
||||
if field.omit(value):
|
||||
omitted.add((slot, value))
|
||||
else:
|
||||
try:
|
||||
jobj[field.json_name] = field.encode(value)
|
||||
except errors.SerializationError as error:
|
||||
raise errors.SerializationError(
|
||||
'Could not encode {0} ({1}): {2}'.format(
|
||||
slot, value, error))
|
||||
return jobj
|
||||
|
||||
def to_partial_json(self):
|
||||
return self.fields_to_partial_json()
|
||||
|
||||
@classmethod
|
||||
def _check_required(cls, jobj):
|
||||
missing = set()
|
||||
for _, field in six.iteritems(cls._fields):
|
||||
if not field.omitempty and field.json_name not in jobj:
|
||||
missing.add(field.json_name)
|
||||
|
||||
if missing:
|
||||
raise errors.DeserializationError(
|
||||
'The following fields are required: {0}'.format(
|
||||
','.join(missing)))
|
||||
|
||||
@classmethod
|
||||
def fields_from_json(cls, jobj):
|
||||
"""Deserialize fields from JSON."""
|
||||
cls._check_required(jobj)
|
||||
fields = {}
|
||||
for slot, field in six.iteritems(cls._fields):
|
||||
if field.json_name not in jobj and field.omitempty:
|
||||
fields[slot] = field.default
|
||||
else:
|
||||
value = jobj[field.json_name]
|
||||
try:
|
||||
fields[slot] = field.decode(value)
|
||||
except errors.DeserializationError as error:
|
||||
raise errors.DeserializationError(
|
||||
'Could not decode {0!r} ({1!r}): {2}'.format(
|
||||
slot, value, error))
|
||||
return fields
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
return cls(**cls.fields_from_json(jobj))
|
||||
|
||||
|
||||
def encode_b64jose(data):
|
||||
"""Encode JOSE Base-64 field.
|
||||
|
||||
:param bytes data:
|
||||
:rtype: `unicode`
|
||||
|
||||
"""
|
||||
# b64encode produces ASCII characters only
|
||||
return b64.b64encode(data).decode('ascii')
|
||||
|
||||
|
||||
def decode_b64jose(data, size=None, minimum=False):
|
||||
"""Decode JOSE Base-64 field.
|
||||
|
||||
:param unicode data:
|
||||
:param int size: Required length (after decoding).
|
||||
:param bool minimum: If ``True``, then `size` will be treated as
|
||||
minimum required length, as opposed to exact equality.
|
||||
|
||||
:rtype: bytes
|
||||
|
||||
"""
|
||||
error_cls = TypeError if six.PY2 else binascii.Error
|
||||
try:
|
||||
decoded = b64.b64decode(data.encode())
|
||||
except error_cls as error:
|
||||
raise errors.DeserializationError(error)
|
||||
|
||||
    if size is not None and ((not minimum and len(decoded) != size) or
                             (minimum and len(decoded) < size)):
        raise errors.DeserializationError(
            "Expected {0}{1} bytes, got {2}".format(
                "at least " if minimum else "exactly ", size, len(decoded)))
|
||||
|
||||
return decoded
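# Illustrative sketch (not part of the original module): the JOSE Base-64
# helpers round-trip bytes through unpadded URL-safe Base64; the values match
# DeEncodersTest in the accompanying test module.
def _example_b64jose():
    """Round-trip encode_b64jose/decode_b64jose and check size handling."""
    assert encode_b64jose(b'x') == u'eA'
    assert decode_b64jose(u'eA') == b'x'
    assert decode_b64jose(u'Zm9v', size=3) == b'foo'
    # with minimum=True, size is a lower bound rather than an exact length
    assert decode_b64jose(u'Zm9v', size=2, minimum=True) == b'foo'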
|
||||
|
||||
|
||||
def encode_hex16(value):
|
||||
"""Hexlify.
|
||||
|
||||
:param bytes value:
|
||||
:rtype: unicode
|
||||
|
||||
"""
|
||||
return binascii.hexlify(value).decode()
|
||||
|
||||
|
||||
def decode_hex16(value, size=None, minimum=False):
|
||||
"""Decode hexlified field.
|
||||
|
||||
:param unicode value:
|
||||
:param int size: Required length (after decoding).
|
||||
:param bool minimum: If ``True``, then `size` will be treated as
|
||||
minimum required length, as opposed to exact equality.
|
||||
|
||||
:rtype: bytes
|
||||
|
||||
"""
|
||||
value = value.encode()
|
||||
if size is not None and ((not minimum and len(value) != size * 2) or
|
||||
(minimum and len(value) < size * 2)):
|
||||
raise errors.DeserializationError()
|
||||
error_cls = TypeError if six.PY2 else binascii.Error
|
||||
try:
|
||||
return binascii.unhexlify(value)
|
||||
except error_cls as error:
|
||||
raise errors.DeserializationError(error)
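# Illustrative sketch (not part of the original module): hexlified fields
# round-trip through encode_hex16/decode_hex16, as in DeEncodersTest.
def _example_hex16():
    """Round-trip the hex helpers."""
    assert encode_hex16(b'foo') == u'666f6f'
    assert decode_hex16(u'666f6f') == b'foo'
    assert decode_hex16(u'666f6f', size=2, minimum=True) == b'foo'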
|
||||
|
||||
|
||||
def encode_cert(cert):
|
||||
"""Encode certificate as JOSE Base-64 DER.
|
||||
|
||||
:type cert: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
|
||||
:rtype: unicode
|
||||
|
||||
"""
|
||||
return encode_b64jose(OpenSSL.crypto.dump_certificate(
|
||||
OpenSSL.crypto.FILETYPE_ASN1, cert.wrapped))
|
||||
|
||||
|
||||
def decode_cert(b64der):
|
||||
"""Decode JOSE Base-64 DER-encoded certificate.
|
||||
|
||||
:param unicode b64der:
|
||||
:rtype: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
|
||||
|
||||
"""
|
||||
try:
|
||||
return util.ComparableX509(OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_ASN1, decode_b64jose(b64der)))
|
||||
except OpenSSL.crypto.Error as error:
|
||||
raise errors.DeserializationError(error)
|
||||
|
||||
|
||||
def encode_csr(csr):
|
||||
"""Encode CSR as JOSE Base-64 DER.
|
||||
|
||||
:type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
|
||||
:rtype: unicode
|
||||
|
||||
"""
|
||||
return encode_b64jose(OpenSSL.crypto.dump_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_ASN1, csr.wrapped))
|
||||
|
||||
|
||||
def decode_csr(b64der):
|
||||
"""Decode JOSE Base-64 DER-encoded CSR.
|
||||
|
||||
:param unicode b64der:
|
||||
:rtype: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
|
||||
|
||||
"""
|
||||
try:
|
||||
return util.ComparableX509(OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_ASN1, decode_b64jose(b64der)))
|
||||
except OpenSSL.crypto.Error as error:
|
||||
raise errors.DeserializationError(error)
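# Illustrative sketch (not part of the original module; assumes the cert.pem
# and csr.pem vectors from the acme test suite): certificates and CSRs travel
# as JOSE Base-64 encoded DER and round-trip through these helpers.
def _example_cert_csr_roundtrip():
    """Round-trip a certificate and a CSR through the JOSE encoders."""
    from acme import test_util
    cert = test_util.load_comparable_cert('cert.pem')
    csr = test_util.load_comparable_csr('csr.pem')
    assert decode_cert(encode_cert(cert)) == cert
    assert decode_csr(encode_csr(csr)) == csr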
|
||||
|
||||
|
||||
class TypedJSONObjectWithFields(JSONObjectWithFields):
|
||||
"""JSON object with type."""
|
||||
|
||||
typ = NotImplemented
|
||||
"""Type of the object. Subclasses must override."""
|
||||
|
||||
type_field_name = "type"
|
||||
"""Field name used to distinguish different object types.
|
||||
|
||||
Subclasses will probably have to override this.
|
||||
|
||||
"""
|
||||
|
||||
TYPES = NotImplemented
|
||||
"""Types registered for JSON deserialization"""
|
||||
|
||||
@classmethod
|
||||
def register(cls, type_cls, typ=None):
|
||||
"""Register class for JSON deserialization."""
|
||||
typ = type_cls.typ if typ is None else typ
|
||||
cls.TYPES[typ] = type_cls
|
||||
return type_cls
|
||||
|
||||
@classmethod
|
||||
def get_type_cls(cls, jobj):
|
||||
"""Get the registered class for ``jobj``."""
|
||||
if cls in six.itervalues(cls.TYPES):
|
||||
if cls.type_field_name not in jobj:
|
||||
raise errors.DeserializationError(
|
||||
"Missing type field ({0})".format(cls.type_field_name))
|
||||
# cls is already registered type_cls, force to use it
|
||||
# so that, e.g. Revocation.from_json(jobj) fails if
|
||||
# jobj["type"] != "revocation".
|
||||
return cls
|
||||
|
||||
if not isinstance(jobj, dict):
|
||||
raise errors.DeserializationError(
|
||||
"{0} is not a dictionary object".format(jobj))
|
||||
try:
|
||||
typ = jobj[cls.type_field_name]
|
||||
except KeyError:
|
||||
raise errors.DeserializationError("missing type field")
|
||||
|
||||
try:
|
||||
return cls.TYPES[typ]
|
||||
except KeyError:
|
||||
raise errors.UnrecognizedTypeError(typ, jobj)
|
||||
|
||||
def to_partial_json(self):
|
||||
"""Get JSON serializable object.
|
||||
|
||||
:returns: Serializable JSON object representing ACME typed object.
|
||||
:meth:`validate` will almost certainly not work, due to reasons
|
||||
explained in :class:`acme.interfaces.IJSONSerializable`.
|
||||
:rtype: dict
|
||||
|
||||
"""
|
||||
jobj = self.fields_to_partial_json()
|
||||
jobj[self.type_field_name] = self.typ
|
||||
return jobj
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
"""Deserialize ACME object from valid JSON object.
|
||||
|
||||
:raises acme.errors.UnrecognizedTypeError: if type
|
||||
of the ACME object has not been registered.
|
||||
|
||||
"""
|
||||
# make sure subclasses don't cause infinite recursive from_json calls
|
||||
type_cls = cls.get_type_cls(jobj)
|
||||
return type_cls(**type_cls.fields_from_json(jobj))
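# Illustrative sketch (not part of the original module; the class names are
# hypothetical): a common parent declares the TYPES registry, concrete
# subclasses register themselves, and from_json dispatches on the type field.
def _example_typed_registry():
    """Register a typed subclass and dispatch deserialization on 'type'."""
    class Animal(TypedJSONObjectWithFields):
        TYPES = {}
        type_field_name = 'type'

    @Animal.register
    class Dog(Animal):
        typ = 'dog'
        name = Field('name')

    assert Dog(name='rex').to_partial_json() == {'type': 'dog', 'name': 'rex'}
    assert Animal.from_json({'type': 'dog', 'name': 'rex'}) == Dog(name='rex')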
|
||||
@@ -1,381 +0,0 @@
|
||||
"""Tests for acme.jose.json_util."""
|
||||
import itertools
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
import six
|
||||
|
||||
from acme import test_util
|
||||
|
||||
from acme.jose import errors
|
||||
from acme.jose import interfaces
|
||||
from acme.jose import util
|
||||
|
||||
|
||||
CERT = test_util.load_comparable_cert('cert.pem')
|
||||
CSR = test_util.load_comparable_csr('csr.pem')
|
||||
|
||||
|
||||
class FieldTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.json_util.Field."""
|
||||
|
||||
def test_no_omit_boolean(self):
|
||||
from acme.jose.json_util import Field
|
||||
for default, omitempty, value in itertools.product(
|
||||
[True, False], [True, False], [True, False]):
|
||||
self.assertFalse(
|
||||
Field("foo", default=default, omitempty=omitempty).omit(value))
|
||||
|
||||
def test_descriptors(self):
|
||||
mock_value = mock.MagicMock()
|
||||
|
||||
# pylint: disable=missing-docstring
|
||||
|
||||
def decoder(unused_value):
|
||||
return 'd'
|
||||
|
||||
def encoder(unused_value):
|
||||
return 'e'
|
||||
|
||||
from acme.jose.json_util import Field
|
||||
field = Field('foo')
|
||||
|
||||
field = field.encoder(encoder)
|
||||
self.assertEqual('e', field.encode(mock_value))
|
||||
|
||||
field = field.decoder(decoder)
|
||||
self.assertEqual('e', field.encode(mock_value))
|
||||
self.assertEqual('d', field.decode(mock_value))
|
||||
|
||||
def test_default_encoder_is_partial(self):
|
||||
class MockField(interfaces.JSONDeSerializable):
|
||||
# pylint: disable=missing-docstring
|
||||
def to_partial_json(self):
|
||||
return 'foo' # pragma: no cover
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
pass # pragma: no cover
|
||||
mock_field = MockField()
|
||||
|
||||
from acme.jose.json_util import Field
|
||||
self.assertTrue(Field.default_encoder(mock_field) is mock_field)
|
||||
# in particular...
|
||||
self.assertNotEqual('foo', Field.default_encoder(mock_field))
|
||||
|
||||
def test_default_encoder_passthrough(self):
|
||||
mock_value = mock.MagicMock()
|
||||
from acme.jose.json_util import Field
|
||||
self.assertTrue(Field.default_encoder(mock_value) is mock_value)
|
||||
|
||||
def test_default_decoder_list_to_tuple(self):
|
||||
from acme.jose.json_util import Field
|
||||
self.assertEqual((1, 2, 3), Field.default_decoder([1, 2, 3]))
|
||||
|
||||
def test_default_decoder_dict_to_frozendict(self):
|
||||
from acme.jose.json_util import Field
|
||||
obj = Field.default_decoder({'x': 2})
|
||||
self.assertTrue(isinstance(obj, util.frozendict))
|
||||
self.assertEqual(obj, util.frozendict(x=2))
|
||||
|
||||
def test_default_decoder_passthrough(self):
|
||||
mock_value = mock.MagicMock()
|
||||
from acme.jose.json_util import Field
|
||||
self.assertTrue(Field.default_decoder(mock_value) is mock_value)
|
||||
|
||||
|
||||
class JSONObjectWithFieldsMetaTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.json_util.JSONObjectWithFieldsMeta."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.jose.json_util import Field
|
||||
from acme.jose.json_util import JSONObjectWithFieldsMeta
|
||||
self.field = Field('Baz')
|
||||
self.field2 = Field('Baz2')
|
||||
# pylint: disable=invalid-name,missing-docstring,too-few-public-methods
|
||||
# pylint: disable=blacklisted-name
|
||||
|
||||
@six.add_metaclass(JSONObjectWithFieldsMeta)
|
||||
class A(object):
|
||||
__slots__ = ('bar',)
|
||||
baz = self.field
|
||||
|
||||
class B(A):
|
||||
pass
|
||||
|
||||
class C(A):
|
||||
baz = self.field2
|
||||
|
||||
self.a_cls = A
|
||||
self.b_cls = B
|
||||
self.c_cls = C
|
||||
|
||||
def test_fields(self):
|
||||
# pylint: disable=protected-access,no-member
|
||||
self.assertEqual({'baz': self.field}, self.a_cls._fields)
|
||||
self.assertEqual({'baz': self.field}, self.b_cls._fields)
|
||||
|
||||
def test_fields_inheritance(self):
|
||||
# pylint: disable=protected-access,no-member
|
||||
self.assertEqual({'baz': self.field2}, self.c_cls._fields)
|
||||
|
||||
def test_slots(self):
|
||||
self.assertEqual(('bar', 'baz'), self.a_cls.__slots__)
|
||||
self.assertEqual(('baz',), self.b_cls.__slots__)
|
||||
|
||||
def test_orig_slots(self):
|
||||
# pylint: disable=protected-access,no-member
|
||||
self.assertEqual(('bar',), self.a_cls._orig_slots)
|
||||
self.assertEqual((), self.b_cls._orig_slots)
|
||||
|
||||
|
||||
class JSONObjectWithFieldsTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.json_util.JSONObjectWithFields."""
|
||||
# pylint: disable=protected-access
|
||||
|
||||
def setUp(self):
|
||||
from acme.jose.json_util import JSONObjectWithFields
|
||||
from acme.jose.json_util import Field
|
||||
|
||||
class MockJSONObjectWithFields(JSONObjectWithFields):
|
||||
# pylint: disable=invalid-name,missing-docstring,no-self-argument
|
||||
# pylint: disable=too-few-public-methods
|
||||
x = Field('x', omitempty=True,
|
||||
encoder=(lambda x: x * 2),
|
||||
decoder=(lambda x: x / 2))
|
||||
y = Field('y')
|
||||
z = Field('Z') # on purpose uppercase
|
||||
|
||||
@y.encoder
|
||||
def y(value):
|
||||
if value == 500:
|
||||
raise errors.SerializationError()
|
||||
return value
|
||||
|
||||
@y.decoder
|
||||
def y(value):
|
||||
if value == 500:
|
||||
raise errors.DeserializationError()
|
||||
return value
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
self.MockJSONObjectWithFields = MockJSONObjectWithFields
|
||||
self.mock = MockJSONObjectWithFields(x=None, y=2, z=3)
|
||||
|
||||
def test_init_defaults(self):
|
||||
self.assertEqual(self.mock, self.MockJSONObjectWithFields(y=2, z=3))
|
||||
|
||||
def test_encode(self):
|
||||
self.assertEqual(10, self.MockJSONObjectWithFields(
|
||||
x=5, y=0, z=0).encode("x"))
|
||||
|
||||
def test_encode_wrong_field(self):
|
||||
self.assertRaises(errors.Error, self.mock.encode, 'foo')
|
||||
|
||||
def test_encode_serialization_error_passthrough(self):
|
||||
self.assertRaises(
|
||||
errors.SerializationError,
|
||||
self.MockJSONObjectWithFields(y=500, z=None).encode, "y")
|
||||
|
||||
def test_fields_to_partial_json_omits_empty(self):
|
||||
self.assertEqual(self.mock.fields_to_partial_json(), {'y': 2, 'Z': 3})
|
||||
|
||||
def test_fields_from_json_fills_default_for_empty(self):
|
||||
self.assertEqual(
|
||||
{'x': None, 'y': 2, 'z': 3},
|
||||
self.MockJSONObjectWithFields.fields_from_json({'y': 2, 'Z': 3}))
|
||||
|
||||
def test_fields_from_json_fails_on_missing(self):
|
||||
self.assertRaises(
|
||||
errors.DeserializationError,
|
||||
self.MockJSONObjectWithFields.fields_from_json, {'y': 0})
|
||||
self.assertRaises(
|
||||
errors.DeserializationError,
|
||||
self.MockJSONObjectWithFields.fields_from_json, {'Z': 0})
|
||||
self.assertRaises(
|
||||
errors.DeserializationError,
|
||||
self.MockJSONObjectWithFields.fields_from_json, {'x': 0, 'y': 0})
|
||||
self.assertRaises(
|
||||
errors.DeserializationError,
|
||||
self.MockJSONObjectWithFields.fields_from_json, {'x': 0, 'Z': 0})
|
||||
|
||||
def test_fields_to_partial_json_encoder(self):
|
||||
self.assertEqual(
|
||||
self.MockJSONObjectWithFields(x=1, y=2, z=3).to_partial_json(),
|
||||
{'x': 2, 'y': 2, 'Z': 3})
|
||||
|
||||
def test_fields_from_json_decoder(self):
|
||||
self.assertEqual(
|
||||
{'x': 2, 'y': 2, 'z': 3},
|
||||
self.MockJSONObjectWithFields.fields_from_json(
|
||||
{'x': 4, 'y': 2, 'Z': 3}))
|
||||
|
||||
def test_fields_to_partial_json_error_passthrough(self):
|
||||
self.assertRaises(
|
||||
errors.SerializationError, self.MockJSONObjectWithFields(
|
||||
x=1, y=500, z=3).to_partial_json)
|
||||
|
||||
def test_fields_from_json_error_passthrough(self):
|
||||
self.assertRaises(
|
||||
errors.DeserializationError,
|
||||
self.MockJSONObjectWithFields.from_json,
|
||||
{'x': 4, 'y': 500, 'Z': 3})
|
||||
|
||||
|
||||
class DeEncodersTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.b64_cert = (
|
||||
u'MIIB3jCCAYigAwIBAgICBTkwDQYJKoZIhvcNAQELBQAwdzELMAkGA1UEBhM'
|
||||
u'CVVMxETAPBgNVBAgMCE1pY2hpZ2FuMRIwEAYDVQQHDAlBbm4gQXJib3IxKz'
|
||||
u'ApBgNVBAoMIlVuaXZlcnNpdHkgb2YgTWljaGlnYW4gYW5kIHRoZSBFRkYxF'
|
||||
u'DASBgNVBAMMC2V4YW1wbGUuY29tMB4XDTE0MTIxMTIyMzQ0NVoXDTE0MTIx'
|
||||
u'ODIyMzQ0NVowdzELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE1pY2hpZ2FuMRI'
|
||||
u'wEAYDVQQHDAlBbm4gQXJib3IxKzApBgNVBAoMIlVuaXZlcnNpdHkgb2YgTW'
|
||||
u'ljaGlnYW4gYW5kIHRoZSBFRkYxFDASBgNVBAMMC2V4YW1wbGUuY29tMFwwD'
|
||||
u'QYJKoZIhvcNAQEBBQADSwAwSAJBAKx1c7RR7R_drnBSQ_zfx1vQLHUbFLh1'
|
||||
u'AQQQ5R8DZUXd36efNK79vukFhN9HFoHZiUvOjm0c-pVE6K-EdE_twuUCAwE'
|
||||
u'AATANBgkqhkiG9w0BAQsFAANBAC24z0IdwIVKSlntksllvr6zJepBH5fMnd'
|
||||
u'fk3XJp10jT6VE-14KNtjh02a56GoraAvJAT5_H67E8GvJ_ocNnB_o'
|
||||
)
|
||||
self.b64_csr = (
|
||||
u'MIIBXTCCAQcCAQAweTELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE1pY2hpZ2F'
|
||||
u'uMRIwEAYDVQQHDAlBbm4gQXJib3IxDDAKBgNVBAoMA0VGRjEfMB0GA1UECw'
|
||||
u'wWVW5pdmVyc2l0eSBvZiBNaWNoaWdhbjEUMBIGA1UEAwwLZXhhbXBsZS5jb'
|
||||
u'20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEArHVztFHtH92ucFJD_N_HW9As'
|
||||
u'dRsUuHUBBBDlHwNlRd3fp580rv2-6QWE30cWgdmJS86ObRz6lUTor4R0T-3'
|
||||
u'C5QIDAQABoCkwJwYJKoZIhvcNAQkOMRowGDAWBgNVHREEDzANggtleGFtcG'
|
||||
u'xlLmNvbTANBgkqhkiG9w0BAQsFAANBAHJH_O6BtC9aGzEVCMGOZ7z9iIRHW'
|
||||
u'Szr9x_bOzn7hLwsbXPAgO1QxEwL-X-4g20Gn9XBE1N9W6HCIEut2d8wACg'
|
||||
)
|
||||
|
||||
def test_encode_b64jose(self):
|
||||
from acme.jose.json_util import encode_b64jose
|
||||
encoded = encode_b64jose(b'x')
|
||||
self.assertTrue(isinstance(encoded, six.string_types))
|
||||
self.assertEqual(u'eA', encoded)
|
||||
|
||||
def test_decode_b64jose(self):
|
||||
from acme.jose.json_util import decode_b64jose
|
||||
decoded = decode_b64jose(u'eA')
|
||||
self.assertTrue(isinstance(decoded, six.binary_type))
|
||||
self.assertEqual(b'x', decoded)
|
||||
|
||||
def test_decode_b64jose_padding_error(self):
|
||||
from acme.jose.json_util import decode_b64jose
|
||||
self.assertRaises(errors.DeserializationError, decode_b64jose, u'x')
|
||||
|
||||
def test_decode_b64jose_size(self):
|
||||
from acme.jose.json_util import decode_b64jose
|
||||
self.assertEqual(b'foo', decode_b64jose(u'Zm9v', size=3))
|
||||
self.assertRaises(
|
||||
errors.DeserializationError, decode_b64jose, u'Zm9v', size=2)
|
||||
self.assertRaises(
|
||||
errors.DeserializationError, decode_b64jose, u'Zm9v', size=4)
|
||||
|
||||
def test_decode_b64jose_minimum_size(self):
|
||||
from acme.jose.json_util import decode_b64jose
|
||||
self.assertEqual(b'foo', decode_b64jose(u'Zm9v', size=3, minimum=True))
|
||||
self.assertEqual(b'foo', decode_b64jose(u'Zm9v', size=2, minimum=True))
|
||||
self.assertRaises(errors.DeserializationError, decode_b64jose,
|
||||
u'Zm9v', size=4, minimum=True)
|
||||
|
||||
def test_encode_hex16(self):
|
||||
from acme.jose.json_util import encode_hex16
|
||||
encoded = encode_hex16(b'foo')
|
||||
self.assertEqual(u'666f6f', encoded)
|
||||
self.assertTrue(isinstance(encoded, six.string_types))
|
||||
|
||||
def test_decode_hex16(self):
|
||||
from acme.jose.json_util import decode_hex16
|
||||
decoded = decode_hex16(u'666f6f')
|
||||
self.assertEqual(b'foo', decoded)
|
||||
self.assertTrue(isinstance(decoded, six.binary_type))
|
||||
|
||||
def test_decode_hex16_minimum_size(self):
|
||||
from acme.jose.json_util import decode_hex16
|
||||
self.assertEqual(b'foo', decode_hex16(u'666f6f', size=3, minimum=True))
|
||||
self.assertEqual(b'foo', decode_hex16(u'666f6f', size=2, minimum=True))
|
||||
self.assertRaises(errors.DeserializationError, decode_hex16,
|
||||
u'666f6f', size=4, minimum=True)
|
||||
|
||||
def test_decode_hex16_odd_length(self):
|
||||
from acme.jose.json_util import decode_hex16
|
||||
self.assertRaises(errors.DeserializationError, decode_hex16, u'x')
|
||||
|
||||
def test_encode_cert(self):
|
||||
from acme.jose.json_util import encode_cert
|
||||
self.assertEqual(self.b64_cert, encode_cert(CERT))
|
||||
|
||||
def test_decode_cert(self):
|
||||
from acme.jose.json_util import decode_cert
|
||||
cert = decode_cert(self.b64_cert)
|
||||
self.assertTrue(isinstance(cert, util.ComparableX509))
|
||||
self.assertEqual(cert, CERT)
|
||||
self.assertRaises(errors.DeserializationError, decode_cert, u'')
|
||||
|
||||
def test_encode_csr(self):
|
||||
from acme.jose.json_util import encode_csr
|
||||
self.assertEqual(self.b64_csr, encode_csr(CSR))
|
||||
|
||||
def test_decode_csr(self):
|
||||
from acme.jose.json_util import decode_csr
|
||||
csr = decode_csr(self.b64_csr)
|
||||
self.assertTrue(isinstance(csr, util.ComparableX509))
|
||||
self.assertEqual(csr, CSR)
|
||||
self.assertRaises(errors.DeserializationError, decode_csr, u'')
|
||||
|
||||
|
||||
class TypedJSONObjectWithFieldsTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
from acme.jose.json_util import TypedJSONObjectWithFields
|
||||
|
||||
# pylint: disable=missing-docstring,abstract-method
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
class MockParentTypedJSONObjectWithFields(TypedJSONObjectWithFields):
|
||||
TYPES = {}
|
||||
type_field_name = 'type'
|
||||
|
||||
@MockParentTypedJSONObjectWithFields.register
|
||||
class MockTypedJSONObjectWithFields(
|
||||
MockParentTypedJSONObjectWithFields):
|
||||
typ = 'test'
|
||||
__slots__ = ('foo',)
|
||||
|
||||
@classmethod
|
||||
def fields_from_json(cls, jobj):
|
||||
return {'foo': jobj['foo']}
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
return {'foo': self.foo}
|
||||
|
||||
self.parent_cls = MockParentTypedJSONObjectWithFields
|
||||
self.msg = MockTypedJSONObjectWithFields(foo='bar')
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.msg.to_partial_json(), {
|
||||
'type': 'test',
|
||||
'foo': 'bar',
|
||||
})
|
||||
|
||||
def test_from_json_non_dict_fails(self):
|
||||
for value in [[], (), 5, "asd"]: # all possible input types
|
||||
self.assertRaises(
|
||||
errors.DeserializationError, self.parent_cls.from_json, value)
|
||||
|
||||
def test_from_json_dict_no_type_fails(self):
|
||||
self.assertRaises(
|
||||
errors.DeserializationError, self.parent_cls.from_json, {})
|
||||
|
||||
def test_from_json_unknown_type_fails(self):
|
||||
self.assertRaises(errors.UnrecognizedTypeError,
|
||||
self.parent_cls.from_json, {'type': 'bar'})
|
||||
|
||||
def test_from_json_returns_obj(self):
|
||||
self.assertEqual({'foo': 'bar'}, self.parent_cls.from_json(
|
||||
{'type': 'test', 'foo': 'bar'}))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,180 +0,0 @@
|
||||
"""JSON Web Algorithm.
|
||||
|
||||
https://tools.ietf.org/html/draft-ietf-jose-json-web-algorithms-40
|
||||
|
||||
"""
|
||||
import abc
|
||||
import collections
|
||||
import logging
|
||||
|
||||
import cryptography.exceptions
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||
from cryptography.hazmat.primitives import hmac # type: ignore
|
||||
from cryptography.hazmat.primitives.asymmetric import padding # type: ignore
|
||||
|
||||
from acme.jose import errors
|
||||
from acme.jose import interfaces
|
||||
from acme.jose import jwk
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JWA(interfaces.JSONDeSerializable): # pylint: disable=abstract-method
|
||||
# pylint: disable=too-few-public-methods
|
||||
# for some reason disable=abstract-method has to be on the line
|
||||
# above...
|
||||
"""JSON Web Algorithm."""
|
||||
|
||||
|
||||
class JWASignature(JWA, collections.Hashable): # type: ignore
|
||||
"""JSON Web Signature Algorithm."""
|
||||
SIGNATURES = {} # type: dict
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, JWASignature):
|
||||
return NotImplemented
|
||||
return self.name == other.name
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.__class__, self.name))
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
@classmethod
|
||||
def register(cls, signature_cls):
|
||||
"""Register class for JSON deserialization."""
|
||||
cls.SIGNATURES[signature_cls.name] = signature_cls
|
||||
return signature_cls
|
||||
|
||||
def to_partial_json(self):
|
||||
return self.name
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
return cls.SIGNATURES[jobj]
|
||||
|
||||
@abc.abstractmethod
|
||||
def sign(self, key, msg): # pragma: no cover
|
||||
"""Sign the ``msg`` using ``key``."""
|
||||
raise NotImplementedError()
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self, key, msg, sig): # pragma: no cover
|
||||
"""Verify the ``msg` and ``sig`` using ``key``."""
|
||||
raise NotImplementedError()
|
||||
|
||||
def __repr__(self):
|
||||
return self.name
|
||||
|
||||
|
||||
class _JWAHS(JWASignature):
|
||||
|
||||
kty = jwk.JWKOct
|
||||
|
||||
def __init__(self, name, hash_):
|
||||
super(_JWAHS, self).__init__(name)
|
||||
self.hash = hash_()
|
||||
|
||||
def sign(self, key, msg):
|
||||
signer = hmac.HMAC(key, self.hash, backend=default_backend())
|
||||
signer.update(msg)
|
||||
return signer.finalize()
|
||||
|
||||
def verify(self, key, msg, sig):
|
||||
verifier = hmac.HMAC(key, self.hash, backend=default_backend())
|
||||
verifier.update(msg)
|
||||
try:
|
||||
verifier.verify(sig)
|
||||
except cryptography.exceptions.InvalidSignature as error:
|
||||
logger.debug(error, exc_info=True)
|
||||
return False
|
||||
else:
|
||||
return True
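# Illustrative sketch (not part of the original module): the HS* algorithms
# wrap HMAC with a SHA-2 hash; signing is deterministic, and verification of
# a tampered signature fails (cf. JWAHSTest in the accompanying test module).
def _example_hs256():
    """Sign and verify with HS256 (registered at the bottom of this module)."""
    sig = HS256.sign(b'some key', b'foo')
    assert HS256.verify(b'some key', b'foo', sig) is True
    assert HS256.verify(b'some key', b'foo', sig + b'!') is False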
|
||||
|
||||
|
||||
class _JWARSA(object):
|
||||
|
||||
kty = jwk.JWKRSA
|
||||
padding = NotImplemented
|
||||
hash = NotImplemented
|
||||
|
||||
def sign(self, key, msg):
|
||||
"""Sign the ``msg`` using ``key``."""
|
||||
try:
|
||||
signer = key.signer(self.padding, self.hash)
|
||||
except AttributeError as error:
|
||||
logger.debug(error, exc_info=True)
|
||||
raise errors.Error("Public key cannot be used for signing")
|
||||
except ValueError as error: # digest too large
|
||||
logger.debug(error, exc_info=True)
|
||||
raise errors.Error(str(error))
|
||||
signer.update(msg)
|
||||
try:
|
||||
return signer.finalize()
|
||||
except ValueError as error:
|
||||
logger.debug(error, exc_info=True)
|
||||
raise errors.Error(str(error))
|
||||
|
||||
def verify(self, key, msg, sig):
|
||||
"""Verify the ``msg` and ``sig`` using ``key``."""
|
||||
verifier = key.verifier(sig, self.padding, self.hash)
|
||||
verifier.update(msg)
|
||||
try:
|
||||
verifier.verify()
|
||||
except cryptography.exceptions.InvalidSignature as error:
|
||||
logger.debug(error, exc_info=True)
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
class _JWARS(_JWARSA, JWASignature):
|
||||
|
||||
def __init__(self, name, hash_):
|
||||
super(_JWARS, self).__init__(name)
|
||||
self.padding = padding.PKCS1v15()
|
||||
self.hash = hash_()
|
||||
|
||||
|
||||
class _JWAPS(_JWARSA, JWASignature):
|
||||
|
||||
def __init__(self, name, hash_):
|
||||
super(_JWAPS, self).__init__(name)
|
||||
self.padding = padding.PSS(
|
||||
mgf=padding.MGF1(hash_()),
|
||||
salt_length=padding.PSS.MAX_LENGTH)
|
||||
self.hash = hash_()
|
||||
|
||||
|
||||
class _JWAES(JWASignature): # pylint: disable=abstract-class-not-used
|
||||
|
||||
# TODO: implement ES signatures
|
||||
|
||||
def sign(self, key, msg): # pragma: no cover
|
||||
raise NotImplementedError()
|
||||
|
||||
def verify(self, key, msg, sig): # pragma: no cover
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
HS256 = JWASignature.register(_JWAHS('HS256', hashes.SHA256))
|
||||
HS384 = JWASignature.register(_JWAHS('HS384', hashes.SHA384))
|
||||
HS512 = JWASignature.register(_JWAHS('HS512', hashes.SHA512))
|
||||
|
||||
RS256 = JWASignature.register(_JWARS('RS256', hashes.SHA256))
|
||||
RS384 = JWASignature.register(_JWARS('RS384', hashes.SHA384))
|
||||
RS512 = JWASignature.register(_JWARS('RS512', hashes.SHA512))
|
||||
|
||||
PS256 = JWASignature.register(_JWAPS('PS256', hashes.SHA256))
|
||||
PS384 = JWASignature.register(_JWAPS('PS384', hashes.SHA384))
|
||||
PS512 = JWASignature.register(_JWAPS('PS512', hashes.SHA512))
|
||||
|
||||
ES256 = JWASignature.register(_JWAES('ES256'))
|
||||
ES384 = JWASignature.register(_JWAES('ES384'))
|
||||
ES512 = JWASignature.register(_JWAES('ES512'))
|
||||
@@ -1,104 +0,0 @@
|
||||
"""Tests for acme.jose.jwa."""
|
||||
import unittest
|
||||
|
||||
from acme import test_util
|
||||
|
||||
from acme.jose import errors
|
||||
|
||||
|
||||
RSA256_KEY = test_util.load_rsa_private_key('rsa256_key.pem')
|
||||
RSA512_KEY = test_util.load_rsa_private_key('rsa512_key.pem')
|
||||
RSA1024_KEY = test_util.load_rsa_private_key('rsa1024_key.pem')
|
||||
|
||||
|
||||
class JWASignatureTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.jwa.JWASignature."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.jose.jwa import JWASignature
|
||||
|
||||
class MockSig(JWASignature):
|
||||
# pylint: disable=missing-docstring,too-few-public-methods
|
||||
# pylint: disable=abstract-class-not-used
|
||||
def sign(self, key, msg):
|
||||
raise NotImplementedError() # pragma: no cover
|
||||
|
||||
def verify(self, key, msg, sig):
|
||||
raise NotImplementedError() # pragma: no cover
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
self.Sig1 = MockSig('Sig1')
|
||||
self.Sig2 = MockSig('Sig2')
|
||||
|
||||
def test_eq(self):
|
||||
self.assertEqual(self.Sig1, self.Sig1)
|
||||
|
||||
def test_ne(self):
|
||||
self.assertNotEqual(self.Sig1, self.Sig2)
|
||||
|
||||
def test_ne_other_type(self):
|
||||
self.assertNotEqual(self.Sig1, 5)
|
||||
|
||||
def test_repr(self):
|
||||
self.assertEqual('Sig1', repr(self.Sig1))
|
||||
self.assertEqual('Sig2', repr(self.Sig2))
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.Sig1.to_partial_json(), 'Sig1')
|
||||
self.assertEqual(self.Sig2.to_partial_json(), 'Sig2')
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.jose.jwa import JWASignature
|
||||
from acme.jose.jwa import RS256
|
||||
self.assertTrue(JWASignature.from_json('RS256') is RS256)
|
||||
|
||||
|
||||
class JWAHSTest(unittest.TestCase): # pylint: disable=too-few-public-methods
|
||||
|
||||
def test_it(self):
|
||||
from acme.jose.jwa import HS256
|
||||
sig = (
|
||||
b"\xceR\xea\xcd\x94\xab\xcf\xfb\xe0\xacA.:\x1a'\x08i\xe2\xc4"
|
||||
b"\r\x85+\x0e\x85\xaeUZ\xd4\xb3\x97zO"
|
||||
)
|
||||
self.assertEqual(HS256.sign(b'some key', b'foo'), sig)
|
||||
self.assertTrue(HS256.verify(b'some key', b'foo', sig) is True)
|
||||
self.assertTrue(HS256.verify(b'some key', b'foo', sig + b'!') is False)
|
||||
|
||||
|
||||
class JWARSTest(unittest.TestCase):
|
||||
|
||||
def test_sign_no_private_part(self):
|
||||
from acme.jose.jwa import RS256
|
||||
self.assertRaises(
|
||||
errors.Error, RS256.sign, RSA512_KEY.public_key(), b'foo')
|
||||
|
||||
def test_sign_key_too_small(self):
|
||||
from acme.jose.jwa import RS256
|
||||
from acme.jose.jwa import PS256
|
||||
self.assertRaises(errors.Error, RS256.sign, RSA256_KEY, b'foo')
|
||||
self.assertRaises(errors.Error, PS256.sign, RSA256_KEY, b'foo')
|
||||
|
||||
def test_rs(self):
|
||||
from acme.jose.jwa import RS256
|
||||
sig = (
|
||||
b'|\xc6\xb2\xa4\xab(\x87\x99\xfa*:\xea\xf8\xa0N&}\x9f\x0f\xc0O'
|
||||
b'\xc6t\xa3\xe6\xfa\xbb"\x15Y\x80Y\xe0\x81\xb8\x88)\xba\x0c\x9c'
|
||||
b'\xa4\x99\x1e\x19&\xd8\xc7\x99S\x97\xfc\x85\x0cOV\xe6\x07\x99'
|
||||
b'\xd2\xb9.>}\xfd'
|
||||
)
|
||||
self.assertEqual(RS256.sign(RSA512_KEY, b'foo'), sig)
|
||||
self.assertTrue(RS256.verify(RSA512_KEY.public_key(), b'foo', sig))
|
||||
self.assertFalse(RS256.verify(
|
||||
RSA512_KEY.public_key(), b'foo', sig + b'!'))
|
||||
|
||||
def test_ps(self):
|
||||
from acme.jose.jwa import PS256
|
||||
sig = PS256.sign(RSA1024_KEY, b'foo')
|
||||
self.assertTrue(PS256.verify(RSA1024_KEY.public_key(), b'foo', sig))
|
||||
self.assertFalse(PS256.verify(
|
||||
RSA1024_KEY.public_key(), b'foo', sig + b'!'))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,281 +0,0 @@
|
||||
"""JSON Web Key."""
|
||||
import abc
|
||||
import binascii
|
||||
import json
|
||||
import logging
|
||||
|
||||
import cryptography.exceptions
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import ec # type: ignore
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||
|
||||
import six
|
||||
|
||||
from acme.jose import errors
|
||||
from acme.jose import json_util
|
||||
from acme.jose import util
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JWK(json_util.TypedJSONObjectWithFields):
|
||||
# pylint: disable=too-few-public-methods
|
||||
"""JSON Web Key."""
|
||||
type_field_name = 'kty'
|
||||
TYPES = {} # type: dict
|
||||
cryptography_key_types = () # type: tuple
|
||||
"""Subclasses should override."""
|
||||
|
||||
required = NotImplemented
|
||||
"""Required members of public key's representation as defined by JWK/JWA."""
|
||||
|
||||
_thumbprint_json_dumps_params = {
|
||||
# "no whitespace or line breaks before or after any syntactic
|
||||
# elements"
|
||||
'indent': None,
|
||||
'separators': (',', ':'),
|
||||
# "members ordered lexicographically by the Unicode [UNICODE]
|
||||
# code points of the member names"
|
||||
'sort_keys': True,
|
||||
}
|
||||
|
||||
def thumbprint(self, hash_function=hashes.SHA256):
|
||||
"""Compute JWK Thumbprint.
|
||||
|
||||
https://tools.ietf.org/html/rfc7638
|
||||
|
||||
:returns bytes:
|
||||
|
||||
"""
|
||||
digest = hashes.Hash(hash_function(), backend=default_backend())
|
||||
digest.update(json.dumps(
|
||||
dict((k, v) for k, v in six.iteritems(self.to_json())
|
||||
if k in self.required),
|
||||
**self._thumbprint_json_dumps_params).encode())
|
||||
return digest.finalize()
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self): # pragma: no cover
|
||||
"""Generate JWK with public key.
|
||||
|
||||
For symmetric cryptosystems, this would return ``self``.
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
def _load_cryptography_key(cls, data, password=None, backend=None):
|
||||
backend = default_backend() if backend is None else backend
|
||||
exceptions = {}
|
||||
|
||||
# private key?
|
||||
for loader in (serialization.load_pem_private_key,
|
||||
serialization.load_der_private_key):
|
||||
try:
|
||||
return loader(data, password, backend)
|
||||
except (ValueError, TypeError,
|
||||
cryptography.exceptions.UnsupportedAlgorithm) as error:
|
||||
exceptions[loader] = error
|
||||
|
||||
# public key?
|
||||
for loader in (serialization.load_pem_public_key,
|
||||
serialization.load_der_public_key):
|
||||
try:
|
||||
return loader(data, backend)
|
||||
except (ValueError,
|
||||
cryptography.exceptions.UnsupportedAlgorithm) as error:
|
||||
exceptions[loader] = error
|
||||
|
||||
# no luck
|
||||
raise errors.Error('Unable to deserialize key: {0}'.format(exceptions))
|
||||
|
||||
@classmethod
|
||||
def load(cls, data, password=None, backend=None):
|
||||
"""Load serialized key as JWK.
|
||||
|
||||
:param str data: Public or private key serialized as PEM or DER.
|
||||
:param str password: Optional password.
|
||||
:param backend: A `.PEMSerializationBackend` and
|
||||
`.DERSerializationBackend` provider.
|
||||
|
||||
:raises errors.Error: if unable to deserialize, or unsupported
|
||||
JWK algorithm
|
||||
|
||||
:returns: JWK of an appropriate type.
|
||||
:rtype: `JWK`
|
||||
|
||||
"""
|
||||
try:
|
||||
key = cls._load_cryptography_key(data, password, backend)
|
||||
except errors.Error as error:
|
||||
logger.debug('Loading symmetric key, asymmetric failed: %s', error)
|
||||
return JWKOct(key=data)
|
||||
|
||||
if cls.typ is not NotImplemented and not isinstance(
|
||||
key, cls.cryptography_key_types):
|
||||
raise errors.Error('Unable to deserialize {0} into {1}'.format(
|
||||
key.__class__, cls.__class__))
|
||||
for jwk_cls in six.itervalues(cls.TYPES):
|
||||
if isinstance(key, jwk_cls.cryptography_key_types):
|
||||
return jwk_cls(key=key)
|
||||
raise errors.Error('Unsupported algorithm: {0}'.format(key.__class__))
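# Illustrative sketch (not part of the original module; the PEM vector name is
# an assumption taken from the acme test suite): the RFC 7638 thumbprint is
# computed only over the required public members, so a private key and its
# public counterpart share the same thumbprint.
def _example_thumbprint():
    """Compare thumbprints of a private JWK and its public half."""
    from acme import test_util
    key = JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
    assert key.thumbprint() == key.public_key().thumbprint()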
|
||||
|
||||
|
||||
@JWK.register
|
||||
class JWKES(JWK): # pragma: no cover
|
||||
# pylint: disable=abstract-class-not-used
|
||||
"""ES JWK.
|
||||
|
||||
.. warning:: This is not yet implemented!
|
||||
|
||||
"""
|
||||
typ = 'ES'
|
||||
cryptography_key_types = (
|
||||
ec.EllipticCurvePublicKey, ec.EllipticCurvePrivateKey)
|
||||
required = ('crv', JWK.type_field_name, 'x', 'y')
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
def fields_from_json(cls, jobj):
|
||||
raise NotImplementedError()
|
||||
|
||||
def public_key(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@JWK.register
|
||||
class JWKOct(JWK):
|
||||
"""Symmetric JWK."""
|
||||
typ = 'oct'
|
||||
__slots__ = ('key',)
|
||||
required = ('k', JWK.type_field_name)
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
# TODO: An "alg" member SHOULD also be present to identify the
|
||||
# algorithm intended to be used with the key, unless the
|
||||
# application uses another means or convention to determine
|
||||
# the algorithm used.
|
||||
return {'k': json_util.encode_b64jose(self.key)}
|
||||
|
||||
@classmethod
|
||||
def fields_from_json(cls, jobj):
|
||||
return cls(key=json_util.decode_b64jose(jobj['k']))
|
||||
|
||||
def public_key(self):
|
||||
return self
|
||||
|
||||
|
||||
@JWK.register
|
||||
class JWKRSA(JWK):
|
||||
"""RSA JWK.
|
||||
|
||||
:ivar key: `cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey`
    or `cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey` wrapped
|
||||
in `.ComparableRSAKey`
|
||||
|
||||
"""
|
||||
typ = 'RSA'
|
||||
cryptography_key_types = (rsa.RSAPublicKey, rsa.RSAPrivateKey)
|
||||
__slots__ = ('key',)
|
||||
required = ('e', JWK.type_field_name, 'n')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if 'key' in kwargs and not isinstance(
|
||||
kwargs['key'], util.ComparableRSAKey):
|
||||
kwargs['key'] = util.ComparableRSAKey(kwargs['key'])
|
||||
super(JWKRSA, self).__init__(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def _encode_param(cls, data):
|
||||
"""Encode Base64urlUInt.
|
||||
|
||||
:type data: long
|
||||
:rtype: unicode
|
||||
|
||||
"""
|
||||
def _leading_zeros(arg):
|
||||
if len(arg) % 2:
|
||||
return '0' + arg
|
||||
return arg
|
||||
|
||||
return json_util.encode_b64jose(binascii.unhexlify(
|
||||
_leading_zeros(hex(data)[2:].rstrip('L'))))
|
||||
|
||||
@classmethod
|
||||
def _decode_param(cls, data):
|
||||
"""Decode Base64urlUInt."""
|
||||
try:
|
||||
return int(binascii.hexlify(json_util.decode_b64jose(data)), 16)
|
||||
except ValueError: # invalid literal for long() with base 16
|
||||
raise errors.DeserializationError()
|
||||
|
||||
def public_key(self):
|
||||
return type(self)(key=self.key.public_key())
|
||||
|
||||
@classmethod
|
||||
def fields_from_json(cls, jobj):
|
||||
# pylint: disable=invalid-name
|
||||
n, e = (cls._decode_param(jobj[x]) for x in ('n', 'e'))
|
||||
public_numbers = rsa.RSAPublicNumbers(e=e, n=n)
|
||||
if 'd' not in jobj: # public key
|
||||
key = public_numbers.public_key(default_backend())
|
||||
else: # private key
|
||||
d = cls._decode_param(jobj['d'])
|
||||
if ('p' in jobj or 'q' in jobj or 'dp' in jobj or
|
||||
'dq' in jobj or 'qi' in jobj or 'oth' in jobj):
|
||||
# "If the producer includes any of the other private
|
||||
# key parameters, then all of the others MUST be
|
||||
# present, with the exception of "oth", which MUST
|
||||
# only be present when more than two prime factors
|
||||
# were used."
|
||||
p, q, dp, dq, qi, = all_params = tuple(
|
||||
jobj.get(x) for x in ('p', 'q', 'dp', 'dq', 'qi'))
|
||||
if any(param is None for param in all_params):
|
||||
raise errors.Error(
|
||||
'Some private parameters are missing: {0}'.format(
|
||||
all_params))
|
||||
p, q, dp, dq, qi = tuple(
|
||||
cls._decode_param(x) for x in all_params)
|
||||
|
||||
# TODO: check for oth
|
||||
else:
|
||||
# cryptography>=0.8
|
||||
p, q = rsa.rsa_recover_prime_factors(n, e, d)
|
||||
dp = rsa.rsa_crt_dmp1(d, p)
|
||||
dq = rsa.rsa_crt_dmq1(d, q)
|
||||
qi = rsa.rsa_crt_iqmp(p, q)
|
||||
|
||||
key = rsa.RSAPrivateNumbers(
|
||||
p, q, d, dp, dq, qi, public_numbers).private_key(
|
||||
default_backend())
|
||||
|
||||
return cls(key=key)
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
# pylint: disable=protected-access
|
||||
if isinstance(self.key._wrapped, rsa.RSAPublicKey):
|
||||
numbers = self.key.public_numbers()
|
||||
params = {
|
||||
'n': numbers.n,
|
||||
'e': numbers.e,
|
||||
}
|
||||
else: # rsa.RSAPrivateKey
|
||||
private = self.key.private_numbers()
|
||||
public = self.key.public_key().public_numbers()
|
||||
params = {
|
||||
'n': public.n,
|
||||
'e': public.e,
|
||||
'd': private.d,
|
||||
'p': private.p,
|
||||
'q': private.q,
|
||||
'dp': private.dmp1,
|
||||
'dq': private.dmq1,
|
||||
'qi': private.iqmp,
|
||||
}
|
||||
return dict((key, self._encode_param(value))
|
||||
for key, value in six.iteritems(params))
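# Illustrative sketch (not part of the original module): Base64urlUInt is the
# JOSE Base-64 encoding of an integer's big-endian bytes, so the common RSA
# public exponent 65537 (0x010001) encodes as 'AQAB' and 0 encodes as 'AA'.
def _example_base64url_uint():
    """Round-trip the Base64urlUInt helpers on JWKRSA."""
    # pylint: disable=protected-access
    assert JWKRSA._encode_param(65537) == u'AQAB'
    assert JWKRSA._encode_param(0) == u'AA'
    assert JWKRSA._decode_param(u'AQAB') == 65537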
|
||||
@@ -1,191 +0,0 @@
|
||||
"""Tests for acme.jose.jwk."""
|
||||
import binascii
|
||||
import unittest
|
||||
|
||||
from acme import test_util
|
||||
|
||||
from acme.jose import errors
|
||||
from acme.jose import json_util
|
||||
from acme.jose import util
|
||||
|
||||
|
||||
DSA_PEM = test_util.load_vector('dsa512_key.pem')
|
||||
RSA256_KEY = test_util.load_rsa_private_key('rsa256_key.pem')
|
||||
RSA512_KEY = test_util.load_rsa_private_key('rsa512_key.pem')
|
||||
|
||||
|
||||
class JWKTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.jwk.JWK."""
|
||||
|
||||
def test_load(self):
|
||||
from acme.jose.jwk import JWK
|
||||
self.assertRaises(errors.Error, JWK.load, DSA_PEM)
|
||||
|
||||
def test_load_subclass_wrong_type(self):
|
||||
from acme.jose.jwk import JWKRSA
|
||||
self.assertRaises(errors.Error, JWKRSA.load, DSA_PEM)
|
||||
|
||||
|
||||
class JWKTestBaseMixin(object):
|
||||
"""Mixin test for JWK subclass tests."""
|
||||
|
||||
thumbprint = NotImplemented
|
||||
|
||||
def test_thumbprint_private(self):
|
||||
self.assertEqual(self.thumbprint, self.jwk.thumbprint())
|
||||
|
||||
def test_thumbprint_public(self):
|
||||
self.assertEqual(self.thumbprint, self.jwk.public_key().thumbprint())
|
||||
|
||||
|
||||
class JWKOctTest(unittest.TestCase, JWKTestBaseMixin):
|
||||
"""Tests for acme.jose.jwk.JWKOct."""
|
||||
|
||||
thumbprint = (b"\xf3\xe7\xbe\xa8`\xd2\xdap\xe9}\x9c\xce>"
|
||||
b"\xd0\xfcI\xbe\xcd\x92'\xd4o\x0e\xf41\xea"
|
||||
b"\x8e(\x8a\xb2i\x1c")
|
||||
|
||||
def setUp(self):
|
||||
from acme.jose.jwk import JWKOct
|
||||
self.jwk = JWKOct(key=b'foo')
|
||||
self.jobj = {'kty': 'oct', 'k': json_util.encode_b64jose(b'foo')}
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jwk.to_partial_json(), self.jobj)
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.jose.jwk import JWKOct
|
||||
self.assertEqual(self.jwk, JWKOct.from_json(self.jobj))
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.jose.jwk import JWKOct
|
||||
hash(JWKOct.from_json(self.jobj))
|
||||
|
||||
def test_load(self):
|
||||
from acme.jose.jwk import JWKOct
|
||||
self.assertEqual(self.jwk, JWKOct.load(b'foo'))
|
||||
|
||||
def test_public_key(self):
|
||||
self.assertTrue(self.jwk.public_key() is self.jwk)
|
||||
|
||||
|
||||
class JWKRSATest(unittest.TestCase, JWKTestBaseMixin):
|
||||
"""Tests for acme.jose.jwk.JWKRSA."""
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
thumbprint = (b'\x83K\xdc#3\x98\xca\x98\xed\xcb\x80\x80<\x0c'
|
||||
b'\xf0\x95\xb9H\xb2*l\xbd$\xe5&|O\x91\xd4 \xb0Y')
|
||||
|
||||
def setUp(self):
|
||||
from acme.jose.jwk import JWKRSA
|
||||
self.jwk256 = JWKRSA(key=RSA256_KEY.public_key())
|
||||
self.jwk256json = {
|
||||
'kty': 'RSA',
|
||||
'e': 'AQAB',
|
||||
'n': 'm2Fylv-Uz7trgTW8EBHP3FQSMeZs2GNQ6VRo1sIVJEk',
|
||||
}
|
||||
# pylint: disable=protected-access
|
||||
self.jwk256_not_comparable = JWKRSA(
|
||||
key=RSA256_KEY.public_key()._wrapped)
|
||||
self.jwk512 = JWKRSA(key=RSA512_KEY.public_key())
|
||||
self.jwk512json = {
|
||||
'kty': 'RSA',
|
||||
'e': 'AQAB',
|
||||
'n': 'rHVztFHtH92ucFJD_N_HW9AsdRsUuHUBBBDlHwNlRd3fp5'
|
||||
'80rv2-6QWE30cWgdmJS86ObRz6lUTor4R0T-3C5Q',
|
||||
}
|
||||
self.private = JWKRSA(key=RSA256_KEY)
|
||||
self.private_json_small = self.jwk256json.copy()
|
||||
self.private_json_small['d'] = (
|
||||
'lPQED_EPTV0UIBfNI3KP2d9Jlrc2mrMllmf946bu-CE')
|
||||
self.private_json = self.jwk256json.copy()
|
||||
self.private_json.update({
|
||||
'd': 'lPQED_EPTV0UIBfNI3KP2d9Jlrc2mrMllmf946bu-CE',
|
||||
'p': 'zUVNZn4lLLBD1R6NE8TKNQ',
|
||||
'q': 'wcfKfc7kl5jfqXArCRSURQ',
|
||||
'dp': 'CWJFq43QvT5Bm5iN8n1okQ',
|
||||
'dq': 'bHh2u7etM8LKKCF2pY2UdQ',
|
||||
'qi': 'oi45cEkbVoJjAbnQpFY87Q',
|
||||
})
|
||||
self.jwk = self.private
|
||||
|
||||
def test_init_auto_comparable(self):
|
||||
self.assertTrue(isinstance(
|
||||
self.jwk256_not_comparable.key, util.ComparableRSAKey))
|
||||
self.assertEqual(self.jwk256, self.jwk256_not_comparable)
|
||||
|
||||
def test_encode_param_zero(self):
|
||||
from acme.jose.jwk import JWKRSA
|
||||
# pylint: disable=protected-access
|
||||
# TODO: move encode/decode _param to separate class
|
||||
self.assertEqual('AA', JWKRSA._encode_param(0))
|
||||
|
||||
def test_equals(self):
|
||||
self.assertEqual(self.jwk256, self.jwk256)
|
||||
self.assertEqual(self.jwk512, self.jwk512)
|
||||
|
||||
def test_not_equals(self):
|
||||
self.assertNotEqual(self.jwk256, self.jwk512)
|
||||
self.assertNotEqual(self.jwk512, self.jwk256)
|
||||
|
||||
def test_load(self):
|
||||
from acme.jose.jwk import JWKRSA
|
||||
self.assertEqual(self.private, JWKRSA.load(
|
||||
test_util.load_vector('rsa256_key.pem')))
|
||||
|
||||
def test_public_key(self):
|
||||
self.assertEqual(self.jwk256, self.private.public_key())
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jwk256.to_partial_json(), self.jwk256json)
|
||||
self.assertEqual(self.jwk512.to_partial_json(), self.jwk512json)
|
||||
self.assertEqual(self.private.to_partial_json(), self.private_json)
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.jose.jwk import JWK
|
||||
self.assertEqual(
|
||||
self.jwk256, JWK.from_json(self.jwk256json))
|
||||
self.assertEqual(
|
||||
self.jwk512, JWK.from_json(self.jwk512json))
|
||||
self.assertEqual(self.private, JWK.from_json(self.private_json))
|
||||
|
||||
def test_from_json_private_small(self):
|
||||
from acme.jose.jwk import JWK
|
||||
self.assertEqual(self.private, JWK.from_json(self.private_json_small))
|
||||
|
||||
def test_from_json_missing_one_additional(self):
|
||||
from acme.jose.jwk import JWK
|
||||
del self.private_json['q']
|
||||
self.assertRaises(errors.Error, JWK.from_json, self.private_json)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.jose.jwk import JWK
|
||||
hash(JWK.from_json(self.jwk256json))
|
||||
|
||||
def test_from_json_non_schema_errors(self):
|
||||
# valid against schema, but still failing
|
||||
from acme.jose.jwk import JWK
|
||||
self.assertRaises(errors.DeserializationError, JWK.from_json,
|
||||
{'kty': 'RSA', 'e': 'AQAB', 'n': ''})
|
||||
self.assertRaises(errors.DeserializationError, JWK.from_json,
|
||||
{'kty': 'RSA', 'e': 'AQAB', 'n': '1'})
|
||||
|
||||
def test_thumbprint_go_jose(self):
|
||||
# https://github.com/square/go-jose/blob/4ddd71883fa547d37fbf598071f04512d8bafee3/jwk.go#L155
|
||||
# https://github.com/square/go-jose/blob/4ddd71883fa547d37fbf598071f04512d8bafee3/jwk_test.go#L331-L344
|
||||
# https://github.com/square/go-jose/blob/4ddd71883fa547d37fbf598071f04512d8bafee3/jwk_test.go#L384
|
||||
from acme.jose.jwk import JWKRSA
|
||||
key = JWKRSA.json_loads("""{
|
||||
"kty": "RSA",
|
||||
"kid": "bilbo.baggins@hobbiton.example",
|
||||
"use": "sig",
|
||||
"n": "n4EPtAOCc9AlkeQHPzHStgAbgs7bTZLwUBZdR8_KuKPEHLd4rHVTeT-O-XV2jRojdNhxJWTDvNd7nqQ0VEiZQHz_AJmSCpMaJMRBSFKrKb2wqVwGU_NsYOYL-QtiWN2lbzcEe6XC0dApr5ydQLrHqkHHig3RBordaZ6Aj-oBHqFEHYpPe7Tpe-OfVfHd1E6cS6M1FZcD1NNLYD5lFHpPI9bTwJlsde3uhGqC0ZCuEHg8lhzwOHrtIQbS0FVbb9k3-tVTU4fg_3L_vniUFAKwuCLqKnS2BYwdq_mzSnbLY7h_qixoR7jig3__kRhuaxwUkRz5iaiQkqgc5gHdrNP5zw",
|
||||
"e": "AQAB"
|
||||
}""")
|
||||
self.assertEqual(
|
||||
binascii.hexlify(key.thumbprint()),
|
||||
b"f63838e96077ad1fc01c3f8405774dedc0641f558ebb4b40dccf5f9b6d66a932")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,433 +0,0 @@
|
||||
"""JOSE Web Signature."""
|
||||
import argparse
|
||||
import base64
|
||||
import sys
|
||||
|
||||
import OpenSSL
|
||||
import six
|
||||
|
||||
from acme.jose import b64
|
||||
from acme.jose import errors
|
||||
from acme.jose import json_util
|
||||
from acme.jose import jwa
|
||||
from acme.jose import jwk
|
||||
from acme.jose import util
|
||||
|
||||
|
||||
class MediaType(object):
|
||||
"""MediaType field encoder/decoder."""
|
||||
|
||||
PREFIX = 'application/'
|
||||
"""MIME Media Type and Content Type prefix."""
|
||||
|
||||
@classmethod
|
||||
def decode(cls, value):
|
||||
"""Decoder."""
|
||||
# 4.1.10
|
||||
if '/' not in value:
|
||||
if ';' in value:
|
||||
raise errors.DeserializationError('Unexpected semi-colon')
|
||||
return cls.PREFIX + value
|
||||
return value
|
||||
|
||||
@classmethod
|
||||
def encode(cls, value):
|
||||
"""Encoder."""
|
||||
# 4.1.10
|
||||
if ';' not in value:
|
||||
assert value.startswith(cls.PREFIX)
|
||||
return value[len(cls.PREFIX):]
|
||||
return value
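# Illustrative sketch (not part of the original module): per section 4.1.10
# the 'application/' prefix is implied on the wire, so MediaType strips it
# when encoding and restores it when decoding.
def _example_media_type():
    """Strip and restore the 'application/' prefix."""
    assert MediaType.encode('application/jose+json') == 'jose+json'
    assert MediaType.decode('jose+json') == 'application/jose+json'
    # values that already contain '/' are passed through unchanged
    assert MediaType.decode('application/jose+json') == 'application/jose+json'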
|
||||
|
||||
|
||||
class Header(json_util.JSONObjectWithFields):
|
||||
"""JOSE Header.
|
||||
|
||||
.. warning:: This class supports **only** Registered Header
|
||||
Parameter Names (as defined in section 4.1 of the
|
||||
protocol). If you need Public Header Parameter Names (4.2)
|
||||
or Private Header Parameter Names (4.3), you must subclass
|
||||
and override :meth:`from_json` and :meth:`to_partial_json`
|
||||
appropriately.
|
||||
|
||||
.. warning:: This class does not support any extensions through
|
||||
the "crit" (Critical) Header Parameter (4.1.11) and as a
|
||||
conforming implementation, :meth:`from_json` treats its
|
||||
occurrence as an error. Please subclass if you need a
different behaviour.
|
||||
|
||||
:ivar x5tS256: "x5t#S256"
|
||||
:ivar str typ: MIME Media Type, inc. :const:`MediaType.PREFIX`.
|
||||
:ivar str cty: Content-Type, inc. :const:`MediaType.PREFIX`.
|
||||
|
||||
"""
|
||||
alg = json_util.Field(
|
||||
'alg', decoder=jwa.JWASignature.from_json, omitempty=True)
|
||||
jku = json_util.Field('jku', omitempty=True)
|
||||
jwk = json_util.Field('jwk', decoder=jwk.JWK.from_json, omitempty=True)
|
||||
kid = json_util.Field('kid', omitempty=True)
|
||||
x5u = json_util.Field('x5u', omitempty=True)
|
||||
x5c = json_util.Field('x5c', omitempty=True, default=())
|
||||
x5t = json_util.Field(
|
||||
'x5t', decoder=json_util.decode_b64jose, omitempty=True)
|
||||
x5tS256 = json_util.Field(
|
||||
'x5t#S256', decoder=json_util.decode_b64jose, omitempty=True)
|
||||
typ = json_util.Field('typ', encoder=MediaType.encode,
|
||||
decoder=MediaType.decode, omitempty=True)
|
||||
cty = json_util.Field('cty', encoder=MediaType.encode,
|
||||
decoder=MediaType.decode, omitempty=True)
|
||||
crit = json_util.Field('crit', omitempty=True, default=())
|
||||
|
||||
def not_omitted(self):
|
||||
"""Fields that would not be omitted in the JSON object."""
|
||||
return dict((name, getattr(self, name))
|
||||
for name, field in six.iteritems(self._fields)
|
||||
if not field.omit(getattr(self, name)))
|
||||
|
||||
def __add__(self, other):
|
||||
if not isinstance(other, type(self)):
|
||||
raise TypeError('Header cannot be added to: {0}'.format(
|
||||
type(other)))
|
||||
|
||||
not_omitted_self = self.not_omitted()
|
||||
not_omitted_other = other.not_omitted()
|
||||
|
||||
if set(not_omitted_self).intersection(not_omitted_other):
|
||||
raise TypeError('Addition of overlapping headers not defined')
|
||||
|
||||
not_omitted_self.update(not_omitted_other)
|
||||
return type(self)(**not_omitted_self) # pylint: disable=star-args
|
||||
|
||||
def find_key(self):
|
||||
"""Find key based on header.
|
||||
|
||||
.. todo:: Supports only "jwk" header parameter lookup.
|
||||
|
||||
:returns: (Public) key found in the header.
|
||||
:rtype: .JWK
|
||||
|
||||
:raises acme.jose.errors.Error: if key could not be found
|
||||
|
||||
"""
|
||||
if self.jwk is None:
|
||||
raise errors.Error('No key found')
|
||||
return self.jwk
|
||||
|
||||
@crit.decoder
|
||||
def crit(unused_value):
|
||||
# pylint: disable=missing-docstring,no-self-argument,no-self-use
|
||||
raise errors.DeserializationError(
|
||||
'"crit" is not supported, please subclass')
|
||||
|
||||
# x5c does NOT use JOSE Base64 (4.1.6)
|
||||
|
||||
@x5c.encoder # type: ignore
|
||||
def x5c(value): # pylint: disable=missing-docstring,no-self-argument
|
||||
return [base64.b64encode(OpenSSL.crypto.dump_certificate(
|
||||
OpenSSL.crypto.FILETYPE_ASN1, cert.wrapped)) for cert in value]
|
||||
|
||||
@x5c.decoder # type: ignore
|
||||
def x5c(value): # pylint: disable=missing-docstring,no-self-argument
|
||||
try:
|
||||
return tuple(util.ComparableX509(OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_ASN1,
|
||||
base64.b64decode(cert))) for cert in value)
|
||||
except OpenSSL.crypto.Error as error:
|
||||
raise errors.DeserializationError(error)
|
||||
|
||||
|
||||
class Signature(json_util.JSONObjectWithFields):
|
||||
"""JWS Signature.
|
||||
|
||||
:ivar combined: Combined Header (protected and unprotected,
|
||||
:class:`Header`).
|
||||
:ivar unicode protected: JWS protected header (JOSE Base-64 decoded).
|
||||
:ivar header: JWS Unprotected Header (:class:`Header`).
|
||||
:ivar str signature: The signature.
|
||||
|
||||
"""
|
||||
header_cls = Header
|
||||
|
||||
__slots__ = ('combined',)
|
||||
protected = json_util.Field('protected', omitempty=True, default='')
|
||||
header = json_util.Field(
|
||||
'header', omitempty=True, default=header_cls(),
|
||||
decoder=header_cls.from_json)
|
||||
signature = json_util.Field(
|
||||
'signature', decoder=json_util.decode_b64jose,
|
||||
encoder=json_util.encode_b64jose)
|
||||
|
||||
@protected.encoder # type: ignore
|
||||
def protected(value): # pylint: disable=missing-docstring,no-self-argument
|
||||
# wrong type guess (Signature, not bytes) | pylint: disable=no-member
|
||||
return json_util.encode_b64jose(value.encode('utf-8'))
|
||||
|
||||
@protected.decoder # type: ignore
|
||||
def protected(value): # pylint: disable=missing-docstring,no-self-argument
|
||||
return json_util.decode_b64jose(value).decode('utf-8')
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
if 'combined' not in kwargs:
|
||||
kwargs = self._with_combined(kwargs)
|
||||
super(Signature, self).__init__(**kwargs)
|
||||
assert self.combined.alg is not None
|
||||
|
||||
@classmethod
|
||||
def _with_combined(cls, kwargs):
|
||||
assert 'combined' not in kwargs
|
||||
header = kwargs.get('header', cls._fields['header'].default)
|
||||
protected = kwargs.get('protected', cls._fields['protected'].default)
|
||||
|
||||
if protected:
|
||||
combined = header + cls.header_cls.json_loads(protected)
|
||||
else:
|
||||
combined = header
|
||||
|
||||
kwargs['combined'] = combined
|
||||
return kwargs
|
||||
|
||||
@classmethod
|
||||
def _msg(cls, protected, payload):
|
||||
return (b64.b64encode(protected.encode('utf-8')) + b'.' +
|
||||
b64.b64encode(payload))
|
||||
|
||||
def verify(self, payload, key=None):
|
||||
"""Verify.
|
||||
|
||||
:param JWK key: Key used for verification.
|
||||
|
||||
"""
|
||||
key = self.combined.find_key() if key is None else key
|
||||
return self.combined.alg.verify(
|
||||
key=key.key, sig=self.signature,
|
||||
msg=self._msg(self.protected, payload))
|
||||
|
||||
@classmethod
|
||||
def sign(cls, payload, key, alg, include_jwk=True,
|
||||
protect=frozenset(), **kwargs):
|
||||
"""Sign.
|
||||
|
||||
:param JWK key: Key for signature.
|
||||
|
||||
"""
|
||||
assert isinstance(key, alg.kty)
|
||||
|
||||
header_params = kwargs
|
||||
header_params['alg'] = alg
|
||||
if include_jwk:
|
||||
header_params['jwk'] = key.public_key()
|
||||
|
||||
assert set(header_params).issubset(cls.header_cls._fields)
|
||||
assert protect.issubset(cls.header_cls._fields)
|
||||
|
||||
protected_params = {}
|
||||
for header in protect:
|
||||
if header in header_params:
|
||||
protected_params[header] = header_params.pop(header)
|
||||
if protected_params:
|
||||
# pylint: disable=star-args
|
||||
protected = cls.header_cls(**protected_params).json_dumps()
|
||||
else:
|
||||
protected = ''
|
||||
|
||||
header = cls.header_cls(**header_params) # pylint: disable=star-args
|
||||
signature = alg.sign(key.key, cls._msg(protected, payload))
|
||||
|
||||
return cls(protected=protected, header=header, signature=signature)
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
fields = super(Signature, self).fields_to_partial_json()
|
||||
if not fields['header'].not_omitted():
|
||||
del fields['header']
|
||||
return fields
|
||||
|
||||
@classmethod
|
||||
def fields_from_json(cls, jobj):
|
||||
fields = super(Signature, cls).fields_from_json(jobj)
|
||||
fields_with_combined = cls._with_combined(fields)
|
||||
if 'alg' not in fields_with_combined['combined'].not_omitted():
|
||||
raise errors.DeserializationError('alg not present')
|
||||
return fields_with_combined
|
||||
|
||||
|
||||
class JWS(json_util.JSONObjectWithFields):
|
||||
"""JSON Web Signature.
|
||||
|
||||
:ivar str payload: JWS Payload.
|
||||
:ivar tuple signatures: JWS Signatures.
|
||||
|
||||
"""
|
||||
__slots__ = ('payload', 'signatures')
|
||||
|
||||
signature_cls = Signature
|
||||
|
||||
def verify(self, key=None):
|
||||
"""Verify."""
|
||||
return all(sig.verify(self.payload, key) for sig in self.signatures)
|
||||
|
||||
@classmethod
|
||||
def sign(cls, payload, **kwargs):
|
||||
"""Sign."""
|
||||
return cls(payload=payload, signatures=(
|
||||
cls.signature_cls.sign(payload=payload, **kwargs),))
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
"""Get a singleton signature.
|
||||
|
||||
:rtype: `signature_cls`
|
||||
|
||||
"""
|
||||
assert len(self.signatures) == 1
|
||||
return self.signatures[0]
|
||||
|
||||
def to_compact(self):
|
||||
"""Compact serialization.
|
||||
|
||||
:rtype: bytes
|
||||
|
||||
"""
|
||||
assert len(self.signatures) == 1
|
||||
|
||||
assert 'alg' not in self.signature.header.not_omitted()
|
||||
# ... it must be in protected
|
||||
|
||||
return (
|
||||
b64.b64encode(self.signature.protected.encode('utf-8')) +
|
||||
b'.' +
|
||||
b64.b64encode(self.payload) +
|
||||
b'.' +
|
||||
b64.b64encode(self.signature.signature))
|
||||
|
||||
@classmethod
|
||||
def from_compact(cls, compact):
|
||||
"""Compact deserialization.
|
||||
|
||||
:param bytes compact:
|
||||
|
||||
"""
|
||||
try:
|
||||
protected, payload, signature = compact.split(b'.')
|
||||
except ValueError:
|
||||
raise errors.DeserializationError(
|
||||
'Compact JWS serialization should comprise exactly'
|
||||
' 3 dot-separated components')
|
||||
|
||||
sig = cls.signature_cls(
|
||||
protected=b64.b64decode(protected).decode('utf-8'),
|
||||
signature=b64.b64decode(signature))
|
||||
return cls(payload=b64.b64decode(payload), signatures=(sig,))
|
||||
|
||||
def to_partial_json(self, flat=True): # pylint: disable=arguments-differ
|
||||
assert self.signatures
|
||||
payload = json_util.encode_b64jose(self.payload)
|
||||
|
||||
if flat and len(self.signatures) == 1:
|
||||
ret = self.signatures[0].to_partial_json()
|
||||
ret['payload'] = payload
|
||||
return ret
|
||||
else:
|
||||
return {
|
||||
'payload': payload,
|
||||
'signatures': self.signatures,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
if 'signature' in jobj and 'signatures' in jobj:
|
||||
raise errors.DeserializationError('Flat mixed with non-flat')
|
||||
elif 'signature' in jobj: # flat
|
||||
return cls(payload=json_util.decode_b64jose(jobj.pop('payload')),
|
||||
signatures=(cls.signature_cls.from_json(jobj),))
|
||||
else:
|
||||
return cls(payload=json_util.decode_b64jose(jobj['payload']),
|
||||
signatures=tuple(cls.signature_cls.from_json(sig)
|
||||
for sig in jobj['signatures']))
|
||||
|
||||
|
||||
class CLI(object):
|
||||
"""JWS CLI."""
|
||||
|
||||
@classmethod
|
||||
def sign(cls, args):
|
||||
"""Sign."""
|
||||
key = args.alg.kty.load(args.key.read())
|
||||
args.key.close()
|
||||
if args.protect is None:
|
||||
args.protect = []
|
||||
if args.compact:
|
||||
args.protect.append('alg')
|
||||
|
||||
sig = JWS.sign(payload=sys.stdin.read().encode(), key=key, alg=args.alg,
|
||||
protect=set(args.protect))
|
||||
|
||||
if args.compact:
|
||||
six.print_(sig.to_compact().decode('utf-8'))
|
||||
else: # JSON
|
||||
six.print_(sig.json_dumps_pretty())
|
||||
|
||||
@classmethod
|
||||
def verify(cls, args):
|
||||
"""Verify."""
|
||||
if args.compact:
|
||||
sig = JWS.from_compact(sys.stdin.read().encode())
|
||||
else: # JSON
|
||||
try:
|
||||
sig = JWS.json_loads(sys.stdin.read())
|
||||
except errors.Error as error:
|
||||
six.print_(error)
|
||||
return -1
|
||||
|
||||
if args.key is not None:
|
||||
assert args.kty is not None
|
||||
key = args.kty.load(args.key.read()).public_key()
|
||||
args.key.close()
|
||||
else:
|
||||
key = None
|
||||
|
||||
sys.stdout.write(sig.payload)
|
||||
return not sig.verify(key=key)
|
||||
|
||||
@classmethod
|
||||
def _alg_type(cls, arg):
|
||||
return jwa.JWASignature.from_json(arg)
|
||||
|
||||
@classmethod
|
||||
def _header_type(cls, arg):
|
||||
assert arg in Signature.header_cls._fields
|
||||
return arg
|
||||
|
||||
@classmethod
|
||||
def _kty_type(cls, arg):
|
||||
assert arg in jwk.JWK.TYPES
|
||||
return jwk.JWK.TYPES[arg]
|
||||
|
||||
@classmethod
|
||||
def run(cls, args=sys.argv[1:]):
|
||||
"""Parse arguments and sign/verify."""
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--compact', action='store_true')
|
||||
|
||||
subparsers = parser.add_subparsers()
|
||||
parser_sign = subparsers.add_parser('sign')
|
||||
parser_sign.set_defaults(func=cls.sign)
|
||||
parser_sign.add_argument(
|
||||
'-k', '--key', type=argparse.FileType('rb'), required=True)
|
||||
parser_sign.add_argument(
|
||||
'-a', '--alg', type=cls._alg_type, default=jwa.RS256)
|
||||
parser_sign.add_argument(
|
||||
'-p', '--protect', action='append', type=cls._header_type)
|
||||
|
||||
parser_verify = subparsers.add_parser('verify')
|
||||
parser_verify.set_defaults(func=cls.verify)
|
||||
parser_verify.add_argument(
|
||||
'-k', '--key', type=argparse.FileType('rb'), required=False)
|
||||
parser_verify.add_argument(
|
||||
'--kty', type=cls._kty_type, required=False)
|
||||
|
||||
parsed = parser.parse_args(args)
|
||||
return parsed.func(parsed)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
exit(CLI.run()) # pragma: no cover
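For reference, a minimal usage sketch of the module above, mirroring what the tests below do; the key file name is illustrative and not part of the module itself.

from acme.jose import jwa, jwk
from acme.jose.jws import JWS

# Load an RSA signing key (path is illustrative).
with open('rsa512_key.pem', 'rb') as key_file:
    key = jwk.JWKRSA.load(key_file.read())

# Sign with only 'alg' in the protected header; 'jwk' stays unprotected.
jws = JWS.sign(payload=b'foo', key=key, alg=jwa.RS256,
               protect=frozenset(['alg']))
assert jws.verify()  # key is found via the embedded 'jwk' header

# Compact serialization is protected '.' payload '.' signature,
# each JOSE Base64 encoded.
compact = jws.to_compact()
assert JWS.from_compact(compact).verify(key=key.public_key())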
@@ -1,239 +0,0 @@
|
||||
"""Tests for acme.jose.jws."""
|
||||
import base64
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
import OpenSSL
|
||||
|
||||
from acme import test_util
|
||||
|
||||
from acme.jose import errors
|
||||
from acme.jose import json_util
|
||||
from acme.jose import jwa
|
||||
from acme.jose import jwk
|
||||
|
||||
|
||||
CERT = test_util.load_comparable_cert('cert.pem')
|
||||
KEY = jwk.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
|
||||
|
||||
|
||||
class MediaTypeTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.jws.MediaType."""
|
||||
|
||||
def test_decode(self):
|
||||
from acme.jose.jws import MediaType
|
||||
self.assertEqual('application/app', MediaType.decode('application/app'))
|
||||
self.assertEqual('application/app', MediaType.decode('app'))
|
||||
self.assertRaises(
|
||||
errors.DeserializationError, MediaType.decode, 'app;foo')
|
||||
|
||||
def test_encode(self):
|
||||
from acme.jose.jws import MediaType
|
||||
self.assertEqual('app', MediaType.encode('application/app'))
|
||||
self.assertEqual('application/app;foo',
|
||||
MediaType.encode('application/app;foo'))
|
||||
|
||||
|
||||
class HeaderTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.jws.Header."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.jose.jws import Header
|
||||
self.header1 = Header(jwk='foo')
|
||||
self.header2 = Header(jwk='bar')
|
||||
self.crit = Header(crit=('a', 'b'))
|
||||
self.empty = Header()
|
||||
|
||||
def test_add_non_empty(self):
|
||||
from acme.jose.jws import Header
|
||||
self.assertEqual(Header(jwk='foo', crit=('a', 'b')),
|
||||
self.header1 + self.crit)
|
||||
|
||||
def test_add_empty(self):
|
||||
self.assertEqual(self.header1, self.header1 + self.empty)
|
||||
self.assertEqual(self.header1, self.empty + self.header1)
|
||||
|
||||
def test_add_overlapping_error(self):
|
||||
self.assertRaises(TypeError, self.header1.__add__, self.header2)
|
||||
|
||||
def test_add_wrong_type_error(self):
|
||||
self.assertRaises(TypeError, self.header1.__add__, 'xxx')
|
||||
|
||||
def test_crit_decode_always_errors(self):
|
||||
from acme.jose.jws import Header
|
||||
self.assertRaises(errors.DeserializationError, Header.from_json,
|
||||
{'crit': ['a', 'b']})
|
||||
|
||||
def test_x5c_decoding(self):
|
||||
from acme.jose.jws import Header
|
||||
header = Header(x5c=(CERT, CERT))
|
||||
jobj = header.to_partial_json()
|
||||
cert_asn1 = OpenSSL.crypto.dump_certificate(
|
||||
OpenSSL.crypto.FILETYPE_ASN1, CERT.wrapped)
|
||||
cert_b64 = base64.b64encode(cert_asn1)
|
||||
self.assertEqual(jobj, {'x5c': [cert_b64, cert_b64]})
|
||||
self.assertEqual(header, Header.from_json(jobj))
|
||||
jobj['x5c'][0] = base64.b64encode(b'xxx' + cert_asn1)
|
||||
self.assertRaises(errors.DeserializationError, Header.from_json, jobj)
|
||||
|
||||
def test_find_key(self):
|
||||
self.assertEqual('foo', self.header1.find_key())
|
||||
self.assertEqual('bar', self.header2.find_key())
|
||||
self.assertRaises(errors.Error, self.crit.find_key)
|
||||
|
||||
|
||||
class SignatureTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.jws.Signature."""
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.jose.jws import Header
|
||||
from acme.jose.jws import Signature
|
||||
self.assertEqual(
|
||||
Signature(signature=b'foo', header=Header(alg=jwa.RS256)),
|
||||
Signature.from_json(
|
||||
{'signature': 'Zm9v', 'header': {'alg': 'RS256'}}))
|
||||
|
||||
def test_from_json_no_alg_error(self):
|
||||
from acme.jose.jws import Signature
|
||||
self.assertRaises(errors.DeserializationError,
|
||||
Signature.from_json, {'signature': 'foo'})
|
||||
|
||||
|
||||
class JWSTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.jws.JWS."""
|
||||
|
||||
def setUp(self):
|
||||
self.privkey = KEY
|
||||
self.pubkey = self.privkey.public_key()
|
||||
|
||||
from acme.jose.jws import JWS
|
||||
self.unprotected = JWS.sign(
|
||||
payload=b'foo', key=self.privkey, alg=jwa.RS256)
|
||||
self.protected = JWS.sign(
|
||||
payload=b'foo', key=self.privkey, alg=jwa.RS256,
|
||||
protect=frozenset(['jwk', 'alg']))
|
||||
self.mixed = JWS.sign(
|
||||
payload=b'foo', key=self.privkey, alg=jwa.RS256,
|
||||
protect=frozenset(['alg']))
|
||||
|
||||
def test_pubkey_jwk(self):
|
||||
self.assertEqual(self.unprotected.signature.combined.jwk, self.pubkey)
|
||||
self.assertEqual(self.protected.signature.combined.jwk, self.pubkey)
|
||||
self.assertEqual(self.mixed.signature.combined.jwk, self.pubkey)
|
||||
|
||||
def test_sign_unprotected(self):
|
||||
self.assertTrue(self.unprotected.verify())
|
||||
|
||||
def test_sign_protected(self):
|
||||
self.assertTrue(self.protected.verify())
|
||||
|
||||
def test_sign_mixed(self):
|
||||
self.assertTrue(self.mixed.verify())
|
||||
|
||||
def test_compact_lost_unprotected(self):
|
||||
compact = self.mixed.to_compact()
|
||||
self.assertEqual(
|
||||
b'eyJhbGciOiAiUlMyNTYifQ.Zm9v.OHdxFVj73l5LpxbFp1AmYX4yJM0Pyb'
|
||||
b'_893n1zQjpim_eLS5J1F61lkvrCrCDErTEJnBGOGesJ72M7b6Ve1cAJA',
|
||||
compact)
|
||||
|
||||
from acme.jose.jws import JWS
|
||||
mixed = JWS.from_compact(compact)
|
||||
|
||||
self.assertNotEqual(self.mixed, mixed)
|
||||
self.assertEqual(
|
||||
set(['alg']), set(mixed.signature.combined.not_omitted()))
|
||||
|
||||
def test_from_compact_missing_components(self):
|
||||
from acme.jose.jws import JWS
|
||||
self.assertRaises(errors.DeserializationError, JWS.from_compact, b'.')
|
||||
|
||||
def test_json_omitempty(self):
|
||||
protected_jobj = self.protected.to_partial_json(flat=True)
|
||||
unprotected_jobj = self.unprotected.to_partial_json(flat=True)
|
||||
|
||||
self.assertTrue('protected' not in unprotected_jobj)
|
||||
self.assertTrue('header' not in protected_jobj)
|
||||
|
||||
unprotected_jobj['header'] = unprotected_jobj['header'].to_json()
|
||||
|
||||
from acme.jose.jws import JWS
|
||||
self.assertEqual(JWS.from_json(protected_jobj), self.protected)
|
||||
self.assertEqual(JWS.from_json(unprotected_jobj), self.unprotected)
|
||||
|
||||
def test_json_flat(self):
|
||||
jobj_to = {
|
||||
'signature': json_util.encode_b64jose(
|
||||
self.mixed.signature.signature),
|
||||
'payload': json_util.encode_b64jose(b'foo'),
|
||||
'header': self.mixed.signature.header,
|
||||
'protected': json_util.encode_b64jose(
|
||||
self.mixed.signature.protected.encode('utf-8')),
|
||||
}
|
||||
jobj_from = jobj_to.copy()
|
||||
jobj_from['header'] = jobj_from['header'].to_json()
|
||||
|
||||
self.assertEqual(self.mixed.to_partial_json(flat=True), jobj_to)
|
||||
from acme.jose.jws import JWS
|
||||
self.assertEqual(self.mixed, JWS.from_json(jobj_from))
|
||||
|
||||
def test_json_not_flat(self):
|
||||
jobj_to = {
|
||||
'signatures': (self.mixed.signature,),
|
||||
'payload': json_util.encode_b64jose(b'foo'),
|
||||
}
|
||||
jobj_from = jobj_to.copy()
|
||||
jobj_from['signatures'] = [jobj_to['signatures'][0].to_json()]
|
||||
|
||||
self.assertEqual(self.mixed.to_partial_json(flat=False), jobj_to)
|
||||
from acme.jose.jws import JWS
|
||||
self.assertEqual(self.mixed, JWS.from_json(jobj_from))
|
||||
|
||||
def test_from_json_mixed_flat(self):
|
||||
from acme.jose.jws import JWS
|
||||
self.assertRaises(errors.DeserializationError, JWS.from_json,
|
||||
{'signatures': (), 'signature': 'foo'})
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.jose.jws import JWS
|
||||
hash(JWS.from_json(self.mixed.to_json()))
|
||||
|
||||
|
||||
class CLITest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.key_path = test_util.vector_path('rsa512_key.pem')
|
||||
|
||||
def test_unverified(self):
|
||||
from acme.jose.jws import CLI
|
||||
with mock.patch('sys.stdin') as sin:
|
||||
sin.read.return_value = '{"payload": "foo", "signature": "xxx"}'
|
||||
with mock.patch('sys.stdout'):
|
||||
self.assertEqual(-1, CLI.run(['verify']))
|
||||
|
||||
def test_json(self):
|
||||
from acme.jose.jws import CLI
|
||||
|
||||
with mock.patch('sys.stdin') as sin:
|
||||
sin.read.return_value = 'foo'
|
||||
with mock.patch('sys.stdout') as sout:
|
||||
CLI.run(['sign', '-k', self.key_path, '-a', 'RS256',
|
||||
'-p', 'jwk'])
|
||||
sin.read.return_value = sout.write.mock_calls[0][1][0]
|
||||
self.assertEqual(0, CLI.run(['verify']))
|
||||
|
||||
def test_compact(self):
|
||||
from acme.jose.jws import CLI
|
||||
|
||||
with mock.patch('sys.stdin') as sin:
|
||||
sin.read.return_value = 'foo'
|
||||
with mock.patch('sys.stdout') as sout:
|
||||
CLI.run(['--compact', 'sign', '-k', self.key_path])
|
||||
sin.read.return_value = sout.write.mock_calls[0][1][0]
|
||||
self.assertEqual(0, CLI.run([
|
||||
'--compact', 'verify', '--kty', 'RSA',
|
||||
'-k', self.key_path]))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,226 +0,0 @@
|
||||
"""JOSE utilities."""
|
||||
import collections
|
||||
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||
import OpenSSL
|
||||
import six
|
||||
|
||||
|
||||
class abstractclassmethod(classmethod):
|
||||
# pylint: disable=invalid-name,too-few-public-methods
|
||||
"""Descriptor for an abstract classmethod.
|
||||
|
||||
It augments the :mod:`abc` framework with an abstract
|
||||
classmethod. This is implemented as :class:`abc.abstractclassmethod`
|
||||
in the standard Python library starting with version 3.2.
|
||||
|
||||
This particular implementation, allegedly based on Python 3.3 source
|
||||
code, is stolen from
|
||||
http://stackoverflow.com/questions/11217878/python-2-7-combine-abc-abstractmethod-and-classmethod.
|
||||
|
||||
"""
|
||||
__isabstractmethod__ = True
|
||||
|
||||
def __init__(self, target):
|
||||
target.__isabstractmethod__ = True
|
||||
super(abstractclassmethod, self).__init__(target)
|
||||
|
||||
|
||||
class ComparableX509(object): # pylint: disable=too-few-public-methods
|
||||
"""Wrapper for OpenSSL.crypto.X509** objects that supports __eq__.
|
||||
|
||||
:ivar wrapped: Wrapped certificate or certificate request.
|
||||
:type wrapped: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
"""
|
||||
def __init__(self, wrapped):
|
||||
assert isinstance(wrapped, OpenSSL.crypto.X509) or isinstance(
|
||||
wrapped, OpenSSL.crypto.X509Req)
|
||||
self.wrapped = wrapped
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.wrapped, name)
|
||||
|
||||
def _dump(self, filetype=OpenSSL.crypto.FILETYPE_ASN1):
|
||||
"""Dumps the object into a buffer with the specified encoding.
|
||||
|
||||
:param int filetype: The desired encoding. Should be one of
|
||||
`OpenSSL.crypto.FILETYPE_ASN1`,
|
||||
`OpenSSL.crypto.FILETYPE_PEM`, or
|
||||
`OpenSSL.crypto.FILETYPE_TEXT`.
|
||||
|
||||
:returns: Encoded X509 object.
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
if isinstance(self.wrapped, OpenSSL.crypto.X509):
|
||||
func = OpenSSL.crypto.dump_certificate
|
||||
else: # assert in __init__ makes sure this is X509Req
|
||||
func = OpenSSL.crypto.dump_certificate_request
|
||||
return func(filetype, self.wrapped)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, self.__class__):
|
||||
return NotImplemented
|
||||
# pylint: disable=protected-access
|
||||
return self._dump() == other._dump()
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.__class__, self._dump()))
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __repr__(self):
|
||||
return '<{0}({1!r})>'.format(self.__class__.__name__, self.wrapped)
|
||||
|
||||
|
||||
class ComparableKey(object): # pylint: disable=too-few-public-methods
|
||||
"""Comparable wrapper for `cryptography` keys.
|
||||
|
||||
See https://github.com/pyca/cryptography/issues/2122.
|
||||
|
||||
"""
|
||||
__hash__ = NotImplemented
|
||||
|
||||
def __init__(self, wrapped):
|
||||
self._wrapped = wrapped
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._wrapped, name)
|
||||
|
||||
def __eq__(self, other):
|
||||
# pylint: disable=protected-access
|
||||
if (not isinstance(other, self.__class__) or
|
||||
self._wrapped.__class__ is not other._wrapped.__class__):
|
||||
return NotImplemented
|
||||
elif hasattr(self._wrapped, 'private_numbers'):
|
||||
return self.private_numbers() == other.private_numbers()
|
||||
elif hasattr(self._wrapped, 'public_numbers'):
|
||||
return self.public_numbers() == other.public_numbers()
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __repr__(self):
|
||||
return '<{0}({1!r})>'.format(self.__class__.__name__, self._wrapped)
|
||||
|
||||
def public_key(self):
|
||||
"""Get wrapped public key."""
|
||||
return self.__class__(self._wrapped.public_key())
|
||||
|
||||
|
||||
class ComparableRSAKey(ComparableKey): # pylint: disable=too-few-public-methods
|
||||
"""Wrapper for `cryptography` RSA keys.
|
||||
|
||||
Wraps around:
|
||||
- `cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey`
|
||||
- `cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey`
|
||||
|
||||
"""
|
||||
|
||||
def __hash__(self):
|
||||
# public_numbers() hasn't got stable hash!
|
||||
# https://github.com/pyca/cryptography/issues/2143
|
||||
if isinstance(self._wrapped, rsa.RSAPrivateKeyWithSerialization):
|
||||
priv = self.private_numbers()
|
||||
pub = priv.public_numbers
|
||||
return hash((self.__class__, priv.p, priv.q, priv.dmp1,
|
||||
priv.dmq1, priv.iqmp, pub.n, pub.e))
|
||||
elif isinstance(self._wrapped, rsa.RSAPublicKeyWithSerialization):
|
||||
pub = self.public_numbers()
|
||||
return hash((self.__class__, pub.n, pub.e))
|
||||
|
||||
|
||||
class ImmutableMap(collections.Mapping, collections.Hashable): # type: ignore
|
||||
# pylint: disable=too-few-public-methods
|
||||
"""Immutable key to value mapping with attribute access."""
|
||||
|
||||
__slots__ = ()
|
||||
"""Must be overridden in subclasses."""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
if set(kwargs) != set(self.__slots__):
|
||||
raise TypeError(
|
||||
'__init__() takes exactly the following arguments: {0} '
|
||||
'({1} given)'.format(', '.join(self.__slots__),
|
||||
', '.join(kwargs) if kwargs else 'none'))
|
||||
for slot in self.__slots__:
|
||||
object.__setattr__(self, slot, kwargs.pop(slot))
|
||||
|
||||
def update(self, **kwargs):
|
||||
"""Return updated map."""
|
||||
items = dict(self)
|
||||
items.update(kwargs)
|
||||
return type(self)(**items) # pylint: disable=star-args
|
||||
|
||||
def __getitem__(self, key):
|
||||
try:
|
||||
return getattr(self, key)
|
||||
except AttributeError:
|
||||
raise KeyError(key)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__slots__)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.__slots__)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(tuple(getattr(self, slot) for slot in self.__slots__))
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
raise AttributeError("can't set attribute")
|
||||
|
||||
def __repr__(self):
|
||||
return '{0}({1})'.format(self.__class__.__name__, ', '.join(
|
||||
'{0}={1!r}'.format(key, value)
|
||||
for key, value in six.iteritems(self)))
|
||||
|
||||
|
||||
class frozendict(collections.Mapping, collections.Hashable): # type: ignore
|
||||
# pylint: disable=invalid-name,too-few-public-methods
|
||||
"""Frozen dictionary."""
|
||||
__slots__ = ('_items', '_keys')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if kwargs and not args:
|
||||
items = dict(kwargs)
|
||||
elif len(args) == 1 and isinstance(args[0], collections.Mapping):
|
||||
items = args[0]
|
||||
else:
|
||||
raise TypeError()
|
||||
# TODO: support generators/iterators
|
||||
|
||||
object.__setattr__(self, '_items', items)
|
||||
object.__setattr__(self, '_keys', tuple(sorted(six.iterkeys(items))))
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self._items[key]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._keys)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._items)
|
||||
|
||||
def _sorted_items(self):
|
||||
return tuple((key, self[key]) for key in self._keys)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._sorted_items())
|
||||
|
||||
def __getattr__(self, name):
|
||||
try:
|
||||
return self._items[name]
|
||||
except KeyError:
|
||||
raise AttributeError(name)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
raise AttributeError("can't set attribute")
|
||||
|
||||
def __repr__(self):
|
||||
return 'frozendict({0})'.format(', '.join('{0}={1!r}'.format(
|
||||
key, value) for key, value in self._sorted_items()))
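A short usage sketch of the two containers defined above (the Point class name is illustrative):

from acme.jose.util import ImmutableMap, frozendict

class Point(ImmutableMap):
    __slots__ = ('x', 'y')

p = Point(x=1, y=2)
assert p.x == 1 and p['y'] == 2           # attribute and mapping access
assert p.update(x=5) == Point(x=5, y=2)   # update() returns a new instance
assert frozendict(a=1) == frozendict({'a': 1})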
@@ -1,199 +0,0 @@
|
||||
"""Tests for acme.jose.util."""
|
||||
import functools
|
||||
import unittest
|
||||
|
||||
import six
|
||||
|
||||
from acme import test_util
|
||||
|
||||
|
||||
class ComparableX509Test(unittest.TestCase):
|
||||
"""Tests for acme.jose.util.ComparableX509."""
|
||||
|
||||
def setUp(self):
|
||||
# test_util.load_comparable_{csr,cert} return ComparableX509
|
||||
self.req1 = test_util.load_comparable_csr('csr.pem')
|
||||
self.req2 = test_util.load_comparable_csr('csr.pem')
|
||||
self.req_other = test_util.load_comparable_csr('csr-san.pem')
|
||||
|
||||
self.cert1 = test_util.load_comparable_cert('cert.pem')
|
||||
self.cert2 = test_util.load_comparable_cert('cert.pem')
|
||||
self.cert_other = test_util.load_comparable_cert('cert-san.pem')
|
||||
|
||||
def test_getattr_proxy(self):
|
||||
self.assertTrue(self.cert1.has_expired())
|
||||
|
||||
def test_eq(self):
|
||||
self.assertEqual(self.req1, self.req2)
|
||||
self.assertEqual(self.cert1, self.cert2)
|
||||
|
||||
def test_ne(self):
|
||||
self.assertNotEqual(self.req1, self.req_other)
|
||||
self.assertNotEqual(self.cert1, self.cert_other)
|
||||
|
||||
def test_ne_wrong_types(self):
|
||||
self.assertNotEqual(self.req1, 5)
|
||||
self.assertNotEqual(self.cert1, 5)
|
||||
|
||||
def test_hash(self):
|
||||
self.assertEqual(hash(self.req1), hash(self.req2))
|
||||
self.assertNotEqual(hash(self.req1), hash(self.req_other))
|
||||
|
||||
self.assertEqual(hash(self.cert1), hash(self.cert2))
|
||||
self.assertNotEqual(hash(self.cert1), hash(self.cert_other))
|
||||
|
||||
def test_repr(self):
|
||||
for x509 in self.req1, self.cert1:
|
||||
self.assertEqual(repr(x509),
|
||||
'<ComparableX509({0!r})>'.format(x509.wrapped))
|
||||
|
||||
|
||||
class ComparableRSAKeyTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.util.ComparableRSAKey."""
|
||||
|
||||
def setUp(self):
|
||||
# test_util.load_rsa_private_key returns ComparableRSAKey
|
||||
self.key = test_util.load_rsa_private_key('rsa256_key.pem')
|
||||
self.key_same = test_util.load_rsa_private_key('rsa256_key.pem')
|
||||
self.key2 = test_util.load_rsa_private_key('rsa512_key.pem')
|
||||
|
||||
def test_getattr_proxy(self):
|
||||
self.assertEqual(256, self.key.key_size)
|
||||
|
||||
def test_eq(self):
|
||||
self.assertEqual(self.key, self.key_same)
|
||||
|
||||
def test_ne(self):
|
||||
self.assertNotEqual(self.key, self.key2)
|
||||
|
||||
def test_ne_different_types(self):
|
||||
self.assertNotEqual(self.key, 5)
|
||||
|
||||
def test_ne_not_wrapped(self):
|
||||
# pylint: disable=protected-access
|
||||
self.assertNotEqual(self.key, self.key_same._wrapped)
|
||||
|
||||
def test_ne_no_serialization(self):
|
||||
from acme.jose.util import ComparableRSAKey
|
||||
self.assertNotEqual(ComparableRSAKey(5), ComparableRSAKey(5))
|
||||
|
||||
def test_hash(self):
|
||||
self.assertTrue(isinstance(hash(self.key), int))
|
||||
self.assertEqual(hash(self.key), hash(self.key_same))
|
||||
self.assertNotEqual(hash(self.key), hash(self.key2))
|
||||
|
||||
def test_repr(self):
|
||||
self.assertTrue(repr(self.key).startswith(
|
||||
'<ComparableRSAKey(<cryptography.hazmat.'))
|
||||
|
||||
def test_public_key(self):
|
||||
from acme.jose.util import ComparableRSAKey
|
||||
self.assertTrue(isinstance(self.key.public_key(), ComparableRSAKey))
|
||||
|
||||
|
||||
class ImmutableMapTest(unittest.TestCase):
|
||||
"""Tests for acme.jose.util.ImmutableMap."""
|
||||
|
||||
def setUp(self):
|
||||
# pylint: disable=invalid-name,too-few-public-methods
|
||||
# pylint: disable=missing-docstring
|
||||
from acme.jose.util import ImmutableMap
|
||||
|
||||
class A(ImmutableMap):
|
||||
__slots__ = ('x', 'y')
|
||||
|
||||
class B(ImmutableMap):
|
||||
__slots__ = ('x', 'y')
|
||||
|
||||
self.A = A
|
||||
self.B = B
|
||||
|
||||
self.a1 = self.A(x=1, y=2)
|
||||
self.a1_swap = self.A(y=2, x=1)
|
||||
self.a2 = self.A(x=3, y=4)
|
||||
self.b = self.B(x=1, y=2)
|
||||
|
||||
def test_update(self):
|
||||
self.assertEqual(self.A(x=2, y=2), self.a1.update(x=2))
|
||||
self.assertEqual(self.a2, self.a1.update(x=3, y=4))
|
||||
|
||||
def test_get_missing_item_raises_key_error(self):
|
||||
self.assertRaises(KeyError, self.a1.__getitem__, 'z')
|
||||
|
||||
def test_order_of_args_does_not_matter(self):
|
||||
self.assertEqual(self.a1, self.a1_swap)
|
||||
|
||||
def test_type_error_on_missing(self):
|
||||
self.assertRaises(TypeError, self.A, x=1)
|
||||
self.assertRaises(TypeError, self.A, y=2)
|
||||
|
||||
def test_type_error_on_unrecognized(self):
|
||||
self.assertRaises(TypeError, self.A, x=1, z=2)
|
||||
self.assertRaises(TypeError, self.A, x=1, y=2, z=3)
|
||||
|
||||
def test_get_attr(self):
|
||||
self.assertEqual(1, self.a1.x)
|
||||
self.assertEqual(2, self.a1.y)
|
||||
self.assertEqual(1, self.a1_swap.x)
|
||||
self.assertEqual(2, self.a1_swap.y)
|
||||
|
||||
def test_set_attr_raises_attribute_error(self):
|
||||
self.assertRaises(
|
||||
AttributeError, functools.partial(self.a1.__setattr__, 'x'), 10)
|
||||
|
||||
def test_equal(self):
|
||||
self.assertEqual(self.a1, self.a1)
|
||||
self.assertEqual(self.a2, self.a2)
|
||||
self.assertNotEqual(self.a1, self.a2)
|
||||
|
||||
def test_hash(self):
|
||||
self.assertEqual(hash((1, 2)), hash(self.a1))
|
||||
|
||||
def test_unhashable(self):
|
||||
self.assertRaises(TypeError, self.A(x=1, y={}).__hash__)
|
||||
|
||||
def test_repr(self):
|
||||
self.assertEqual('A(x=1, y=2)', repr(self.a1))
|
||||
self.assertEqual('A(x=1, y=2)', repr(self.a1_swap))
|
||||
self.assertEqual('B(x=1, y=2)', repr(self.b))
|
||||
self.assertEqual("B(x='foo', y='bar')", repr(self.B(x='foo', y='bar')))
|
||||
|
||||
|
||||
class frozendictTest(unittest.TestCase): # pylint: disable=invalid-name
|
||||
"""Tests for acme.jose.util.frozendict."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.jose.util import frozendict
|
||||
self.fdict = frozendict(x=1, y='2')
|
||||
|
||||
def test_init_dict(self):
|
||||
from acme.jose.util import frozendict
|
||||
self.assertEqual(self.fdict, frozendict({'x': 1, 'y': '2'}))
|
||||
|
||||
def test_init_other_raises_type_error(self):
|
||||
from acme.jose.util import frozendict
|
||||
# specifically fail for generators...
|
||||
self.assertRaises(TypeError, frozendict, six.iteritems({'a': 'b'}))
|
||||
|
||||
def test_len(self):
|
||||
self.assertEqual(2, len(self.fdict))
|
||||
|
||||
def test_hash(self):
|
||||
self.assertTrue(isinstance(hash(self.fdict), int))
|
||||
|
||||
def test_getattr_proxy(self):
|
||||
self.assertEqual(1, self.fdict.x)
|
||||
self.assertEqual('2', self.fdict.y)
|
||||
|
||||
def test_getattr_raises_attribute_error(self):
|
||||
self.assertRaises(AttributeError, self.fdict.__getattr__, 'z')
|
||||
|
||||
def test_setattr_immutable(self):
|
||||
self.assertRaises(AttributeError, self.fdict.__setattr__, 'z', 3)
|
||||
|
||||
def test_repr(self):
|
||||
self.assertEqual("frozendict(x=1, y='2')", repr(self.fdict))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,10 +1,10 @@
|
||||
"""ACME-specific JWS.
|
||||
|
||||
The JWS implementation in acme.jose only implements the base JOSE standard. In
|
||||
The JWS implementation in josepy only implements the base JOSE standard. In
|
||||
order to support the new header fields defined in ACME, this module defines some
|
||||
ACME-specific classes that layer on top of acme.jose.
|
||||
ACME-specific classes that layer on top of josepy.
|
||||
"""
|
||||
from acme import jose
|
||||
import josepy as jose
|
||||
|
||||
|
||||
class Header(jose.Header):
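As a rough illustration of the layering described in the docstring above (simplified; the real acme.jws.Header defines more ACME fields and a custom decoder), an ACME-specific header only needs to add fields on top of the josepy base class:

import josepy as jose

class AcmeHeader(jose.Header):
    """Header with the ACME 'nonce' field (illustrative sketch)."""
    nonce = jose.Field('nonce', omitempty=True,
                       encoder=jose.encode_b64jose,
                       decoder=jose.decode_b64jose)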
@@ -1,7 +1,8 @@
|
||||
"""Tests for acme.jws."""
|
||||
import unittest
|
||||
|
||||
from acme import jose
|
||||
import josepy as jose
|
||||
|
||||
from acme import test_util
|
||||
|
||||
|
||||
|
||||
@@ -2,10 +2,11 @@
|
||||
import collections
|
||||
import six
|
||||
|
||||
import josepy as jose
|
||||
|
||||
from acme import challenges
|
||||
from acme import errors
|
||||
from acme import fields
|
||||
from acme import jose
|
||||
from acme import util
|
||||
|
||||
OLD_ERROR_PREFIX = "urn:acme:error:"
|
||||
@@ -170,9 +171,30 @@ class Directory(jose.JSONDeSerializable):
|
||||
|
||||
class Meta(jose.JSONObjectWithFields):
|
||||
"""Directory Meta."""
|
||||
terms_of_service = jose.Field('terms-of-service', omitempty=True)
|
||||
_terms_of_service = jose.Field('terms-of-service', omitempty=True)
|
||||
_terms_of_service_v2 = jose.Field('termsOfService', omitempty=True)
|
||||
website = jose.Field('website', omitempty=True)
|
||||
caa_identities = jose.Field('caa-identities', omitempty=True)
|
||||
caa_identities = jose.Field('caaIdentities', omitempty=True)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
|
||||
# pylint: disable=star-args
|
||||
super(Directory.Meta, self).__init__(**kwargs)
|
||||
|
||||
@property
|
||||
def terms_of_service(self):
|
||||
"""URL for the CA TOS"""
|
||||
return self._terms_of_service or self._terms_of_service_v2
|
||||
|
||||
def __iter__(self):
|
||||
# When iterating over fields, use the external name 'terms_of_service' instead of
|
||||
# the internal '_terms_of_service'.
|
||||
for name in super(Directory.Meta, self).__iter__():
|
||||
yield name[1:] if name == '_terms_of_service' else name
|
||||
|
||||
def _internal_name(self, name):
|
||||
return '_' + name if name == 'terms_of_service' else name
|
||||
|
||||
|
||||
@classmethod
|
||||
def _canon_key(cls, key):
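A small sketch of what the renamed fields buy: both the v1 and the v2 spelling of the terms-of-service key end up behind the same property (the JSON snippets are illustrative):

from acme import messages

meta_v1 = messages.Directory.Meta.from_json(
    {'terms-of-service': 'https://ca.example/terms'})
meta_v2 = messages.Directory.Meta.from_json(
    {'termsOfService': 'https://ca.example/terms'})
assert meta_v1.terms_of_service == meta_v2.terms_of_service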
@@ -238,7 +260,7 @@ class ResourceBody(jose.JSONObjectWithFields):
|
||||
class Registration(ResourceBody):
|
||||
"""Registration Resource Body.
|
||||
|
||||
:ivar acme.jose.jwk.JWK key: Public key.
|
||||
:ivar josepy.jwk.JWK key: Public key.
|
||||
:ivar tuple contact: Contact information following ACME spec,
|
||||
`tuple` of `unicode`.
|
||||
:ivar unicode agreement:
|
||||
@@ -250,6 +272,7 @@ class Registration(ResourceBody):
|
||||
contact = jose.Field('contact', omitempty=True, default=())
|
||||
agreement = jose.Field('agreement', omitempty=True)
|
||||
status = jose.Field('status', omitempty=True)
|
||||
terms_of_service_agreed = jose.Field('termsOfServiceAgreed', omitempty=True)
|
||||
|
||||
phone_prefix = 'tel:'
|
||||
email_prefix = 'mailto:'
|
||||
@@ -412,6 +435,7 @@ class Authorization(ResourceBody):
|
||||
# be absent'... then acme-spec gives example with 'expires'
|
||||
# present... That's confusing!
|
||||
expires = fields.RFC3339Field('expires', omitempty=True)
|
||||
wildcard = jose.Field('wildcard', omitempty=True)
|
||||
|
||||
@challenges.decoder
|
||||
def challenges(value): # pylint: disable=missing-docstring,no-self-argument
|
||||
@@ -446,7 +470,7 @@ class AuthorizationResource(ResourceWithURI):
|
||||
class CertificateRequest(jose.JSONObjectWithFields):
|
||||
"""ACME new-cert request.
|
||||
|
||||
:ivar acme.jose.util.ComparableX509 csr:
|
||||
:ivar josepy.util.ComparableX509 csr:
|
||||
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
|
||||
|
||||
"""
|
||||
@@ -458,7 +482,7 @@ class CertificateRequest(jose.JSONObjectWithFields):
|
||||
class CertificateResource(ResourceWithURI):
|
||||
"""Certificate Resource.
|
||||
|
||||
:ivar acme.jose.util.ComparableX509 body:
|
||||
:ivar josepy.util.ComparableX509 body:
|
||||
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
|
||||
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
|
||||
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.
|
||||
@@ -481,3 +505,50 @@ class Revocation(jose.JSONObjectWithFields):
|
||||
certificate = jose.Field(
|
||||
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
|
||||
reason = jose.Field('reason')
|
||||
|
||||
|
||||
class Order(ResourceBody):
|
||||
"""Order Resource Body.
|
||||
|
||||
:ivar list of .Identifier identifiers: List of identifiers for the certificate.
|
||||
:ivar acme.messages.Status status:
|
||||
:ivar list of str authorizations: URLs of authorizations.
|
||||
:ivar str certificate: URL to download certificate as a fullchain PEM.
|
||||
:ivar str finalize: URL to POST to in order to request issuance once all
|
||||
authorizations have "valid" status.
|
||||
:ivar datetime.datetime expires: When the order expires.
|
||||
:ivar .Error error: Any error that occurred during finalization, if applicable.
|
||||
"""
|
||||
identifiers = jose.Field('identifiers', omitempty=True)
|
||||
status = jose.Field('status', decoder=Status.from_json,
|
||||
omitempty=True, default=STATUS_PENDING)
|
||||
authorizations = jose.Field('authorizations', omitempty=True)
|
||||
certificate = jose.Field('certificate', omitempty=True)
|
||||
finalize = jose.Field('finalize', omitempty=True)
|
||||
expires = fields.RFC3339Field('expires', omitempty=True)
|
||||
error = jose.Field('error', omitempty=True, decoder=Error.from_json)
|
||||
|
||||
@identifiers.decoder
|
||||
def identifiers(value): # pylint: disable=missing-docstring,no-self-argument
|
||||
return tuple(Identifier.from_json(identifier) for identifier in value)
|
||||
|
||||
class OrderResource(ResourceWithURI):
|
||||
"""Order Resource.
|
||||
|
||||
:ivar acme.messages.Order body:
|
||||
:ivar str csr_pem: The CSR this Order will be finalized with.
|
||||
:ivar list of acme.messages.AuthorizationResource authorizations:
|
||||
Fully-fetched AuthorizationResource objects.
|
||||
:ivar str fullchain_pem: The fetched contents of the certificate URL
|
||||
produced once the order was finalized, if it's present.
|
||||
"""
|
||||
body = jose.Field('body', decoder=Order.from_json)
|
||||
csr_pem = jose.Field('csr_pem', omitempty=True)
|
||||
authorizations = jose.Field('authorizations')
|
||||
fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
|
||||
|
||||
@Directory.register
|
||||
class NewOrder(Order):
|
||||
"""New order."""
|
||||
resource_type = 'new-order'
|
||||
resource = fields.Resource(resource_type)
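A hedged sketch of deserializing an order body with the fields defined above (URLs and identifier values are made up):

from acme import messages

order = messages.Order.from_json({
    'status': 'pending',
    'identifiers': [{'type': 'dns', 'value': 'example.com'}],
    'authorizations': ['https://ca.example/acme/authz/1'],
    'finalize': 'https://ca.example/acme/order/1/finalize',
})
assert order.identifiers[0].value == 'example.com'
assert order.status == messages.STATUS_PENDING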
@@ -1,10 +1,10 @@
|
||||
"""Tests for acme.messages."""
|
||||
import unittest
|
||||
|
||||
import josepy as jose
|
||||
import mock
|
||||
|
||||
from acme import challenges
|
||||
from acme import jose
|
||||
from acme import test_util
|
||||
|
||||
|
||||
@@ -157,7 +157,7 @@ class DirectoryTest(unittest.TestCase):
|
||||
'meta': {
|
||||
'terms-of-service': 'https://example.com/acme/terms',
|
||||
'website': 'https://www.example.com/',
|
||||
'caa-identities': ['example.com'],
|
||||
'caaIdentities': ['example.com'],
|
||||
},
|
||||
})
|
||||
|
||||
@@ -165,6 +165,13 @@ class DirectoryTest(unittest.TestCase):
|
||||
from acme.messages import Directory
|
||||
Directory.from_json({'foo': 'bar'})
|
||||
|
||||
def test_iter_meta(self):
|
||||
result = False
|
||||
for k in self.dir.meta:
|
||||
if k == 'terms_of_service':
|
||||
result = self.dir.meta[k] == 'https://example.com/acme/terms'
|
||||
self.assertTrue(result)
|
||||
|
||||
|
||||
class RegistrationTest(unittest.TestCase):
|
||||
"""Tests for acme.messages.Registration."""
|
||||
@@ -401,5 +408,21 @@ class RevocationTest(unittest.TestCase):
|
||||
hash(Revocation.from_json(self.rev.to_json()))
|
||||
|
||||
|
||||
class OrderResourceTest(unittest.TestCase):
|
||||
"""Tests for acme.messages.OrderResource."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.messages import OrderResource
|
||||
self.regr = OrderResource(
|
||||
body=mock.sentinel.body, uri=mock.sentinel.uri)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.regr.to_json(), {
|
||||
'body': mock.sentinel.body,
|
||||
'uri': mock.sentinel.uri,
|
||||
'authorizations': None,
|
||||
})
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -10,13 +10,13 @@ import unittest
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||
|
||||
import josepy as jose
|
||||
import mock
|
||||
import requests
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import jose
|
||||
from acme import test_util
|
||||
|
||||
|
||||
|
||||
@@ -9,10 +9,9 @@ import unittest
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
|
||||
from acme import jose
|
||||
|
||||
|
||||
def vector_path(*names):
|
||||
"""Path to a test vector."""
|
||||
|
||||
BIN acme/acme/testdata/cert-nocn.der (vendored, normal file): binary file not shown.
@@ -1,10 +1,7 @@
|
||||
JOSE
|
||||
----
|
||||
|
||||
.. automodule:: acme.jose
|
||||
:members:
|
||||
The ``acme.jose`` module was moved to its own package `josepy`_.
|
||||
Please refer to its documentation there.
|
||||
|
||||
.. toctree::
|
||||
:glob:
|
||||
|
||||
jose/*
|
||||
.. _josepy: https://josepy.readthedocs.io/
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
JOSE Base64
|
||||
-----------
|
||||
|
||||
.. automodule:: acme.jose.b64
|
||||
:members:
|
||||
@@ -1,5 +0,0 @@
|
||||
Errors
|
||||
------
|
||||
|
||||
.. automodule:: acme.jose.errors
|
||||
:members:
|
||||
@@ -1,5 +0,0 @@
|
||||
Interfaces
|
||||
----------
|
||||
|
||||
.. automodule:: acme.jose.interfaces
|
||||
:members:
|
||||
@@ -1,5 +0,0 @@
|
||||
JSON utilities
|
||||
--------------
|
||||
|
||||
.. automodule:: acme.jose.json_util
|
||||
:members:
|
||||
@@ -1,5 +0,0 @@
|
||||
JSON Web Algorithms
|
||||
-------------------
|
||||
|
||||
.. automodule:: acme.jose.jwa
|
||||
:members:
|
||||
@@ -1,5 +0,0 @@
|
||||
JSON Web Key
|
||||
------------
|
||||
|
||||
.. automodule:: acme.jose.jwk
|
||||
:members:
|
||||
@@ -1,5 +0,0 @@
|
||||
JSON Web Signature
|
||||
------------------
|
||||
|
||||
.. automodule:: acme.jose.jws
|
||||
:members:
|
||||
@@ -1,5 +0,0 @@
|
||||
Utilities
|
||||
---------
|
||||
|
||||
.. automodule:: acme.jose.util
|
||||
:members:
|
||||
@@ -1,5 +0,0 @@
|
||||
Other ACME objects
|
||||
------------------
|
||||
|
||||
.. automodule:: acme.other
|
||||
:members:
|
||||
@@ -308,4 +308,5 @@ texinfo_documents = [
|
||||
|
||||
intersphinx_mapping = {
|
||||
'python': ('https://docs.python.org/', None),
|
||||
'josepy': ('https://josepy.readthedocs.io/en/latest/', None),
|
||||
}
|
||||
|
||||
@@ -5,11 +5,11 @@ import pkg_resources
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
|
||||
from acme import client
|
||||
from acme import messages
|
||||
from acme import jose
|
||||
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
@@ -4,32 +4,25 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.20.0.dev0'
|
||||
version = '0.24.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
# load_pem_private/public_key (>=0.6)
|
||||
# rsa_recover_prime_factors (>=0.8)
|
||||
'cryptography>=0.8',
|
||||
# formerly known as acme.jose:
|
||||
'josepy>=1.0.0',
|
||||
# Connection.set_tlsext_host_name (>=0.13)
|
||||
'mock',
|
||||
'PyOpenSSL>=0.13',
|
||||
'pyrfc3339',
|
||||
'pytz',
|
||||
'requests[security]>=2.4.1', # security extras added in 2.4.1
|
||||
# For pkg_resources. >=1.0 so pip resolves it to a version cryptography
|
||||
# will tolerate; see #2599:
|
||||
'setuptools>=1.0',
|
||||
'setuptools',
|
||||
'six>=1.9.0', # needed for python_2_unicode_compatible
|
||||
]
|
||||
|
||||
# env markers cause problems with older pip and setuptools
|
||||
if sys.version_info < (2, 7):
|
||||
install_requires.extend([
|
||||
'argparse',
|
||||
'ordereddict',
|
||||
])
|
||||
|
||||
dev_extras = [
|
||||
'pytest',
|
||||
'pytest-xdist',
|
||||
@@ -50,16 +43,15 @@ setup(
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
|
||||
classifiers=[
|
||||
'Development Status :: 3 - Alpha',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.6',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.3',
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
@@ -74,10 +66,5 @@ setup(
|
||||
'dev': dev_extras,
|
||||
'docs': docs_extras,
|
||||
},
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'jws = acme.jose.jws:CLI.run',
|
||||
],
|
||||
},
|
||||
test_suite='acme',
|
||||
)
|
||||
|
||||
@@ -93,4 +93,8 @@ def parse_define_file(filepath, varname):
|
||||
if v == "-D" and len(a_opts) >= i+2:
|
||||
var_parts = a_opts[i+1].partition("=")
|
||||
return_vars[var_parts[0]] = var_parts[2]
|
||||
elif len(v) > 2 and v.startswith("-D"):
|
||||
# Found var with no whitespace separator
|
||||
var_parts = v[2:].partition("=")
|
||||
return_vars[var_parts[0]] = var_parts[2]
|
||||
return return_vars
|
||||
|
||||
@@ -8,7 +8,7 @@ SSLEngine on
|
||||
|
||||
# Intermediate configuration, tweak to your needs
|
||||
SSLProtocol all -SSLv2 -SSLv3
|
||||
SSLCipherSuite ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA
|
||||
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
|
||||
SSLHonorCipherOrder on
|
||||
|
||||
SSLOptions +StrictRequire
|
||||
|
||||
@@ -5,6 +5,7 @@ import logging
|
||||
import os
|
||||
import pkg_resources
|
||||
import re
|
||||
import six
|
||||
import socket
|
||||
import time
|
||||
|
||||
@@ -24,9 +25,10 @@ from certbot_apache import apache_util
|
||||
from certbot_apache import augeas_configurator
|
||||
from certbot_apache import constants
|
||||
from certbot_apache import display_ops
|
||||
from certbot_apache import tls_sni_01
|
||||
from certbot_apache import http_01
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import parser
|
||||
from certbot_apache import tls_sni_01
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
@@ -151,6 +153,9 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
self.assoc = dict()
|
||||
# Outstanding challenges
|
||||
self._chall_out = set()
|
||||
# List of vhosts configured per wildcard domain on this run.
|
||||
# used by deploy_cert() and enhance()
|
||||
self._wildcard_vhosts = dict()
|
||||
# Maps enhancements to vhosts we've enabled the enhancement for
|
||||
self._enhanced_vhosts = defaultdict(set)
|
||||
|
||||
@@ -261,12 +266,27 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
self.aug, self.conf("server-root"), self.conf("vhost-root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _wildcard_domain(self, domain):
|
||||
"""
|
||||
Checks if domain is a wildcard domain
|
||||
|
||||
:param str domain: Domain to check
|
||||
|
||||
:returns: True if the domain is a wildcard domain
|
||||
:rtype: bool
|
||||
"""
|
||||
if isinstance(domain, six.text_type):
|
||||
wildcard_marker = u"*."
|
||||
else:
|
||||
wildcard_marker = b"*."
|
||||
return domain.startswith(wildcard_marker)
|
||||
|
||||
def deploy_cert(self, domain, cert_path, key_path,
|
||||
chain_path=None, fullchain_path=None):
|
||||
"""Deploys certificate to specified virtual host.
|
||||
|
||||
Currently tries to find the last directives to deploy the cert in
|
||||
the VHost associated with the given domain. If it can't find the
|
||||
Currently tries to find the last directives to deploy the certificate
|
||||
in the VHost associated with the given domain. If it can't find the
|
||||
directives, it searches the "included" confs. The function verifies
|
||||
that it has located the three directives and finally modifies them
|
||||
to point to the correct destination. After the certificate is
|
||||
@@ -279,9 +299,112 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
a lack of directives
|
||||
|
||||
"""
|
||||
# Choose vhost before (possible) enabling of mod_ssl, to keep the
|
||||
# vhost choice namespace similar with the pre-validation one.
|
||||
vhost = self.choose_vhost(domain)
|
||||
vhosts = self.choose_vhosts(domain)
|
||||
for vhost in vhosts:
|
||||
self._deploy_cert(vhost, cert_path, key_path, chain_path, fullchain_path)
|
||||
|
||||
def choose_vhosts(self, domain, create_if_no_ssl=True):
|
||||
"""
|
||||
Finds VirtualHosts that can be used with the provided domain
|
||||
|
||||
:param str domain: Domain name to match VirtualHosts to
|
||||
:param bool create_if_no_ssl: Whether an HTTPS VirtualHost should be
created if the matched VirtualHost does not already have one
|
||||
|
||||
:returns: List of VirtualHosts or None
|
||||
:rtype: `list` of :class:`~certbot_apache.obj.VirtualHost`
|
||||
"""
|
||||
|
||||
if self._wildcard_domain(domain):
|
||||
if domain in self._wildcard_vhosts:
|
||||
# Vhosts for a wildcard domain were already selected
|
||||
return self._wildcard_vhosts[domain]
|
||||
# Ask user which VHosts to support.
|
||||
# Returned objects are guaranteed to be ssl vhosts
|
||||
return self._choose_vhosts_wildcard(domain, create_if_no_ssl)
|
||||
else:
|
||||
return [self.choose_vhost(domain, create_if_no_ssl)]
|
||||
|
||||
def _vhosts_for_wildcard(self, domain):
|
||||
"""
|
||||
Get VHost objects for every VirtualHost that the user wants to handle
|
||||
with the wildcard certificate.
|
||||
"""
|
||||
|
||||
# Collect all vhosts that match the name
|
||||
matched = set()
|
||||
for vhost in self.vhosts:
|
||||
for name in vhost.get_names():
|
||||
if self._in_wildcard_scope(name, domain):
|
||||
matched.add(vhost)
|
||||
|
||||
return list(matched)
|
||||
|
||||
def _in_wildcard_scope(self, name, domain):
|
||||
"""
|
||||
Helper method for _vhosts_for_wildcard() that makes sure that the name
is within the scope of the wildcard domain.

e.g. in scope: domain = *.wild.card, name = 1.wild.card
not in scope: domain = *.wild.card, name = 1.2.wild.card
|
||||
"""
|
||||
if len(name.split(".")) == len(domain.split(".")):
|
||||
return fnmatch.fnmatch(name, domain)
|
||||
|
||||
|
||||
def _choose_vhosts_wildcard(self, domain, create_ssl=True):
|
||||
"""Prompts user to choose vhosts to install a wildcard certificate for"""
|
||||
|
||||
# Get all vhosts that are covered by the wildcard domain
|
||||
vhosts = self._vhosts_for_wildcard(domain)
|
||||
|
||||
# Go through the vhosts, making sure that we cover all the names
|
||||
# present, but preferring the SSL vhosts
|
||||
filtered_vhosts = dict()
|
||||
for vhost in vhosts:
|
||||
for name in vhost.get_names():
|
||||
if vhost.ssl:
|
||||
# Always prefer SSL vhosts
|
||||
filtered_vhosts[name] = vhost
|
||||
elif name not in filtered_vhosts and create_ssl:
|
||||
# Add if not in list previously
|
||||
filtered_vhosts[name] = vhost
|
||||
|
||||
# Only unique VHost objects
|
||||
dialog_input = set([vhost for vhost in filtered_vhosts.values()])
|
||||
|
||||
# Ask the user which of names to enable, expect list of names back
|
||||
dialog_output = display_ops.select_vhost_multiple(list(dialog_input))
|
||||
|
||||
if not dialog_output:
|
||||
logger.error(
|
||||
"No vhost exists with servername or alias for domain %s. "
|
||||
"No vhost was selected. Please specify ServerName or ServerAlias "
|
||||
"in the Apache config.",
|
||||
domain)
|
||||
raise errors.PluginError("No vhost selected")
|
||||
|
||||
# Make sure we create SSL vhosts for the ones that are HTTP only
|
||||
# if requested.
|
||||
return_vhosts = list()
|
||||
for vhost in dialog_output:
|
||||
if not vhost.ssl:
|
||||
return_vhosts.append(self.make_vhost_ssl(vhost))
|
||||
else:
|
||||
return_vhosts.append(vhost)
|
||||
|
||||
self._wildcard_vhosts[domain] = return_vhosts
|
||||
return return_vhosts
|
||||
|
||||
|
||||
def _deploy_cert(self, vhost, cert_path, key_path, chain_path, fullchain_path):
|
||||
"""
|
||||
Helper function for deploy_cert() that handles the actual deployment.
This exists because we might want to do multiple deployments per
domain originally passed to deploy_cert(). This is especially true
with wildcard certificates.
|
||||
"""
|
||||
|
||||
|
||||
# This is done first so that ssl module is enabled and cert_path,
|
||||
# cert_key... can all be parsed appropriately
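The wildcard scope rule used above boils down to shell-style matching restricted to a single extra label; a standalone sketch of the same check:

import fnmatch

def in_wildcard_scope(name, domain):
    # '*.wild.card' may only cover one label, so the label counts
    # must match before fnmatch is consulted.
    if len(name.split(".")) == len(domain.split(".")):
        return fnmatch.fnmatch(name, domain)
    return False

assert in_wildcard_scope("1.wild.card", "*.wild.card")
assert not in_wildcard_scope("1.2.wild.card", "*.wild.card")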
@@ -301,16 +424,22 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
path["chain_path"] = self.parser.find_dir(
|
||||
"SSLCertificateChainFile", None, vhost.path)
|
||||
|
||||
if not path["cert_path"] or not path["cert_key"]:
|
||||
# Throw some can't find all of the directives error"
|
||||
# Handle errors when certificate/key directives cannot be found
|
||||
if not path["cert_path"]:
|
||||
logger.warning(
|
||||
"Cannot find a cert or key directive in %s. "
|
||||
"Cannot find an SSLCertificateFile directive in %s. "
|
||||
"VirtualHost was not modified", vhost.path)
|
||||
# Presumably break here so that the virtualhost is not modified
|
||||
raise errors.PluginError(
|
||||
"Unable to find cert and/or key directives")
|
||||
"Unable to find an SSLCertificateFile directive")
|
||||
elif not path["cert_key"]:
|
||||
logger.warning(
|
||||
"Cannot find an SSLCertificateKeyFile directive for "
|
||||
"certificate in %s. VirtualHost was not modified", vhost.path)
|
||||
raise errors.PluginError(
|
||||
"Unable to find an SSLCertificateKeyFile directive for "
|
||||
"certificate")
|
||||
|
||||
logger.info("Deploying Certificate for %s to VirtualHost %s", domain, vhost.filep)
|
||||
logger.info("Deploying Certificate to VirtualHost %s", vhost.filep)
|
||||
|
||||
if self.version < (2, 4, 8) or (chain_path and not fullchain_path):
|
||||
# install SSLCertificateFile, SSLCertificateKeyFile,
|
||||
@@ -326,8 +455,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"version of Apache")
|
||||
else:
|
||||
if not fullchain_path:
|
||||
raise errors.PluginError("Please provide the --fullchain-path\
|
||||
option pointing to your full chain file")
|
||||
raise errors.PluginError("Please provide the --fullchain-path "
|
||||
"option pointing to your full chain file")
|
||||
set_cert_path = fullchain_path
|
||||
self.aug.set(path["cert_path"][-1], fullchain_path)
|
||||
self.aug.set(path["cert_key"][-1], key_path)
|
||||
@@ -346,20 +475,21 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if chain_path is not None:
|
||||
self.save_notes += "\tSSLCertificateChainFile %s\n" % chain_path
|
||||
|
||||
def choose_vhost(self, target_name, temp=False):
|
||||
def choose_vhost(self, target_name, create_if_no_ssl=True):
|
||||
"""Chooses a virtual host based on the given domain name.
|
||||
|
||||
If there is no clear virtual host to be selected, the user is prompted
|
||||
with all available choices.
|
||||
|
||||
The returned vhost is guaranteed to have TLS enabled unless temp is
|
||||
True. If temp is True, there is no such guarantee and the result is
|
||||
not cached.
|
||||
The returned vhost is guaranteed to have TLS enabled unless
|
||||
create_if_no_ssl is set to False, in which case there is no such guarantee
|
||||
and the result is not cached.
|
||||
|
||||
:param str target_name: domain name
|
||||
:param bool temp: whether the vhost is only used temporarily
|
||||
:param bool create_if_no_ssl: If the found VirtualHost doesn't have an HTTPS
counterpart, should one be created
|
||||
|
||||
:returns: ssl vhost associated with name
|
||||
:returns: vhost associated with name
|
||||
:rtype: :class:`~certbot_apache.obj.VirtualHost`
|
||||
|
||||
:raises .errors.PluginError: If no vhost is available or chosen
|
||||
@@ -372,7 +502,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Try to find a reasonable vhost
|
||||
vhost = self._find_best_vhost(target_name)
|
||||
if vhost is not None:
|
||||
if temp:
|
||||
if not create_if_no_ssl:
|
||||
return vhost
|
||||
if not vhost.ssl:
|
||||
vhost = self.make_vhost_ssl(vhost)
|
||||
@@ -381,7 +511,9 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
self.assoc[target_name] = vhost
|
||||
return vhost
|
||||
|
||||
return self._choose_vhost_from_list(target_name, temp)
|
||||
# Negate create_if_no_ssl value to indicate if we want a SSL vhost
|
||||
# to get created if a non-ssl vhost is selected.
|
||||
return self._choose_vhost_from_list(target_name, temp=not create_if_no_ssl)
|
||||
|
||||
def _choose_vhost_from_list(self, target_name, temp=False):
|
||||
# Select a vhost from a list
|
||||
@@ -390,7 +522,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
logger.error(
|
||||
"No vhost exists with servername or alias of %s. "
|
||||
"No vhost was selected. Please specify ServerName or ServerAlias "
|
||||
"in the Apache config, or split vhosts into separate files.",
|
||||
"in the Apache config.",
|
||||
target_name)
|
||||
raise errors.PluginError("No vhost selected")
|
||||
elif temp:
|
||||
@@ -435,12 +567,35 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _find_best_vhost(self, target_name):
|
||||
def find_best_http_vhost(self, target, filter_defaults, port="80"):
"""Returns the best matching non-HTTPS vhost found in the Apache config
|
||||
|
||||
:param str target: Domain name of the desired VirtualHost
|
||||
:param bool filter_defaults: whether _default_ vhosts should be
|
||||
included if it is the best match
|
||||
:param str port: port number the vhost should be listening on
|
||||
|
||||
:returns: VirtualHost object that's the best match for target name
|
||||
:rtype: `obj.VirtualHost` or None
|
||||
"""
|
||||
filtered_vhosts = []
|
||||
for vhost in self.vhosts:
|
||||
if any(a.is_wildcard() or a.get_port() == port for a in vhost.addrs) and not vhost.ssl:
|
||||
filtered_vhosts.append(vhost)
|
||||
return self._find_best_vhost(target, filtered_vhosts, filter_defaults)
|
||||
|
||||
def _find_best_vhost(self, target_name, vhosts=None, filter_defaults=True):
|
||||
"""Finds the best vhost for a target_name.
|
||||
|
||||
This does not upgrade a vhost to HTTPS... it only finds the most
|
||||
appropriate vhost for the given target_name.
|
||||
|
||||
:param str target_name: domain handled by the desired vhost
|
||||
:param vhosts: vhosts to consider
|
||||
:type vhosts: `collections.Iterable` of :class:`~certbot_apache.obj.VirtualHost`
|
||||
:param bool filter_defaults: whether a vhost with a _default_
|
||||
addr is acceptable
|
||||
|
||||
:returns: VHost or None
|
||||
|
||||
"""
|
||||
@@ -452,7 +607,11 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Points 1 - Address name with no SSL
|
||||
best_candidate = None
|
||||
best_points = 0
|
||||
for vhost in self.vhosts:
|
||||
|
||||
if vhosts is None:
|
||||
vhosts = self.vhosts
|
||||
|
||||
for vhost in vhosts:
|
||||
if vhost.modmacro is True:
|
||||
continue
|
||||
names = vhost.get_names()
|
||||
@@ -476,8 +635,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
|
||||
# No winners here... is there only one reasonable vhost?
|
||||
if best_candidate is None:
|
||||
# reasonable == Not all _default_ addrs
|
||||
vhosts = self._non_default_vhosts()
|
||||
if filter_defaults:
|
||||
vhosts = self._non_default_vhosts(vhosts)
|
||||
# remove mod_macro hosts from reasonable vhosts
|
||||
reasonable_vhosts = [vh for vh
|
||||
in vhosts if vh.modmacro is False]
|
||||
@@ -486,9 +645,9 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
|
||||
return best_candidate
|
||||
|
||||
def _non_default_vhosts(self):
|
||||
def _non_default_vhosts(self, vhosts):
|
||||
"""Return all non _default_ only vhosts."""
|
||||
return [vh for vh in self.vhosts if not all(
|
||||
return [vh for vh in vhosts if not all(
|
||||
addr.get_addr() == "_default_" for addr in vh.addrs
|
||||
)]
|
||||
|
||||
@@ -736,31 +895,43 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
|
||||
"""
|
||||
|
||||
# If nonstandard port, add service definition for matching
|
||||
if port != "443":
|
||||
self.prepare_https_modules(temp)
|
||||
self.ensure_listen(port, https=True)
|
||||
|
||||
def ensure_listen(self, port, https=False):
|
||||
"""Make sure that Apache is listening on the port. Checks if the
|
||||
Listen statement for the port already exists, and adds it to the
|
||||
configuration if necessary.
|
||||
|
||||
:param str port: Port number to check and add Listen for if not in
|
||||
place already
|
||||
:param bool https: If the port will be used for HTTPS
|
||||
|
||||
"""
|
||||
|
||||
# If HTTPS requested for nonstandard port, add service definition
|
||||
if https and port != "443":
|
||||
port_service = "%s %s" % (port, "https")
|
||||
else:
|
||||
port_service = port
|
||||
|
||||
self.prepare_https_modules(temp)
|
||||
# Check for Listen <port>
|
||||
# Note: This could be made to also look for ip:443 combo
|
||||
listens = [self.parser.get_arg(x).split()[0] for
|
||||
x in self.parser.find_dir("Listen")]
|
||||
|
||||
# In case no Listens are set (which really is a broken apache config)
|
||||
if not listens:
|
||||
listens = ["80"]
|
||||
|
||||
# Listen already in place
|
||||
if self._has_port_already(listens, port):
|
||||
return
|
||||
|
||||
listen_dirs = set(listens)
|
||||
|
||||
if not listens:
|
||||
listen_dirs.add(port_service)
|
||||
|
||||
for listen in listens:
|
||||
# For any listen statement, check if the machine also listens on
|
||||
# Port 443. If not, add such a listen statement.
|
||||
# the given port. If not, add such a listen statement.
|
||||
if len(listen.split(":")) == 1:
|
||||
# It's listening on all interfaces
|
||||
if port not in listen_dirs and port_service not in listen_dirs:
|
||||
@@ -772,11 +943,39 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if "%s:%s" % (ip, port_service) not in listen_dirs and (
|
||||
"%s:%s" % (ip, port_service) not in listen_dirs):
|
||||
listen_dirs.add("%s:%s" % (ip, port_service))
|
||||
self._add_listens(listen_dirs, listens, port)
|
||||
if https:
|
||||
self._add_listens_https(listen_dirs, listens, port)
|
||||
else:
|
||||
self._add_listens_http(listen_dirs, listens, port)
|
||||
|
||||
def _add_listens(self, listens, listens_orig, port):
|
||||
"""Helper method for prepare_server_https to figure out which new
|
||||
listen statements need adding
|
||||
def _add_listens_http(self, listens, listens_orig, port):
|
||||
"""Helper method for ensure_listen to figure out which new
|
||||
listen statements need adding for listening HTTP on port
|
||||
|
||||
:param set listens: Set of all needed Listen statements
|
||||
:param list listens_orig: List of existing listen statements
|
||||
:param string port: Port number we're adding
|
||||
"""
|
||||
|
||||
new_listens = listens.difference(listens_orig)
|
||||
|
||||
if port in new_listens:
|
||||
# We have a wildcard, skip the rest
|
||||
self.parser.add_dir(parser.get_aug_path(self.parser.loc["listen"]),
|
||||
"Listen", port)
|
||||
self.save_notes += "Added Listen %s directive to %s\n" % (
|
||||
port, self.parser.loc["listen"])
|
||||
else:
|
||||
for listen in new_listens:
|
||||
self.parser.add_dir(parser.get_aug_path(
|
||||
self.parser.loc["listen"]), "Listen", listen.split(" "))
|
||||
self.save_notes += ("Added Listen %s directive to "
|
||||
"%s\n") % (listen,
|
||||
self.parser.loc["listen"])
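# A small sketch of the set arithmetic above, with made-up addresses: each
# interface that currently has a Listen gets a matching Listen for the new port,
# and only the genuinely new statements are written back.
existing = {"1.2.3.4:80", "5.6.7.8:80"}                    # parsed Listen arguments
needed = {addr.rsplit(":", 1)[0] + ":8080" for addr in existing}
new_listens = needed - existing
# new_listens == {"1.2.3.4:8080", "5.6.7.8:8080"}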
|
||||
|
||||
def _add_listens_https(self, listens, listens_orig, port):
|
||||
"""Helper method for ensure_listen to figure out which new
|
||||
listen statements need adding for listening HTTPS on port
|
||||
|
||||
:param set listens: Set of all needed Listen statements
|
||||
:param list listens_orig: List of existing listen statements
|
||||
@@ -1201,7 +1400,10 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"insert_cert_file_path")
|
||||
self.parser.add_dir(vh_path, "SSLCertificateKeyFile",
|
||||
"insert_key_file_path")
|
||||
self.parser.add_dir(vh_path, "Include", self.mod_ssl_conf)
|
||||
# Only include the TLS configuration if not already included
|
||||
existing_inc = self.parser.find_dir("Include", self.mod_ssl_conf, vh_path)
|
||||
if not existing_inc:
|
||||
self.parser.add_dir(vh_path, "Include", self.mod_ssl_conf)
|
||||
|
||||
def _add_servername_alias(self, target_name, vhost):
|
||||
vh_path = vhost.path
|
||||
@@ -1305,8 +1507,24 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
except KeyError:
|
||||
raise errors.PluginError(
|
||||
"Unsupported enhancement: {0}".format(enhancement))
|
||||
|
||||
matched_vhosts = self.choose_vhosts(domain, create_if_no_ssl=False)
|
||||
# We should be handling only SSL vhosts for enhancements
|
||||
vhosts = [vhost for vhost in matched_vhosts if vhost.ssl]
|
||||
|
||||
if not vhosts:
|
||||
msg_tmpl = ("Certbot was not able to find SSL VirtualHost for a "
|
||||
"domain {0} for enabling enhancement \"{1}\". The requested "
|
||||
"enhancement was not configured.")
|
||||
msg_enhancement = enhancement
|
||||
if options:
|
||||
msg_enhancement += ": " + options
|
||||
msg = msg_tmpl.format(domain, msg_enhancement)
|
||||
logger.warning(msg)
|
||||
raise errors.PluginError(msg)
|
||||
try:
|
||||
func(self.choose_vhost(domain), options)
|
||||
for vhost in vhosts:
|
||||
func(vhost, options)
|
||||
except errors.PluginError:
|
||||
logger.warning("Failed %s for %s", enhancement, domain)
|
||||
raise
|
||||
@@ -1798,10 +2016,27 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
:raises .errors.MisconfigurationError: If reload fails
|
||||
|
||||
"""
|
||||
error = ""
|
||||
try:
|
||||
util.run_script(self.constant("restart_cmd"))
|
||||
except errors.SubprocessError as err:
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
logger.info("Unable to restart apache using %s",
|
||||
self.constant("restart_cmd"))
|
||||
alt_restart = self.constant("restart_cmd_alt")
|
||||
if alt_restart:
|
||||
logger.debug("Trying alternative restart command: %s",
|
||||
alt_restart)
|
||||
# There is an alternative restart command available
|
||||
# This usually is "restart" verb while original is "graceful"
|
||||
try:
|
||||
util.run_script(self.constant(
|
||||
"restart_cmd_alt"))
|
||||
return
|
||||
except errors.SubprocessError as secerr:
|
||||
error = str(secerr)
|
||||
else:
|
||||
error = str(err)
|
||||
raise errors.MisconfigurationError(error)
|
||||
|
||||
def config_test(self): # pylint: disable=no-self-use
|
||||
"""Check the configuration of Apache for errors.
|
||||
@@ -1855,7 +2090,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
###########################################################################
|
||||
def get_chall_pref(self, unused_domain): # pylint: disable=no-self-use
|
||||
"""Return list of challenge preferences."""
|
||||
return [challenges.TLSSNI01]
|
||||
return [challenges.TLSSNI01, challenges.HTTP01]
|
||||
|
||||
def perform(self, achalls):
|
||||
"""Perform the configuration related challenge.
|
||||
@@ -1867,16 +2102,21 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
self._chall_out.update(achalls)
|
||||
responses = [None] * len(achalls)
|
||||
chall_doer = tls_sni_01.ApacheTlsSni01(self)
|
||||
http_doer = http_01.ApacheHttp01(self)
|
||||
sni_doer = tls_sni_01.ApacheTlsSni01(self)
|
||||
|
||||
for i, achall in enumerate(achalls):
|
||||
# Currently also have chall_doer hold associated index of the
|
||||
# challenge. This helps to put all of the responses back together
|
||||
# when they are all complete.
|
||||
chall_doer.add_chall(achall, i)
|
||||
if isinstance(achall.chall, challenges.HTTP01):
|
||||
http_doer.add_chall(achall, i)
|
||||
else: # tls-sni-01
|
||||
sni_doer.add_chall(achall, i)
|
||||
|
||||
sni_response = chall_doer.perform()
|
||||
if sni_response:
|
||||
http_response = http_doer.perform()
|
||||
sni_response = sni_doer.perform()
|
||||
if http_response or sni_response:
|
||||
# Must reload in order to activate the challenges.
|
||||
# Handled here because we may be able to load up other challenge
|
||||
# types
|
||||
@@ -1886,14 +2126,18 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# of identifying when the new configuration is being used.
|
||||
time.sleep(3)
|
||||
|
||||
# Go through all of the challenges and assign them to the proper
|
||||
# place in the responses return value. All responses must be in the
|
||||
# same order as the original challenges.
|
||||
for i, resp in enumerate(sni_response):
|
||||
responses[chall_doer.indices[i]] = resp
|
||||
self._update_responses(responses, http_response, http_doer)
|
||||
self._update_responses(responses, sni_response, sni_doer)
|
||||
|
||||
return responses
|
||||
|
||||
def _update_responses(self, responses, chall_response, chall_doer):
|
||||
# Go through all of the challenges and assign them to the proper
|
||||
# place in the responses return value. All responses must be in the
|
||||
# same order as the original challenges.
|
||||
for i, resp in enumerate(chall_response):
|
||||
responses[chall_doer.indices[i]] = resp
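# Toy walk-through of the reassembly, with challenge objects replaced by strings:
# each doer remembers the original index of every challenge it was given, so the
# combined response list comes back in submission order.
responses = [None] * 3
http_indices, http_resps = [0, 2], ["http-resp-1", "http-resp-2"]
sni_indices, sni_resps = [1], ["sni-resp-1"]
for i, resp in enumerate(http_resps):
    responses[http_indices[i]] = resp
for i, resp in enumerate(sni_resps):
    responses[sni_indices[i]] = resp
assert responses == ["http-resp-1", "sni-resp-1", "http-resp-2"]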
|
||||
|
||||
def cleanup(self, achalls):
|
||||
"""Revert all challenges."""
|
||||
self._chall_out.difference_update(achalls)
|
||||
@@ -1912,5 +2156,3 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# to be modified.
|
||||
return common.install_version_controlled_file(options_ssl, options_ssl_digest,
|
||||
self.constant("MOD_SSL_CONF_SRC"), constants.ALL_SSL_OPTIONS_HASHES)
|
||||
|
||||
|
||||
|
||||
@@ -16,6 +16,8 @@ ALL_SSL_OPTIONS_HASHES = [
|
||||
'4066b90268c03c9ba0201068eaa39abbc02acf9558bb45a788b630eb85dadf27',
|
||||
'f175e2e7c673bd88d0aff8220735f385f916142c44aa83b09f1df88dd4767a88',
|
||||
'cfdd7c18d2025836ea3307399f509cfb1ebf2612c87dd600a65da2a8e2f2797b',
|
||||
'80720bd171ccdc2e6b917ded340defae66919e4624962396b992b7218a561791',
|
||||
'c0c022ea6b8a51ecc8f1003d0a04af6c3f2bc1c3ce506b3c2dfc1f11ef931082',
|
||||
]
"""SHA256 hashes of the contents of all previous versions of MOD_SSL_CONF_SRC"""
|
||||
|
||||
|
||||
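# The hash list lets the plugin recognize every previously shipped version of the
# SSL options file, so an unmodified local copy can safely be replaced on upgrade.
# A minimal sketch of how such a digest is computed (the path is only an example):
import hashlib

def sha256_of_file(path):
    """Return the hex SHA256 digest of a file's contents."""
    with open(path, "rb") as f:
        return hashlib.sha256(f.read()).hexdigest()

# sha256_of_file("certbot_apache/options-ssl-apache.conf")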
@@ -13,10 +13,44 @@ import certbot.display.util as display_util
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def select_vhost_multiple(vhosts):
|
||||
"""Select multiple Vhosts to install the certificate for
|
||||
|
||||
:param vhosts: Available Apache VirtualHosts
|
||||
:type vhosts: :class:`list` of type `~obj.Vhost`
|
||||
|
||||
:returns: List of VirtualHosts
|
||||
:rtype: :class:`list` of type `~obj.Vhost`
|
||||
"""
|
||||
if not vhosts:
|
||||
return list()
|
||||
tags_list = [vhost.display_repr()+"\n" for vhost in vhosts]
|
||||
# Remove the extra newline from the last entry
|
||||
if len(tags_list):
|
||||
tags_list[-1] = tags_list[-1][:-1]
|
||||
code, names = zope.component.getUtility(interfaces.IDisplay).checklist(
|
||||
"Which VirtualHosts would you like to install the wildcard certificate for?",
|
||||
tags=tags_list, force_interactive=True)
|
||||
if code == display_util.OK:
|
||||
return_vhosts = _reversemap_vhosts(names, vhosts)
|
||||
return return_vhosts
|
||||
return []
|
||||
|
||||
def _reversemap_vhosts(names, vhosts):
|
||||
"""Helper function for select_vhost_multiple for mapping string
|
||||
representations back to actual vhost objects"""
|
||||
return_vhosts = list()
|
||||
|
||||
for selection in names:
|
||||
for vhost in vhosts:
|
||||
if vhost.display_repr().strip() == selection.strip():
|
||||
return_vhosts.append(vhost)
|
||||
return return_vhosts
|
||||
|
||||
def select_vhost(domain, vhosts):
|
||||
"""Select an appropriate Apache Vhost.
|
||||
|
||||
:param vhosts: Available Apache Virtual Hosts
|
||||
:param vhosts: Available Apache VirtualHosts
|
||||
:type vhosts: :class:`list` of type `~obj.Vhost`
|
||||
|
||||
:returns: VirtualHost or `None`
|
||||
@@ -25,13 +59,11 @@ def select_vhost(domain, vhosts):
|
||||
"""
|
||||
if not vhosts:
|
||||
return None
|
||||
while True:
|
||||
code, tag = _vhost_menu(domain, vhosts)
|
||||
if code == display_util.OK:
|
||||
return vhosts[tag]
|
||||
else:
|
||||
return None
|
||||
|
||||
code, tag = _vhost_menu(domain, vhosts)
|
||||
if code == display_util.OK:
|
||||
return vhosts[tag]
|
||||
else:
|
||||
return None
|
||||
|
||||
def _vhost_menu(domain, vhosts):
|
||||
"""Select an appropriate Apache Vhost.
|
||||
|
||||
@@ -17,6 +17,7 @@ OVERRIDE_CLASSES = {
|
||||
"centos": override_centos.CentOSConfigurator,
|
||||
"centos linux": override_centos.CentOSConfigurator,
|
||||
"fedora": override_centos.CentOSConfigurator,
|
||||
"ol": override_centos.CentOSConfigurator,
|
||||
"red hat enterprise linux server": override_centos.CentOSConfigurator,
|
||||
"rhel": override_centos.CentOSConfigurator,
|
||||
"amazon": override_centos.CentOSConfigurator,
|
||||
|
||||
174
certbot-apache/certbot_apache/http_01.py
Normal file
@@ -0,0 +1,174 @@
|
||||
"""A class that performs HTTP-01 challenges for Apache"""
|
||||
import logging
|
||||
import os
|
||||
|
||||
from certbot import errors
|
||||
|
||||
from certbot.plugins import common
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class ApacheHttp01(common.TLSSNI01):
|
||||
"""Class that performs HTTP-01 challenges within the Apache configurator."""
|
||||
|
||||
CONFIG_TEMPLATE22_PRE = """\
|
||||
RewriteEngine on
|
||||
RewriteRule ^/\\.well-known/acme-challenge/([A-Za-z0-9-_=]+)$ {0}/$1 [L]
|
||||
|
||||
"""
|
||||
CONFIG_TEMPLATE22_POST = """\
|
||||
<Directory {0}>
|
||||
Order Allow,Deny
|
||||
Allow from all
|
||||
</Directory>
|
||||
<Location /.well-known/acme-challenge>
|
||||
Order Allow,Deny
|
||||
Allow from all
|
||||
</Location>
|
||||
"""
|
||||
|
||||
CONFIG_TEMPLATE24_PRE = """\
|
||||
RewriteEngine on
|
||||
RewriteRule ^/\\.well-known/acme-challenge/([A-Za-z0-9-_=]+)$ {0}/$1 [END]
|
||||
"""
|
||||
CONFIG_TEMPLATE24_POST = """\
|
||||
<Directory {0}>
|
||||
Require all granted
|
||||
</Directory>
|
||||
<Location /.well-known/acme-challenge>
|
||||
Require all granted
|
||||
</Location>
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ApacheHttp01, self).__init__(*args, **kwargs)
|
||||
self.challenge_conf_pre = os.path.join(
|
||||
self.configurator.conf("challenge-location"),
|
||||
"le_http_01_challenge_pre.conf")
|
||||
self.challenge_conf_post = os.path.join(
|
||||
self.configurator.conf("challenge-location"),
|
||||
"le_http_01_challenge_post.conf")
|
||||
self.challenge_dir = os.path.join(
|
||||
self.configurator.config.work_dir,
|
||||
"http_challenges")
|
||||
self.moded_vhosts = set()
|
||||
|
||||
def perform(self):
|
||||
"""Perform all HTTP-01 challenges."""
|
||||
if not self.achalls:
|
||||
return []
|
||||
# Save any changes to the configuration as a precaution
|
||||
# About to make temporary changes to the config
|
||||
self.configurator.save("Changes before challenge setup", True)
|
||||
|
||||
self.configurator.ensure_listen(str(
|
||||
self.configurator.config.http01_port))
|
||||
self.prepare_http01_modules()
|
||||
|
||||
responses = self._set_up_challenges()
|
||||
|
||||
self._mod_config()
|
||||
# Save reversible changes
|
||||
self.configurator.save("HTTP Challenge", True)
|
||||
|
||||
return responses
|
||||
|
||||
def prepare_http01_modules(self):
|
||||
"""Make sure that we have the needed modules available for http01"""
|
||||
|
||||
if self.configurator.conf("handle-modules"):
|
||||
needed_modules = ["rewrite"]
|
||||
if self.configurator.version < (2, 4):
|
||||
needed_modules.append("authz_host")
|
||||
else:
|
||||
needed_modules.append("authz_core")
|
||||
for mod in needed_modules:
|
||||
if mod + "_module" not in self.configurator.parser.modules:
|
||||
self.configurator.enable_mod(mod, temp=True)
|
||||
|
||||
def _mod_config(self):
|
||||
for chall in self.achalls:
|
||||
vh = self.configurator.find_best_http_vhost(
|
||||
chall.domain, filter_defaults=False,
|
||||
port=str(self.configurator.config.http01_port))
|
||||
if vh:
|
||||
self._set_up_include_directives(vh)
|
||||
else:
|
||||
for vh in self._relevant_vhosts():
|
||||
self._set_up_include_directives(vh)
|
||||
|
||||
self.configurator.reverter.register_file_creation(
|
||||
True, self.challenge_conf_pre)
|
||||
self.configurator.reverter.register_file_creation(
|
||||
True, self.challenge_conf_post)
|
||||
|
||||
if self.configurator.version < (2, 4):
|
||||
config_template_pre = self.CONFIG_TEMPLATE22_PRE
|
||||
config_template_post = self.CONFIG_TEMPLATE22_POST
|
||||
else:
|
||||
config_template_pre = self.CONFIG_TEMPLATE24_PRE
|
||||
config_template_post = self.CONFIG_TEMPLATE24_POST
|
||||
|
||||
config_text_pre = config_template_pre.format(self.challenge_dir)
|
||||
config_text_post = config_template_post.format(self.challenge_dir)
|
||||
|
||||
logger.debug("writing a pre config file with text:\n %s", config_text_pre)
|
||||
with open(self.challenge_conf_pre, "w") as new_conf:
|
||||
new_conf.write(config_text_pre)
|
||||
logger.debug("writing a post config file with text:\n %s", config_text_post)
|
||||
with open(self.challenge_conf_post, "w") as new_conf:
|
||||
new_conf.write(config_text_post)
|
||||
|
||||
def _relevant_vhosts(self):
|
||||
http01_port = str(self.configurator.config.http01_port)
|
||||
relevant_vhosts = []
|
||||
for vhost in self.configurator.vhosts:
|
||||
if any(a.is_wildcard() or a.get_port() == http01_port for a in vhost.addrs):
|
||||
if not vhost.ssl:
|
||||
relevant_vhosts.append(vhost)
|
||||
if not relevant_vhosts:
|
||||
raise errors.PluginError(
|
||||
"Unable to find a virtual host listening on port {0} which is"
|
||||
" currently needed for Certbot to prove to the CA that you"
|
||||
" control your domain. Please add a virtual host for port"
|
||||
" {0}.".format(http01_port))
|
||||
|
||||
return relevant_vhosts
|
||||
|
||||
def _set_up_challenges(self):
|
||||
if not os.path.isdir(self.challenge_dir):
|
||||
os.makedirs(self.challenge_dir)
|
||||
os.chmod(self.challenge_dir, 0o755)
|
||||
|
||||
responses = []
|
||||
for achall in self.achalls:
|
||||
responses.append(self._set_up_challenge(achall))
|
||||
|
||||
return responses
|
||||
|
||||
def _set_up_challenge(self, achall):
|
||||
response, validation = achall.response_and_validation()
|
||||
|
||||
name = os.path.join(self.challenge_dir, achall.chall.encode("token"))
|
||||
|
||||
self.configurator.reverter.register_file_creation(True, name)
|
||||
with open(name, 'wb') as f:
|
||||
f.write(validation.encode())
|
||||
os.chmod(name, 0o644)
|
||||
|
||||
return response
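# Tying this together with the rewrite templates above (token and directory are
# example values only): the validation content is written to a file named after
# the token, and the RewriteRule points the ACME HTTP request at that file.
import os

challenge_dir = "/var/lib/letsencrypt/http_challenges"       # example work_dir location
token = "evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA"        # example ACME token
validation_path = os.path.join(challenge_dir, token)
# GET http://example.com/.well-known/acme-challenge/<token> is rewritten to validation_path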
|
||||
|
||||
def _set_up_include_directives(self, vhost):
"""Adds the override configuration Includes to the beginning and the end of
the VirtualHost. Note that these Includes aren't added to the Augeas search tree"""
|
||||
|
||||
if vhost not in self.moded_vhosts:
|
||||
logger.debug(
|
||||
"Adding a temporary challenge validation Include for name: %s " +
|
||||
"in: %s", vhost.name, vhost.filep)
|
||||
self.configurator.parser.add_dir_beginning(
|
||||
vhost.path, "Include", self.challenge_conf_pre)
|
||||
self.configurator.parser.add_dir(
|
||||
vhost.path, "Include", self.challenge_conf_post)
|
||||
|
||||
self.moded_vhosts.add(vhost)
|
||||
@@ -167,6 +167,19 @@ class VirtualHost(object): # pylint: disable=too-few-public-methods
|
||||
active="Yes" if self.enabled else "No",
|
||||
modmacro="Yes" if self.modmacro else "No"))
|
||||
|
||||
def display_repr(self):
|
||||
"""Return a representation of VHost to be used in dialog"""
|
||||
return (
|
||||
"File: {filename}\n"
|
||||
"Addresses: {addrs}\n"
|
||||
"Names: {names}\n"
|
||||
"HTTPS: {https}\n".format(
|
||||
filename=self.filep,
|
||||
addrs=", ".join(str(addr) for addr in self.addrs),
|
||||
names=", ".join(self.get_names()),
|
||||
https="Yes" if self.ssl else "No"))
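# For a plain HTTP vhost the dialog entry produced above looks roughly like this
# (values are illustrative):
#
#   File: /etc/apache2/sites-available/example.conf
#   Addresses: *:80
#   Names: example.com, www.example.com
#   HTTPS: No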
|
||||
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, self.__class__):
|
||||
return (self.filep == other.filep and self.path == other.path and
|
||||
|
||||
@@ -8,7 +8,7 @@ SSLEngine on
|
||||
|
||||
# Intermediate configuration, tweak to your needs
|
||||
SSLProtocol all -SSLv2 -SSLv3
|
||||
SSLCipherSuite ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA
|
||||
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
|
||||
SSLHonorCipherOrder on
|
||||
SSLCompression off
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
version_cmd=['apachectl', '-v'],
|
||||
apache_cmd="apachectl",
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
|
||||
@@ -140,5 +140,5 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
"a2dismod are configured correctly for certbot.")
|
||||
|
||||
self.reverter.register_undo_command(
|
||||
temp, [self.conf("dismod"), mod_name])
|
||||
temp, [self.conf("dismod"), "-f", mod_name])
|
||||
util.run_script([self.conf("enmod"), mod_name])
|
||||
|
||||
@@ -21,6 +21,7 @@ class GentooConfigurator(configurator.ApacheConfigurator):
|
||||
version_cmd=['/usr/sbin/apache2', '-v'],
|
||||
apache_cmd="apache2ctl",
|
||||
restart_cmd=['apache2ctl', 'graceful'],
|
||||
restart_cmd_alt=['apache2ctl', 'restart'],
|
||||
conftest_cmd=['apache2ctl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
@@ -49,6 +50,7 @@ class GentooParser(parser.ApacheParser):
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
self.parse_sysconfig_var()
|
||||
self.update_modules()
|
||||
|
||||
def parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from Gentoo configuration file """
|
||||
@@ -56,3 +58,10 @@ class GentooParser(parser.ApacheParser):
|
||||
"APACHE2_OPTS")
|
||||
for k in defines.keys():
|
||||
self.variables[k] = defines[k]
|
||||
|
||||
def update_modules(self):
|
||||
"""Get loaded modules from httpd process, and add them to DOM"""
|
||||
mod_cmd = [self.configurator.constant("apache_cmd"), "modules"]
|
||||
matches = self.parse_from_subprocess(mod_cmd, r"(.*)_module")
|
||||
for mod in matches:
|
||||
self.add_mod(mod.strip())
|
||||
|
||||
@@ -332,6 +332,23 @@ class ApacheParser(object):
|
||||
else:
|
||||
self.aug.set(aug_conf_path + "/directive[last()]/arg", args)
|
||||
|
||||
def add_dir_beginning(self, aug_conf_path, dirname, args):
|
||||
"""Adds the directive to the beginning of defined aug_conf_path.
|
||||
|
||||
:param str aug_conf_path: Augeas configuration path to add directive
|
||||
:param str dirname: Directive to add
|
||||
:param args: Value of the directive, e.g. for Listen 443, 443 is the arg
|
||||
:type args: list or str
|
||||
"""
|
||||
first_dir = aug_conf_path + "/directive[1]"
|
||||
self.aug.insert(first_dir, "directive", True)
|
||||
self.aug.set(first_dir, dirname)
|
||||
if isinstance(args, list):
|
||||
for i, value in enumerate(args, 1):
|
||||
self.aug.set(first_dir + "/arg[%d]" % (i), value)
|
||||
else:
|
||||
self.aug.set(first_dir + "/arg", args)
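# An illustrative call (the Augeas path and file name are placeholders), mirroring
# how the HTTP-01 code prepends its challenge Include so it is evaluated before
# anything else in the vhost:
#
# parser.add_dir_beginning(
#     "/files/etc/apache2/sites-enabled/example.conf/VirtualHost",
#     "Include", "/etc/apache2/le_http_01_challenge_pre.conf")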
|
||||
|
||||
def find_dir(self, directive, arg=None, start=None, exclude=True):
|
||||
"""Finds directive in the configuration.
|
||||
|
||||
|
||||
@@ -4,6 +4,8 @@ import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import override_centos
|
||||
from certbot_apache.tests import util
|
||||
@@ -118,6 +120,20 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_value" in self.config.parser.variables.keys())
|
||||
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
|
||||
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
|
||||
|
||||
@mock.patch("certbot_apache.configurator.util.run_script")
|
||||
def test_alt_restart_works(self, mock_run_script):
|
||||
mock_run_script.side_effect = [None, errors.SubprocessError, None]
|
||||
self.config.restart()
|
||||
self.assertEquals(mock_run_script.call_count, 3)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.util.run_script")
|
||||
def test_alt_restart_errors(self, mock_run_script):
|
||||
mock_run_script.side_effect = [None,
|
||||
errors.SubprocessError,
|
||||
errors.SubprocessError]
|
||||
self.assertRaises(errors.MisconfigurationError, self.config.restart)
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -126,7 +126,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
names = self.config.get_all_names()
|
||||
self.assertEqual(names, set(
|
||||
["certbot.demo", "ocspvhost.com", "encryption-example.demo",
|
||||
"nonsym.link", "vhost.in.rootconf"]
|
||||
"nonsym.link", "vhost.in.rootconf", "www.certbot.demo"]
|
||||
))
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
@@ -146,7 +146,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
names = self.config.get_all_names()
|
||||
# Names get filtered, only 5 are returned
|
||||
self.assertEqual(len(names), 7)
|
||||
self.assertEqual(len(names), 8)
|
||||
self.assertTrue("zombo.com" in names)
|
||||
self.assertTrue("google.com" in names)
|
||||
self.assertTrue("certbot.demo" in names)
|
||||
@@ -246,7 +246,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
@mock.patch("certbot_apache.display_ops.select_vhost")
|
||||
def test_choose_vhost_select_vhost_with_temp(self, mock_select):
|
||||
mock_select.return_value = self.vh_truth[0]
|
||||
chosen_vhost = self.config.choose_vhost("none.com", temp=True)
|
||||
chosen_vhost = self.config.choose_vhost("none.com", create_if_no_ssl=False)
|
||||
self.assertEqual(self.vh_truth[0], chosen_vhost)
|
||||
|
||||
@mock.patch("certbot_apache.display_ops.select_vhost")
|
||||
@@ -260,6 +260,20 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.assertRaises(
|
||||
errors.PluginError, self.config.choose_vhost, "none.com")
|
||||
|
||||
def test_find_best_http_vhost_default(self):
|
||||
vh = obj.VirtualHost(
|
||||
"fp", "ap", set([obj.Addr.fromstring("_default_:80")]), False, True)
|
||||
self.config.vhosts = [vh]
|
||||
self.assertEqual(self.config.find_best_http_vhost("foo.bar", False), vh)
|
||||
|
||||
def test_find_best_http_vhost_port(self):
|
||||
port = "8080"
|
||||
vh = obj.VirtualHost(
|
||||
"fp", "ap", set([obj.Addr.fromstring("*:" + port)]),
|
||||
False, True, "encryption-example.demo")
|
||||
self.config.vhosts.append(vh)
|
||||
self.assertEqual(self.config.find_best_http_vhost("foo.bar", False, port), vh)
|
||||
|
||||
def test_findbest_continues_on_short_domain(self):
|
||||
# pylint: disable=protected-access
|
||||
chosen_vhost = self.config._find_best_vhost("purple.com")
|
||||
@@ -305,7 +319,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
def test_non_default_vhosts(self):
|
||||
# pylint: disable=protected-access
|
||||
self.assertEqual(len(self.config._non_default_vhosts()), 8)
|
||||
vhosts = self.config._non_default_vhosts(self.config.vhosts)
|
||||
self.assertEqual(len(vhosts), 8)
|
||||
|
||||
def test_deploy_cert_enable_new_vhost(self):
|
||||
# Create
|
||||
@@ -320,6 +335,33 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.assertTrue(ssl_vhost.enabled)
|
||||
|
||||
def test_no_duplicate_include(self):
|
||||
def mock_find_dir(directive, argument, _):
|
||||
"""Mock method for parser.find_dir"""
|
||||
if directive == "Include" and argument.endswith("options-ssl-apache.conf"):
|
||||
return ["/path/to/whatever"]
|
||||
|
||||
mock_add = mock.MagicMock()
|
||||
self.config.parser.add_dir = mock_add
|
||||
self.config._add_dummy_ssl_directives(self.vh_truth[0]) # pylint: disable=protected-access
|
||||
tried_to_add = False
|
||||
for a in mock_add.call_args_list:
|
||||
if a[0][1] == "Include" and a[0][2] == self.config.mod_ssl_conf:
|
||||
tried_to_add = True
|
||||
# Include should be added, find_dir is not patched, and returns falsy
|
||||
self.assertTrue(tried_to_add)
|
||||
|
||||
self.config.parser.find_dir = mock_find_dir
|
||||
mock_add.reset_mock()
|
||||
|
||||
self.config._add_dummy_ssl_directives(self.vh_truth[0]) # pylint: disable=protected-access
|
||||
tried_to_add = []
|
||||
for a in mock_add.call_args_list:
|
||||
tried_to_add.append(a[0][1] == "Include" and
|
||||
a[0][2] == self.config.mod_ssl_conf)
|
||||
# Include shouldn't be added, as patched find_dir "finds" existing one
|
||||
self.assertFalse(any(tried_to_add))
|
||||
|
||||
def test_deploy_cert(self):
|
||||
self.config.parser.modules.add("ssl_module")
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
@@ -399,13 +441,37 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.vh_truth[1].path))
|
||||
|
||||
def test_deploy_cert_invalid_vhost(self):
|
||||
"""For test cases where the `ApacheConfigurator` class' `_deploy_cert`
|
||||
method is called with an invalid vhost parameter. Currently this tests
|
||||
that a PluginError is appropriately raised when important directives
|
||||
are missing in an SSL module."""
|
||||
self.config.parser.modules.add("ssl_module")
|
||||
mock_find = mock.MagicMock()
|
||||
mock_find.return_value = []
|
||||
self.config.parser.find_dir = mock_find
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
self.config.parser.modules.add("socache_shmcb_module")
|
||||
|
||||
def side_effect(*args):
|
||||
"""Mocks case where an SSLCertificateFile directive can be found
|
||||
but an SSLCertificateKeyFile directive is missing."""
|
||||
if "SSLCertificateFile" in args:
|
||||
return ["example/cert.pem"]
|
||||
else:
|
||||
return []
|
||||
|
||||
mock_find_dir = mock.MagicMock(return_value=[])
|
||||
mock_find_dir.side_effect = side_effect
|
||||
|
||||
self.config.parser.find_dir = mock_find_dir
|
||||
|
||||
# Get the default 443 vhost
|
||||
self.config.assoc["random.demo"] = self.vh_truth[1]
|
||||
|
||||
self.assertRaises(
|
||||
errors.PluginError, self.config.deploy_cert, "random.demo",
|
||||
"example/cert.pem", "example/key.pem", "example/cert_chain.pem")
|
||||
|
||||
# Remove side_effect to mock case where both SSLCertificateFile
|
||||
# and SSLCertificateKeyFile directives are missing
|
||||
self.config.parser.find_dir.side_effect = None
|
||||
self.assertRaises(
|
||||
errors.PluginError, self.config.deploy_cert, "random.demo",
|
||||
"example/cert.pem", "example/key.pem", "example/cert_chain.pem")
|
||||
@@ -424,6 +490,47 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.assertTrue(self.config.parser.find_dir(
|
||||
"NameVirtualHost", "*:80"))
|
||||
|
||||
def test_add_listen_80(self):
|
||||
mock_find = mock.Mock()
|
||||
mock_add_dir = mock.Mock()
|
||||
mock_find.return_value = []
|
||||
self.config.parser.find_dir = mock_find
|
||||
self.config.parser.add_dir = mock_add_dir
|
||||
self.config.ensure_listen("80")
|
||||
self.assertTrue(mock_add_dir.called)
|
||||
self.assertTrue(mock_find.called)
|
||||
self.assertEqual(mock_add_dir.call_args[0][1], "Listen")
|
||||
self.assertEqual(mock_add_dir.call_args[0][2], "80")
|
||||
|
||||
def test_add_listen_80_named(self):
|
||||
mock_find = mock.Mock()
|
||||
mock_find.return_value = ["test1", "test2", "test3"]
|
||||
mock_get = mock.Mock()
|
||||
mock_get.side_effect = ["1.2.3.4:80", "[::1]:80", "1.1.1.1:443"]
|
||||
mock_add_dir = mock.Mock()
|
||||
|
||||
self.config.parser.find_dir = mock_find
|
||||
self.config.parser.get_arg = mock_get
|
||||
self.config.parser.add_dir = mock_add_dir
|
||||
|
||||
self.config.ensure_listen("80")
|
||||
self.assertEqual(mock_add_dir.call_count, 0)
|
||||
|
||||
# Reset return lists and inputs
|
||||
mock_add_dir.reset_mock()
|
||||
mock_get.side_effect = ["1.2.3.4:80", "[::1]:80", "1.1.1.1:443"]
|
||||
|
||||
# Test
|
||||
self.config.ensure_listen("8080")
|
||||
self.assertEqual(mock_add_dir.call_count, 3)
|
||||
self.assertTrue(mock_add_dir.called)
|
||||
self.assertEqual(mock_add_dir.call_args[0][1], "Listen")
|
||||
call_found = False
|
||||
for mock_call in mock_add_dir.mock_calls:
|
||||
if mock_call[1][2] == ['1.2.3.4:8080']:
|
||||
call_found = True
|
||||
self.assertTrue(call_found)
|
||||
|
||||
def test_prepare_server_https(self):
|
||||
mock_enable = mock.Mock()
|
||||
self.config.enable_mod = mock_enable
|
||||
@@ -435,7 +542,6 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# This will test the Add listen
|
||||
self.config.parser.find_dir = mock_find
|
||||
self.config.parser.add_dir_to_ifmodssl = mock_add_dir
|
||||
|
||||
self.config.prepare_server_https("443")
|
||||
# Changing the order these modules are enabled breaks the reverter
|
||||
self.assertEqual(mock_enable.call_args_list[0][0][0], "socache_shmcb")
|
||||
@@ -676,23 +782,33 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config._add_name_vhost_if_necessary(self.vh_truth[0])
|
||||
self.assertEqual(self.config.add_name_vhost.call_count, 2)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.http_01.ApacheHttp01.perform")
|
||||
@mock.patch("certbot_apache.configurator.tls_sni_01.ApacheTlsSni01.perform")
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
|
||||
def test_perform(self, mock_restart, mock_perform):
|
||||
def test_perform(self, mock_restart, mock_tls_perform, mock_http_perform):
|
||||
# Only tests functionality specific to configurator.perform
|
||||
# Note: As more challenges are offered this will have to be expanded
|
||||
account_key, achall1, achall2 = self.get_achalls()
|
||||
account_key, achalls = self.get_key_and_achalls()
|
||||
|
||||
expected = [
|
||||
achall1.response(account_key),
|
||||
achall2.response(account_key),
|
||||
]
|
||||
all_expected = []
|
||||
http_expected = []
|
||||
tls_expected = []
|
||||
for achall in achalls:
|
||||
response = achall.response(account_key)
|
||||
if isinstance(achall.chall, challenges.HTTP01):
|
||||
http_expected.append(response)
|
||||
else:
|
||||
tls_expected.append(response)
|
||||
all_expected.append(response)
|
||||
|
||||
mock_perform.return_value = expected
|
||||
responses = self.config.perform([achall1, achall2])
|
||||
mock_http_perform.return_value = http_expected
|
||||
mock_tls_perform.return_value = tls_expected
|
||||
|
||||
self.assertEqual(mock_perform.call_count, 1)
|
||||
self.assertEqual(responses, expected)
|
||||
responses = self.config.perform(achalls)
|
||||
|
||||
self.assertEqual(mock_http_perform.call_count, 1)
|
||||
self.assertEqual(mock_tls_perform.call_count, 1)
|
||||
self.assertEqual(responses, all_expected)
|
||||
|
||||
self.assertEqual(mock_restart.call_count, 1)
|
||||
|
||||
@@ -700,29 +816,32 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
|
||||
def test_cleanup(self, mock_cfg, mock_restart):
|
||||
mock_cfg.return_value = ""
|
||||
_, achall1, achall2 = self.get_achalls()
|
||||
_, achalls = self.get_key_and_achalls()
|
||||
|
||||
self.config._chall_out.add(achall1) # pylint: disable=protected-access
|
||||
self.config._chall_out.add(achall2) # pylint: disable=protected-access
|
||||
for achall in achalls:
|
||||
self.config._chall_out.add(achall) # pylint: disable=protected-access
|
||||
|
||||
self.config.cleanup([achall1])
|
||||
self.assertFalse(mock_restart.called)
|
||||
|
||||
self.config.cleanup([achall2])
|
||||
self.assertTrue(mock_restart.called)
|
||||
for i, achall in enumerate(achalls):
|
||||
self.config.cleanup([achall])
|
||||
if i == len(achalls) - 1:
|
||||
self.assertTrue(mock_restart.called)
|
||||
else:
|
||||
self.assertFalse(mock_restart.called)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
|
||||
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
|
||||
def test_cleanup_no_errors(self, mock_cfg, mock_restart):
|
||||
mock_cfg.return_value = ""
|
||||
_, achall1, achall2 = self.get_achalls()
|
||||
_, achalls = self.get_key_and_achalls()
|
||||
self.config.http_doer = mock.MagicMock()
|
||||
|
||||
self.config._chall_out.add(achall1) # pylint: disable=protected-access
|
||||
for achall in achalls:
|
||||
self.config._chall_out.add(achall) # pylint: disable=protected-access
|
||||
|
||||
self.config.cleanup([achall2])
|
||||
self.config.cleanup([achalls[-1]])
|
||||
self.assertFalse(mock_restart.called)
|
||||
|
||||
self.config.cleanup([achall1, achall2])
|
||||
self.config.cleanup(achalls)
|
||||
self.assertTrue(mock_restart.called)
|
||||
|
||||
@mock.patch("certbot.util.run_script")
|
||||
@@ -817,6 +936,22 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
errors.PluginError,
|
||||
self.config.enhance, "certbot.demo", "unknown_enhancement")
|
||||
|
||||
def test_enhance_no_ssl_vhost(self):
|
||||
with mock.patch("certbot_apache.configurator.logger.warning") as mock_log:
|
||||
self.assertRaises(errors.PluginError, self.config.enhance,
|
||||
"certbot.demo", "redirect")
|
||||
# Check that correct logger.warning was printed
|
||||
self.assertTrue("not able to find" in mock_log.call_args[0][0])
|
||||
self.assertTrue("\"redirect\"" in mock_log.call_args[0][0])
|
||||
|
||||
mock_log.reset_mock()
|
||||
|
||||
self.assertRaises(errors.PluginError, self.config.enhance,
|
||||
"certbot.demo", "ensure-http-header", "Test")
|
||||
# Check that correct logger.warning was printed
|
||||
self.assertTrue("not able to find" in mock_log.call_args[0][0])
|
||||
self.assertTrue("Test" in mock_log.call_args[0][0])
|
||||
|
||||
@mock.patch("certbot.util.exe_exists")
|
||||
def test_ocsp_stapling(self, mock_exe):
|
||||
self.config.parser.update_runtime_variables = mock.Mock()
|
||||
@@ -826,6 +961,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
mock_exe.return_value = True
|
||||
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
self.config.choose_vhost("certbot.demo")
|
||||
self.config.enhance("certbot.demo", "staple-ocsp")
|
||||
|
||||
# Get the ssl vhost for certbot.demo
|
||||
@@ -852,6 +988,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
mock_exe.return_value = True
|
||||
|
||||
# Checking the case with already enabled ocsp stapling configuration
|
||||
self.config.choose_vhost("ocspvhost.com")
|
||||
self.config.enhance("ocspvhost.com", "staple-ocsp")
|
||||
|
||||
# Get the ssl vhost for letsencrypt.demo
|
||||
@@ -876,6 +1013,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
self.config.parser.modules.add("socache_shmcb_module")
|
||||
self.config.get_version = mock.Mock(return_value=(2, 2, 0))
|
||||
self.config.choose_vhost("certbot.demo")
|
||||
|
||||
self.assertRaises(errors.PluginError,
|
||||
self.config.enhance, "certbot.demo", "staple-ocsp")
|
||||
@@ -901,6 +1039,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
mock_exe.return_value = True
|
||||
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
self.config.choose_vhost("certbot.demo")
|
||||
self.config.enhance("certbot.demo", "ensure-http-header",
|
||||
"Strict-Transport-Security")
|
||||
|
||||
@@ -920,7 +1059,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# skip the enable mod
|
||||
self.config.parser.modules.add("headers_module")
|
||||
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
# This will create an ssl vhost for encryption-example.demo
|
||||
self.config.choose_vhost("encryption-example.demo")
|
||||
self.config.enhance("encryption-example.demo", "ensure-http-header",
|
||||
"Strict-Transport-Security")
|
||||
|
||||
@@ -939,6 +1079,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
mock_exe.return_value = True
|
||||
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
self.config.choose_vhost("certbot.demo")
|
||||
self.config.enhance("certbot.demo", "ensure-http-header",
|
||||
"Upgrade-Insecure-Requests")
|
||||
|
||||
@@ -960,7 +1101,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# skip the enable mod
|
||||
self.config.parser.modules.add("headers_module")
|
||||
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
# This will create an ssl vhost for encryption-example.demo
|
||||
self.config.choose_vhost("encryption-example.demo")
|
||||
self.config.enhance("encryption-example.demo", "ensure-http-header",
|
||||
"Upgrade-Insecure-Requests")
|
||||
|
||||
@@ -978,6 +1120,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.get_version = mock.Mock(return_value=(2, 2))
|
||||
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
self.config.choose_vhost("certbot.demo")
|
||||
self.config.enhance("certbot.demo", "redirect")
|
||||
|
||||
# These are not immediately available in find_dir even with save() and
|
||||
@@ -1028,6 +1171,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.save()
|
||||
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
self.config.choose_vhost("certbot.demo")
|
||||
self.config.enhance("certbot.demo", "redirect")
|
||||
|
||||
# These are not immediately available in find_dir even with save() and
|
||||
@@ -1094,6 +1238,9 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.parser.modules.add("rewrite_module")
|
||||
self.config.get_version = mock.Mock(return_value=(2, 3, 9))
|
||||
|
||||
# Creates ssl vhost for the domain
|
||||
self.config.choose_vhost("red.blue.purple.com")
|
||||
|
||||
self.config.enhance("red.blue.purple.com", "redirect")
|
||||
verify_no_redirect = ("certbot_apache.configurator."
|
||||
"ApacheConfigurator._verify_no_certbot_redirect")
|
||||
@@ -1105,7 +1252,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# Skip the enable mod
|
||||
self.config.parser.modules.add("rewrite_module")
|
||||
self.config.get_version = mock.Mock(return_value=(2, 3, 9))
|
||||
|
||||
self.config.choose_vhost("red.blue.purple.com")
|
||||
self.config.enhance("red.blue.purple.com", "redirect")
|
||||
# Clear state about enabling redirect on this run
|
||||
# pylint: disable=protected-access
|
||||
@@ -1151,7 +1298,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
not_rewriterule = "NotRewriteRule ^ ..."
|
||||
self.assertFalse(self.config._sift_rewrite_rule(not_rewriterule))
|
||||
|
||||
def get_achalls(self):
|
||||
def get_key_and_achalls(self):
|
||||
"""Return testing achallenges."""
|
||||
account_key = self.rsa512jwk
|
||||
achall1 = achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
@@ -1166,8 +1313,12 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
token=b"uqnaPzxtrndteOqtrXb0Asl5gOJfWAnnx6QJyvcmlDU"),
|
||||
"pending"),
|
||||
domain="certbot.demo", account_key=account_key)
|
||||
achall3 = achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.chall_to_challb(
|
||||
challenges.HTTP01(token=(b'x' * 16)), "pending"),
|
||||
domain="example.org", account_key=account_key)
|
||||
|
||||
return account_key, achall1, achall2
|
||||
return account_key, (achall1, achall2, achall3)
|
||||
|
||||
def test_make_addrs_sni_ready(self):
|
||||
self.config.version = (2, 2)
|
||||
@@ -1238,6 +1389,108 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.enable_mod,
|
||||
"whatever")
|
||||
|
||||
def test_wildcard_domain(self):
|
||||
# pylint: disable=protected-access
|
||||
cases = {u"*.example.org": True, b"*.x.example.org": True,
|
||||
u"a.example.org": False, b"a.x.example.org": False}
|
||||
for key in cases.keys():
|
||||
self.assertEqual(self.config._wildcard_domain(key), cases[key])
|
||||
|
||||
def test_choose_vhosts_wildcard(self):
|
||||
# pylint: disable=protected-access
|
||||
mock_path = "certbot_apache.display_ops.select_vhost_multiple"
|
||||
with mock.patch(mock_path) as mock_select_vhs:
|
||||
mock_select_vhs.return_value = [self.vh_truth[3]]
|
||||
vhs = self.config._choose_vhosts_wildcard("*.certbot.demo",
|
||||
create_ssl=True)
|
||||
# Check that the dialog was called with one vh: certbot.demo
|
||||
self.assertEquals(mock_select_vhs.call_args[0][0][0], self.vh_truth[3])
|
||||
self.assertEquals(len(mock_select_vhs.call_args_list), 1)
|
||||
|
||||
# And the actual returned values
|
||||
self.assertEquals(len(vhs), 1)
|
||||
self.assertTrue(vhs[0].name == "certbot.demo")
|
||||
self.assertTrue(vhs[0].ssl)
|
||||
|
||||
self.assertFalse(vhs[0] == self.vh_truth[3])
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.make_vhost_ssl")
|
||||
def test_choose_vhosts_wildcard_no_ssl(self, mock_makessl):
|
||||
# pylint: disable=protected-access
|
||||
mock_path = "certbot_apache.display_ops.select_vhost_multiple"
|
||||
with mock.patch(mock_path) as mock_select_vhs:
|
||||
mock_select_vhs.return_value = [self.vh_truth[1]]
|
||||
vhs = self.config._choose_vhosts_wildcard("*.certbot.demo",
|
||||
create_ssl=False)
|
||||
self.assertFalse(mock_makessl.called)
|
||||
self.assertEquals(vhs[0], self.vh_truth[1])
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator._vhosts_for_wildcard")
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.make_vhost_ssl")
|
||||
def test_choose_vhosts_wildcard_already_ssl(self, mock_makessl, mock_vh_for_w):
|
||||
# pylint: disable=protected-access
|
||||
# Already SSL vhost
|
||||
mock_vh_for_w.return_value = [self.vh_truth[7]]
|
||||
mock_path = "certbot_apache.display_ops.select_vhost_multiple"
|
||||
with mock.patch(mock_path) as mock_select_vhs:
|
||||
mock_select_vhs.return_value = [self.vh_truth[7]]
|
||||
vhs = self.config._choose_vhosts_wildcard("whatever",
|
||||
create_ssl=True)
|
||||
self.assertEquals(mock_select_vhs.call_args[0][0][0], self.vh_truth[7])
|
||||
self.assertEquals(len(mock_select_vhs.call_args_list), 1)
|
||||
# Ensure that make_vhost_ssl was not called, vhost.ssl == true
|
||||
self.assertFalse(mock_makessl.called)
|
||||
|
||||
# And the actual returned values
|
||||
self.assertEquals(len(vhs), 1)
|
||||
self.assertTrue(vhs[0].ssl)
|
||||
self.assertEquals(vhs[0], self.vh_truth[7])
|
||||
|
||||
|
||||
def test_deploy_cert_wildcard(self):
|
||||
# pylint: disable=protected-access
|
||||
mock_choose_vhosts = mock.MagicMock()
|
||||
mock_choose_vhosts.return_value = [self.vh_truth[7]]
|
||||
self.config._choose_vhosts_wildcard = mock_choose_vhosts
|
||||
mock_d = "certbot_apache.configurator.ApacheConfigurator._deploy_cert"
|
||||
with mock.patch(mock_d) as mock_dep:
|
||||
self.config.deploy_cert("*.wildcard.example.org", "/tmp/path",
|
||||
"/tmp/path", "/tmp/path", "/tmp/path")
|
||||
self.assertTrue(mock_dep.called)
|
||||
self.assertEquals(len(mock_dep.call_args_list), 1)
|
||||
self.assertEqual(self.vh_truth[7], mock_dep.call_args_list[0][0][0])
|
||||
|
||||
@mock.patch("certbot_apache.display_ops.select_vhost_multiple")
|
||||
def test_deploy_cert_wildcard_no_vhosts(self, mock_dialog):
|
||||
# pylint: disable=protected-access
|
||||
mock_dialog.return_value = []
|
||||
self.assertRaises(errors.PluginError,
|
||||
self.config.deploy_cert,
|
||||
"*.wild.cat", "/tmp/path", "/tmp/path",
|
||||
"/tmp/path", "/tmp/path")
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator._choose_vhosts_wildcard")
|
||||
def test_enhance_wildcard_after_install(self, mock_choose):
|
||||
# pylint: disable=protected-access
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
self.config.parser.modules.add("headers_module")
|
||||
self.vh_truth[3].ssl = True
|
||||
self.config._wildcard_vhosts["*.certbot.demo"] = [self.vh_truth[3]]
|
||||
self.config.enhance("*.certbot.demo", "ensure-http-header",
|
||||
"Upgrade-Insecure-Requests")
|
||||
self.assertFalse(mock_choose.called)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator._choose_vhosts_wildcard")
|
||||
def test_enhance_wildcard_no_install(self, mock_choose):
|
||||
self.vh_truth[3].ssl = True
|
||||
mock_choose.return_value = [self.vh_truth[3]]
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
self.config.parser.modules.add("headers_module")
|
||||
self.config.enhance("*.certbot.demo", "ensure-http-header",
|
||||
"Upgrade-Insecure-Requests")
|
||||
self.assertTrue(mock_choose.called)
|
||||
|
||||
|
||||
class AugeasVhostsTest(util.ApacheTest):
|
||||
"""Test vhosts with illegal names dependent on augeas version."""
|
||||
# pylint: disable=protected-access
|
||||
|
||||
@@ -161,6 +161,8 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
self.config.get_version = mock.Mock(return_value=(2, 4, 7))
|
||||
mock_exe.return_value = True
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
self.config.choose_vhost("certbot.demo")
|
||||
self.config.enhance("certbot.demo", "staple-ocsp")
|
||||
self.assertTrue("socache_shmcb_module" in self.config.parser.modules)
|
||||
|
||||
@@ -172,6 +174,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
mock_exe.return_value = True
|
||||
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
self.config.choose_vhost("certbot.demo")
|
||||
self.config.enhance("certbot.demo", "ensure-http-header",
|
||||
"Strict-Transport-Security")
|
||||
self.assertTrue("headers_module" in self.config.parser.modules)
|
||||
@@ -183,6 +186,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
mock_exe.return_value = True
|
||||
self.config.get_version = mock.Mock(return_value=(2, 2))
|
||||
# This will create an ssl vhost for certbot.demo
|
||||
self.config.choose_vhost("certbot.demo")
|
||||
self.config.enhance("certbot.demo", "redirect")
|
||||
self.assertTrue("rewrite_module" in self.config.parser.modules)
|
||||
|
||||
|
||||
@@ -11,9 +11,39 @@ from certbot.tests import util as certbot_util
|
||||
|
||||
from certbot_apache import obj
|
||||
|
||||
from certbot_apache.display_ops import select_vhost_multiple
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
class SelectVhostMultiTest(unittest.TestCase):
|
||||
"""Tests for certbot_apache.display_ops.select_vhost_multiple."""
|
||||
|
||||
def setUp(self):
|
||||
self.base_dir = "/example_path"
|
||||
self.vhosts = util.get_vh_truth(
|
||||
self.base_dir, "debian_apache_2_4/multiple_vhosts")
|
||||
|
||||
def test_select_no_input(self):
|
||||
self.assertFalse(select_vhost_multiple([]))
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
def test_select_correct(self, mock_util):
|
||||
mock_util().checklist.return_value = (
|
||||
display_util.OK, [self.vhosts[3].display_repr(),
|
||||
self.vhosts[2].display_repr()])
|
||||
vhs = select_vhost_multiple([self.vhosts[3],
|
||||
self.vhosts[2],
|
||||
self.vhosts[1]])
|
||||
self.assertTrue(self.vhosts[2] in vhs)
|
||||
self.assertTrue(self.vhosts[3] in vhs)
|
||||
self.assertFalse(self.vhosts[1] in vhs)
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
def test_select_cancel(self, mock_util):
|
||||
mock_util().checklist.return_value = (display_util.CANCEL, "whatever")
|
||||
vhs = select_vhost_multiple([self.vhosts[2], self.vhosts[3]])
|
||||
self.assertFalse(vhs)
|
||||
|
||||
class SelectVhostTest(unittest.TestCase):
|
||||
"""Tests for certbot_apache.display_ops.select_vhost."""
|
||||
|
||||
|
||||
@@ -2,6 +2,10 @@
|
||||
import os
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
|
||||
from certbot_apache import override_gentoo
|
||||
from certbot_apache import obj
|
||||
from certbot_apache.tests import util
|
||||
@@ -46,9 +50,10 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="gentoo")
|
||||
with mock.patch("certbot_apache.override_gentoo.GentooParser.update_runtime_variables"):
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="gentoo")
|
||||
self.vh_truth = get_vh_truth(
|
||||
self.temp_dir, "gentoo_apache/apache")
|
||||
|
||||
@@ -78,9 +83,53 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
|
||||
self.config.parser.apacheconfig_filep = os.path.realpath(
|
||||
os.path.join(self.config.parser.root, "../conf.d/apache2"))
|
||||
self.config.parser.variables = {}
|
||||
self.config.parser.update_runtime_variables()
|
||||
with mock.patch("certbot_apache.override_gentoo.GentooParser.update_modules"):
|
||||
self.config.parser.update_runtime_variables()
|
||||
for define in defines:
|
||||
self.assertTrue(define in self.config.parser.variables.keys())
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser.parse_from_subprocess")
|
||||
def test_no_binary_configdump(self, mock_subprocess):
|
||||
"""Make sure we don't call binary dumps other than modules from Apache
|
||||
as this is not supported in Gentoo currently"""
|
||||
|
||||
with mock.patch("certbot_apache.override_gentoo.GentooParser.update_modules"):
|
||||
self.config.parser.update_runtime_variables()
|
||||
self.config.parser.reset_modules()
|
||||
self.assertFalse(mock_subprocess.called)
|
||||
|
||||
self.config.parser.update_runtime_variables()
|
||||
self.config.parser.reset_modules()
|
||||
self.assertTrue(mock_subprocess.called)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
|
||||
def test_opportunistic_httpd_runtime_parsing(self, mock_get):
|
||||
mod_val = (
|
||||
'Loaded Modules:\n'
|
||||
' mock_module (static)\n'
|
||||
' another_module (static)\n'
|
||||
)
|
||||
def mock_get_cfg(command):
|
||||
"""Mock httpd process stdout"""
|
||||
if command == ['apache2ctl', 'modules']:
|
||||
return mod_val
|
||||
mock_get.side_effect = mock_get_cfg
|
||||
self.config.parser.modules = set()
|
||||
|
||||
with mock.patch("certbot.util.get_os_info") as mock_osi:
|
||||
# Make sure we have the CentOS httpd constants
|
||||
mock_osi.return_value = ("gentoo", "123")
|
||||
self.config.parser.update_runtime_variables()
|
||||
|
||||
self.assertEquals(mock_get.call_count, 1)
|
||||
self.assertEquals(len(self.config.parser.modules), 4)
|
||||
self.assertTrue("mod_another.c" in self.config.parser.modules)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.util.run_script")
|
||||
def test_alt_restart_works(self, mock_run_script):
|
||||
mock_run_script.side_effect = [None, errors.SubprocessError, None]
|
||||
self.config.restart()
|
||||
self.assertEquals(mock_run_script.call_count, 3)
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
206  certbot-apache/certbot_apache/tests/http_01_test.py  (Normal file)
@@ -0,0 +1,206 @@
|
||||
"""Test for certbot_apache.http_01."""
|
||||
import mock
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from acme import challenges
|
||||
|
||||
from certbot import achallenges
|
||||
from certbot import errors
|
||||
|
||||
from certbot.tests import acme_util
|
||||
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
NUM_ACHALLS = 3
|
||||
|
||||
|
||||
class ApacheHttp01Test(util.ApacheTest):
|
||||
"""Test for certbot_apache.http_01.ApacheHttp01."""
|
||||
|
||||
def setUp(self, *args, **kwargs):
|
||||
super(ApacheHttp01Test, self).setUp(*args, **kwargs)
|
||||
|
||||
self.account_key = self.rsa512jwk
|
||||
self.achalls = []
|
||||
vh_truth = util.get_vh_truth(
|
||||
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
|
||||
# Takes the vhosts for encryption-example.demo, certbot.demo, and
|
||||
# vhost.in.rootconf
|
||||
self.vhosts = [vh_truth[0], vh_truth[3], vh_truth[10]]
|
||||
|
||||
for i in range(NUM_ACHALLS):
|
||||
self.achalls.append(
|
||||
achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.chall_to_challb(
|
||||
challenges.HTTP01(token=((chr(ord('a') + i).encode() * 16))),
|
||||
"pending"),
|
||||
domain=self.vhosts[i].name, account_key=self.account_key))
|
||||
|
||||
modules = ["rewrite", "authz_core", "authz_host"]
|
||||
for mod in modules:
|
||||
self.config.parser.modules.add("mod_{0}.c".format(mod))
|
||||
self.config.parser.modules.add(mod + "_module")
|
||||
|
||||
from certbot_apache.http_01 import ApacheHttp01
|
||||
self.http = ApacheHttp01(self.config)
|
||||
|
||||
def test_empty_perform(self):
|
||||
self.assertFalse(self.http.perform())
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.enable_mod")
|
||||
def test_enable_modules_apache_2_2(self, mock_enmod):
|
||||
self.config.version = (2, 2)
|
||||
self.config.parser.modules.remove("authz_host_module")
|
||||
self.config.parser.modules.remove("mod_authz_host.c")
|
||||
|
||||
enmod_calls = self.common_enable_modules_test(mock_enmod)
|
||||
self.assertEqual(enmod_calls[0][0][0], "authz_host")
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.enable_mod")
|
||||
def test_enable_modules_apache_2_4(self, mock_enmod):
|
||||
self.config.parser.modules.remove("authz_core_module")
|
||||
self.config.parser.modules.remove("mod_authz_core.c")
|
||||
|
||||
enmod_calls = self.common_enable_modules_test(mock_enmod)
|
||||
self.assertEqual(enmod_calls[0][0][0], "authz_core")
|
||||
|
||||
def common_enable_modules_test(self, mock_enmod):
|
||||
"""Tests enabling mod_rewrite and other modules."""
|
||||
self.config.parser.modules.remove("rewrite_module")
|
||||
self.config.parser.modules.remove("mod_rewrite.c")
|
||||
|
||||
self.http.prepare_http01_modules()
|
||||
|
||||
self.assertTrue(mock_enmod.called)
|
||||
calls = mock_enmod.call_args_list
|
||||
other_calls = []
|
||||
for call in calls:
|
||||
if "rewrite" != call[0][0]:
|
||||
other_calls.append(call)
|
||||
|
||||
# If these lists are equal, we never enabled mod_rewrite
|
||||
self.assertNotEqual(calls, other_calls)
|
||||
return other_calls
|
||||
|
||||
def test_same_vhost(self):
|
||||
vhost = next(v for v in self.config.vhosts if v.name == "certbot.demo")
|
||||
achalls = [
|
||||
achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.chall_to_challb(
|
||||
challenges.HTTP01(token=((b'a' * 16))),
|
||||
"pending"),
|
||||
domain=vhost.name, account_key=self.account_key),
|
||||
achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.chall_to_challb(
|
||||
challenges.HTTP01(token=((b'b' * 16))),
|
||||
"pending"),
|
||||
domain=next(iter(vhost.aliases)), account_key=self.account_key)
|
||||
]
|
||||
self.common_perform_test(achalls, [vhost])
|
||||
|
||||
def test_anonymous_vhost(self):
|
||||
vhosts = [v for v in self.config.vhosts if not v.ssl]
|
||||
achalls = [
|
||||
achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.chall_to_challb(
|
||||
challenges.HTTP01(token=((b'a' * 16))),
|
||||
"pending"),
|
||||
domain="something.nonexistent", account_key=self.account_key)]
|
||||
self.common_perform_test(achalls, vhosts)
|
||||
|
||||
def test_no_vhost(self):
|
||||
for achall in self.achalls:
|
||||
self.http.add_chall(achall)
|
||||
self.config.config.http01_port = 12345
|
||||
self.assertRaises(errors.PluginError, self.http.perform)
|
||||
|
||||
def test_perform_1_achall_apache_2_2(self):
|
||||
self.combinations_perform_test(num_achalls=1, minor_version=2)
|
||||
|
||||
def test_perform_1_achall_apache_2_4(self):
|
||||
self.combinations_perform_test(num_achalls=1, minor_version=4)
|
||||
|
||||
def test_perform_2_achall_apache_2_2(self):
|
||||
self.combinations_perform_test(num_achalls=2, minor_version=2)
|
||||
|
||||
def test_perform_2_achall_apache_2_4(self):
|
||||
self.combinations_perform_test(num_achalls=2, minor_version=4)
|
||||
|
||||
def test_perform_3_achall_apache_2_2(self):
|
||||
self.combinations_perform_test(num_achalls=3, minor_version=2)
|
||||
|
||||
def test_perform_3_achall_apache_2_4(self):
|
||||
self.combinations_perform_test(num_achalls=3, minor_version=4)
|
||||
|
||||
def combinations_perform_test(self, num_achalls, minor_version):
|
||||
"""Test perform with the given achall count and Apache version."""
|
||||
achalls = self.achalls[:num_achalls]
|
||||
vhosts = self.vhosts[:num_achalls]
|
||||
self.config.version = (2, minor_version)
|
||||
self.common_perform_test(achalls, vhosts)
|
||||
|
||||
def common_perform_test(self, achalls, vhosts):
|
||||
"""Tests perform with the given achalls."""
|
||||
challenge_dir = self.http.challenge_dir
|
||||
self.assertFalse(os.path.exists(challenge_dir))
|
||||
for achall in achalls:
|
||||
self.http.add_chall(achall)
|
||||
|
||||
expected_response = [
|
||||
achall.response(self.account_key) for achall in achalls]
|
||||
self.assertEqual(self.http.perform(), expected_response)
|
||||
|
||||
self.assertTrue(os.path.isdir(self.http.challenge_dir))
|
||||
self._has_min_permissions(self.http.challenge_dir, 0o755)
|
||||
self._test_challenge_conf()
|
||||
|
||||
for achall in achalls:
|
||||
self._test_challenge_file(achall)
|
||||
|
||||
for vhost in vhosts:
|
||||
if not vhost.ssl:
|
||||
matches = self.config.parser.find_dir("Include",
|
||||
self.http.challenge_conf_pre,
|
||||
vhost.path)
|
||||
self.assertEqual(len(matches), 1)
|
||||
matches = self.config.parser.find_dir("Include",
|
||||
self.http.challenge_conf_post,
|
||||
vhost.path)
|
||||
self.assertEqual(len(matches), 1)
|
||||
|
||||
self.assertTrue(os.path.exists(challenge_dir))
|
||||
|
||||
def _test_challenge_conf(self):
|
||||
with open(self.http.challenge_conf_pre) as f:
|
||||
pre_conf_contents = f.read()
|
||||
|
||||
with open(self.http.challenge_conf_post) as f:
|
||||
post_conf_contents = f.read()
|
||||
|
||||
self.assertTrue("RewriteEngine on" in pre_conf_contents)
|
||||
self.assertTrue("RewriteRule" in pre_conf_contents)
|
||||
|
||||
self.assertTrue(self.http.challenge_dir in post_conf_contents)
|
||||
if self.config.version < (2, 4):
|
||||
self.assertTrue("Allow from all" in post_conf_contents)
|
||||
else:
|
||||
self.assertTrue("Require all granted" in post_conf_contents)
|
||||
|
||||
def _test_challenge_file(self, achall):
|
||||
name = os.path.join(self.http.challenge_dir, achall.chall.encode("token"))
|
||||
validation = achall.validation(self.account_key)
|
||||
|
||||
self._has_min_permissions(name, 0o644)
|
||||
with open(name, 'rb') as f:
|
||||
self.assertEqual(f.read(), validation.encode())
|
||||
|
||||
def _has_min_permissions(self, path, min_mode):
|
||||
"""Tests the given file has at least the permissions in mode."""
|
||||
st_mode = os.stat(path).st_mode
|
||||
self.assertEqual(st_mode, st_mode | min_mode)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -66,6 +66,23 @@ class BasicParserTest(util.ParserTest):
|
||||
for i, match in enumerate(matches):
|
||||
self.assertEqual(self.parser.aug.get(match), str(i + 1))
|
||||
|
||||
def test_add_dir_beginning(self):
|
||||
aug_default = "/files" + self.parser.loc["default"]
|
||||
self.parser.add_dir_beginning(aug_default,
|
||||
"AddDirectiveBeginning",
|
||||
"testBegin")
|
||||
|
||||
self.assertTrue(
|
||||
self.parser.find_dir("AddDirectiveBeginning", "testBegin", aug_default))
|
||||
|
||||
self.assertEqual(
|
||||
self.parser.aug.get(aug_default+"/directive[1]"),
|
||||
"AddDirectiveBeginning")
|
||||
self.parser.add_dir_beginning(aug_default, "AddList", ["1", "2", "3", "4"])
|
||||
matches = self.parser.find_dir("AddList", None, aug_default)
|
||||
for i, match in enumerate(matches):
|
||||
self.assertEqual(self.parser.aug.get(match), str(i + 1))
|
||||
|
||||
def test_empty_arg(self):
|
||||
self.assertEquals(None,
|
||||
self.parser.get_arg("/files/whatever/nonexistent"))
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
# To pass additional options (for instance, -D definitions) to the
|
||||
# httpd binary at startup, set OPTIONS here.
|
||||
#
|
||||
OPTIONS="-D mock_define -D mock_define_too -D mock_value=TRUE"
|
||||
OPTIONS="-D mock_define -D mock_define_too -D mock_value=TRUE -DMOCK_NOSEP -DNOSEP_TWO=NOSEP_VAL"
|
||||
|
||||
#
|
||||
# This setting ensures the httpd process is started in the "C" locale
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<VirtualHost *:80>
|
||||
ServerName certbot.demo
|
||||
ServerAlias www.certbot.demo
|
||||
ServerAdmin webmaster@localhost
|
||||
|
||||
DocumentRoot /var/www-certbot-reworld/static/
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Test for certbot_apache.tls_sni_01."""
|
||||
import unittest
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
@@ -16,8 +16,8 @@ from six.moves import xrange # pylint: disable=redefined-builtin, import-error
|
||||
class TlsSniPerformTest(util.ApacheTest):
|
||||
"""Test the ApacheTlsSni01 challenge."""
|
||||
|
||||
auth_key = common_test.TLSSNI01Test.auth_key
|
||||
achalls = common_test.TLSSNI01Test.achalls
|
||||
auth_key = common_test.AUTH_KEY
|
||||
achalls = common_test.ACHALLS
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(TlsSniPerformTest, self).setUp()
|
||||
|
||||
@@ -5,11 +5,10 @@ import sys
|
||||
import unittest
|
||||
|
||||
import augeas
|
||||
import josepy as jose
|
||||
import mock
|
||||
import zope.component
|
||||
|
||||
from acme import jose
|
||||
|
||||
from certbot.display import util as display_util
|
||||
|
||||
from certbot.plugins import common
|
||||
@@ -104,6 +103,7 @@ def get_apache_configurator( # pylint: disable=too-many-arguments, too-many-loc
|
||||
apache_challenge_location=config_path,
|
||||
backup_dir=backups,
|
||||
config_dir=config_dir,
|
||||
http01_port=80,
|
||||
temp_checkpoint_dir=os.path.join(work_dir, "temp_checkpoints"),
|
||||
in_progress_dir=os.path.join(backups, "IN_PROGRESS"),
|
||||
work_dir=work_dir)
|
||||
@@ -170,7 +170,7 @@ def get_vh_truth(temp_dir, config_name):
|
||||
os.path.join(prefix, "certbot.conf"),
|
||||
os.path.join(aug_pre, "certbot.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("*:80")]), False, True,
|
||||
"certbot.demo"),
|
||||
"certbot.demo", aliases=["www.certbot.demo"]),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "mod_macro-example.conf"),
|
||||
os.path.join(aug_pre,
|
||||
|
||||
@@ -123,7 +123,8 @@ class ApacheTlsSni01(common.TLSSNI01):
|
||||
self.configurator.config.tls_sni_01_port)))
|
||||
|
||||
try:
|
||||
vhost = self.configurator.choose_vhost(achall.domain, temp=True)
|
||||
vhost = self.configurator.choose_vhost(achall.domain,
|
||||
create_if_no_ssl=False)
|
||||
except (PluginError, MissingCommandlineFlag):
|
||||
# We couldn't find the virtualhost for this domain, possibly
|
||||
# because it's a new vhost that's not configured yet
|
||||
|
||||
2  certbot-apache/local-oldest-requirements.txt  (Normal file)
@@ -0,0 +1,2 @@
|
||||
acme[dev]==0.21.1
|
||||
certbot[dev]==0.21.1
|
||||
@@ -4,17 +4,16 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.20.0.dev0'
|
||||
version = '0.24.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
# Remember to update local-oldest-requirements.txt when changing the minimum
|
||||
# acme/certbot version.
|
||||
install_requires = [
|
||||
'acme=={0}'.format(version),
|
||||
'certbot=={0}'.format(version),
|
||||
'acme>=0.21.1',
|
||||
'certbot>=0.21.1',
|
||||
'mock',
|
||||
'python-augeas',
|
||||
# For pkg_resources. >=1.0 so pip resolves it to a version cryptography
|
||||
# will tolerate; see #2599:
|
||||
'setuptools>=1.0',
|
||||
'setuptools',
|
||||
'zope.component',
|
||||
'zope.interface',
|
||||
]
|
||||
@@ -32,6 +31,7 @@ setup(
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
|
||||
classifiers=[
|
||||
'Development Status :: 3 - Alpha',
|
||||
'Environment :: Plugins',
|
||||
@@ -40,10 +40,8 @@ setup(
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.6',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.3',
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
|
||||
402  certbot-auto
@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
|
||||
fi
|
||||
VENV_BIN="$VENV_PATH/bin"
|
||||
BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
|
||||
LE_AUTO_VERSION="0.19.0"
|
||||
LE_AUTO_VERSION="0.23.0"
|
||||
BASENAME=$(basename $0)
|
||||
USAGE="Usage: $BASENAME [OPTIONS]
|
||||
A self-updating wrapper script for the Certbot ACME client. When run, updates
|
||||
@@ -47,6 +47,7 @@ Help for certbot itself cannot be provided until it is installed.
|
||||
--no-bootstrap do not install OS dependencies
|
||||
--no-self-upgrade do not download updates
|
||||
--os-packages-only install OS dependencies and exit
|
||||
--install-only install certbot, upgrade if needed, and exit
|
||||
-v, --verbose provide more output
|
||||
-q, --quiet provide only update/error output;
|
||||
implies --non-interactive
|
||||
@@ -60,6 +61,8 @@ for arg in "$@" ; do
|
||||
DEBUG=1;;
|
||||
--os-packages-only)
|
||||
OS_PACKAGES_ONLY=1;;
|
||||
--install-only)
|
||||
INSTALL_ONLY=1;;
|
||||
--no-self-upgrade)
|
||||
# Do not upgrade this script (also prevents client upgrades, because each
|
||||
# copy of the script pins a hash of the python client)
|
||||
@@ -68,10 +71,12 @@ for arg in "$@" ; do
|
||||
NO_BOOTSTRAP=1;;
|
||||
--help)
|
||||
HELP=1;;
|
||||
--noninteractive|--non-interactive|renew)
|
||||
ASSUME_YES=1;;
|
||||
--noninteractive|--non-interactive)
|
||||
NONINTERACTIVE=1;;
|
||||
--quiet)
|
||||
QUIET=1;;
|
||||
renew)
|
||||
ASSUME_YES=1;;
|
||||
--verbose)
|
||||
VERBOSE=1;;
|
||||
-[!-]*)
|
||||
@@ -93,7 +98,7 @@ done
|
||||
|
||||
if [ $BASENAME = "letsencrypt-auto" ]; then
|
||||
# letsencrypt-auto does not respect --help or --yes for backwards compatibility
|
||||
ASSUME_YES=1
|
||||
NONINTERACTIVE=1
|
||||
HELP=0
|
||||
fi
|
||||
|
||||
@@ -244,23 +249,42 @@ DeprecationBootstrap() {
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
MIN_PYTHON_VERSION="2.7"
|
||||
MIN_PYVER=$(echo "$MIN_PYTHON_VERSION" | sed 's/\.//')
|
||||
# Sets LE_PYTHON to Python version string and PYVER to the first two
|
||||
# digits of the python version
|
||||
DeterminePythonVersion() {
|
||||
for LE_PYTHON in "$LE_PYTHON" python2.7 python27 python2 python; do
|
||||
# Break (while keeping the LE_PYTHON value) if found.
|
||||
$EXISTS "$LE_PYTHON" > /dev/null && break
|
||||
done
|
||||
if [ "$?" != "0" ]; then
|
||||
error "Cannot find any Pythons; please install one!"
|
||||
exit 1
|
||||
# Arguments: "NOCRASH" if we shouldn't crash if we don't find a good python
|
||||
#
|
||||
# If no Python is found, PYVER is set to 0.
|
||||
if [ "$USE_PYTHON_3" = 1 ]; then
|
||||
for LE_PYTHON in "$LE_PYTHON" python3; do
|
||||
# Break (while keeping the LE_PYTHON value) if found.
|
||||
$EXISTS "$LE_PYTHON" > /dev/null && break
|
||||
done
|
||||
else
|
||||
for LE_PYTHON in "$LE_PYTHON" python2.7 python27 python2 python; do
|
||||
# Break (while keeping the LE_PYTHON value) if found.
|
||||
$EXISTS "$LE_PYTHON" > /dev/null && break
|
||||
done
|
||||
fi
|
||||
if [ "$?" != "0" ]; then
|
||||
if [ "$1" != "NOCRASH" ]; then
|
||||
error "Cannot find any Pythons; please install one!"
|
||||
exit 1
|
||||
else
|
||||
PYVER=0
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
export LE_PYTHON
|
||||
|
||||
PYVER=`"$LE_PYTHON" -V 2>&1 | cut -d" " -f 2 | cut -d. -f1,2 | sed 's/\.//'`
|
||||
if [ "$PYVER" -lt 26 ]; then
|
||||
error "You have an ancient version of Python entombed in your operating system..."
|
||||
error "This isn't going to work; you'll need at least version 2.6."
|
||||
exit 1
|
||||
if [ "$PYVER" -lt "$MIN_PYVER" ]; then
|
||||
if [ "$1" != "NOCRASH" ]; then
|
||||
error "You have an ancient version of Python entombed in your operating system..."
|
||||
error "This isn't going to work; you'll need at least version $MIN_PYTHON_VERSION."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
@@ -384,23 +408,19 @@ BootstrapDebCommon() {
|
||||
fi
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapRpmCommon below, this version
|
||||
# number must be increased.
|
||||
BOOTSTRAP_RPM_COMMON_VERSION=1
|
||||
|
||||
BootstrapRpmCommon() {
|
||||
# Tested with:
|
||||
# - Fedora 20, 21, 22, 23 (x64)
|
||||
# - Centos 7 (x64: on DigitalOcean droplet)
|
||||
# - CentOS 7 Minimal install in a Hyper-V VM
|
||||
# - CentOS 6 (EPEL must be installed manually)
|
||||
# If new packages are installed by BootstrapRpmCommonBase below, version
|
||||
# numbers in rpm_common.sh and rpm_python3.sh must be increased.
|
||||
|
||||
# Sets TOOL to the name of the package manager
|
||||
# Sets appropriate values for YES_FLAG and QUIET_FLAG based on $ASSUME_YES and $QUIET_FLAG.
|
||||
# Enables EPEL if applicable and possible.
|
||||
InitializeRPMCommonBase() {
|
||||
if type dnf 2>/dev/null
|
||||
then
|
||||
tool=dnf
|
||||
TOOL=dnf
|
||||
elif type yum 2>/dev/null
|
||||
then
|
||||
tool=yum
|
||||
TOOL=yum
|
||||
|
||||
else
|
||||
error "Neither yum nor dnf found. Aborting bootstrap!"
|
||||
@@ -408,15 +428,15 @@ BootstrapRpmCommon() {
|
||||
fi
|
||||
|
||||
if [ "$ASSUME_YES" = 1 ]; then
|
||||
yes_flag="-y"
|
||||
YES_FLAG="-y"
|
||||
fi
|
||||
if [ "$QUIET" = 1 ]; then
|
||||
QUIET_FLAG='--quiet'
|
||||
fi
|
||||
|
||||
if ! $tool list *virtualenv >/dev/null 2>&1; then
|
||||
if ! $TOOL list *virtualenv >/dev/null 2>&1; then
|
||||
echo "To use Certbot, packages from the EPEL repository need to be installed."
|
||||
if ! $tool list epel-release >/dev/null 2>&1; then
|
||||
if ! $TOOL list epel-release >/dev/null 2>&1; then
|
||||
error "Enable the EPEL repository and try running Certbot again."
|
||||
exit 1
|
||||
fi
|
||||
@@ -425,14 +445,20 @@ BootstrapRpmCommon() {
|
||||
sleep 1s
|
||||
/bin/echo -ne "\e[0K\rEnabling the EPEL repository in 2 seconds..."
|
||||
sleep 1s
|
||||
/bin/echo -e "\e[0K\rEnabling the EPEL repository in 1 seconds..."
|
||||
/bin/echo -e "\e[0K\rEnabling the EPEL repository in 1 second..."
|
||||
sleep 1s
|
||||
fi
|
||||
if ! $tool install $yes_flag $QUIET_FLAG epel-release; then
|
||||
if ! $TOOL install $YES_FLAG $QUIET_FLAG epel-release; then
|
||||
error "Could not enable EPEL. Aborting bootstrap!"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
BootstrapRpmCommonBase() {
|
||||
# Arguments: whitespace-delimited python packages to install
|
||||
|
||||
InitializeRPMCommonBase # This call is superfluous in practice
|
||||
|
||||
pkgs="
|
||||
gcc
|
||||
@@ -444,10 +470,39 @@ BootstrapRpmCommon() {
|
||||
ca-certificates
|
||||
"
|
||||
|
||||
# Most RPM distros use the "python" or "python-" naming convention. Let's try that first.
|
||||
if $tool list python >/dev/null 2>&1; then
|
||||
# Add the python packages
|
||||
pkgs="$pkgs
|
||||
$1
|
||||
"
|
||||
|
||||
if $TOOL list installed "httpd" >/dev/null 2>&1; then
|
||||
pkgs="$pkgs
|
||||
python
|
||||
mod_ssl
|
||||
"
|
||||
fi
|
||||
|
||||
if ! $TOOL install $YES_FLAG $QUIET_FLAG $pkgs; then
|
||||
error "Could not install OS dependencies. Aborting bootstrap!"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapRpmCommon below, this version
|
||||
# number must be increased.
|
||||
BOOTSTRAP_RPM_COMMON_VERSION=1
|
||||
|
||||
BootstrapRpmCommon() {
|
||||
# Tested with:
|
||||
# - Fedora 20, 21, 22, 23 (x64)
|
||||
# - Centos 7 (x64: on DigitalOcean droplet)
|
||||
# - CentOS 7 Minimal install in a Hyper-V VM
|
||||
# - CentOS 6
|
||||
|
||||
InitializeRPMCommonBase
|
||||
|
||||
# Most RPM distros use the "python" or "python-" naming convention. Let's try that first.
|
||||
if $TOOL list python >/dev/null 2>&1; then
|
||||
python_pkgs="$python
|
||||
python-devel
|
||||
python-virtualenv
|
||||
python-tools
|
||||
@@ -455,9 +510,8 @@ BootstrapRpmCommon() {
|
||||
"
|
||||
# Fedora 26 starts to use the prefix python2 for python2 based packages.
|
||||
# this elseif is theoretically for any Fedora over version 26:
|
||||
elif $tool list python2 >/dev/null 2>&1; then
|
||||
pkgs="$pkgs
|
||||
python2
|
||||
elif $TOOL list python2 >/dev/null 2>&1; then
|
||||
python_pkgs="$python2
|
||||
python2-libs
|
||||
python2-setuptools
|
||||
python2-devel
|
||||
@@ -468,8 +522,7 @@ BootstrapRpmCommon() {
|
||||
# Some distros and older versions of current distros use a "python27"
|
||||
# instead of the "python" or "python-" naming convention.
|
||||
else
|
||||
pkgs="$pkgs
|
||||
python27
|
||||
python_pkgs="$python27
|
||||
python27-devel
|
||||
python27-virtualenv
|
||||
python27-tools
|
||||
@@ -477,16 +530,31 @@ BootstrapRpmCommon() {
|
||||
"
|
||||
fi
|
||||
|
||||
if $tool list installed "httpd" >/dev/null 2>&1; then
|
||||
pkgs="$pkgs
|
||||
mod_ssl
|
||||
"
|
||||
fi
|
||||
BootstrapRpmCommonBase "$python_pkgs"
|
||||
}
|
||||
|
||||
if ! $tool install $yes_flag $QUIET_FLAG $pkgs; then
|
||||
error "Could not install OS dependencies. Aborting bootstrap!"
|
||||
# If new packages are installed by BootstrapRpmPython3 below, this version
|
||||
# number must be increased.
|
||||
BOOTSTRAP_RPM_PYTHON3_VERSION=1
|
||||
|
||||
BootstrapRpmPython3() {
|
||||
# Tested with:
|
||||
# - CentOS 6
|
||||
|
||||
InitializeRPMCommonBase
|
||||
|
||||
# EPEL uses python34
|
||||
if $TOOL list python34 >/dev/null 2>&1; then
|
||||
python_pkgs="python34
|
||||
python34-devel
|
||||
python34-tools
|
||||
"
|
||||
else
|
||||
error "No supported Python package available to install. Aborting bootstrap!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
BootstrapRpmCommonBase "$python_pkgs"
|
||||
}
|
||||
|
||||
# If new packages are installed by BootstrapSuseCommon below, this version
|
||||
@@ -696,13 +764,8 @@ BootstrapMageiaCommon() {
|
||||
# Set Bootstrap to the function that installs OS dependencies on this system
|
||||
# and BOOTSTRAP_VERSION to the unique identifier for the current version of
|
||||
# that function. If Bootstrap is set to a function that doesn't install any
|
||||
# packages (either because --no-bootstrap was included on the command line or
|
||||
# we don't know how to bootstrap on this system), BOOTSTRAP_VERSION is not set.
|
||||
if [ "$NO_BOOTSTRAP" = 1 ]; then
|
||||
Bootstrap() {
|
||||
:
|
||||
}
|
||||
elif [ -f /etc/debian_version ]; then
|
||||
# packages BOOTSTRAP_VERSION is not set.
|
||||
if [ -f /etc/debian_version ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "Debian-based OSes"
|
||||
BootstrapDebCommon
|
||||
@@ -715,11 +778,27 @@ elif [ -f /etc/mageia-release ]; then
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapMageiaCommon $BOOTSTRAP_MAGEIA_COMMON_VERSION"
|
||||
elif [ -f /etc/redhat-release ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes"
|
||||
BootstrapRpmCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
# Run DeterminePythonVersion to decide on the basis of available Python versions
|
||||
# whether to use 2.x or 3.x on RedHat-like systems.
|
||||
# Then, revert LE_PYTHON to its previous state.
|
||||
prev_le_python="$LE_PYTHON"
|
||||
unset LE_PYTHON
|
||||
DeterminePythonVersion "NOCRASH"
|
||||
if [ "$PYVER" -eq 26 ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes that will use Python3"
|
||||
BootstrapRpmPython3
|
||||
}
|
||||
USE_PYTHON_3=1
|
||||
BOOTSTRAP_VERSION="BootstrapRpmPython3 $BOOTSTRAP_RPM_PYTHON3_VERSION"
|
||||
else
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes"
|
||||
BootstrapRpmCommon
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
fi
|
||||
LE_PYTHON="$prev_le_python"
|
||||
elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "openSUSE-based OSes"
|
||||
@@ -782,6 +861,17 @@ else
|
||||
}
|
||||
fi
|
||||
|
||||
# We handle this case after determining the normal bootstrap version to allow
|
||||
# variables like USE_PYTHON_3 to be properly set. As described above, if the
|
||||
# Bootstrap function doesn't install any packages, BOOTSTRAP_VERSION should not
|
||||
# be set so we unset it here.
|
||||
if [ "$NO_BOOTSTRAP" = 1 ]; then
|
||||
Bootstrap() {
|
||||
:
|
||||
}
|
||||
unset BOOTSTRAP_VERSION
|
||||
fi
|
||||
|
||||
# Sets PREV_BOOTSTRAP_VERSION to the identifier for the bootstrap script used
|
||||
# to install OS dependencies on this system. PREV_BOOTSTRAP_VERSION isn't set
|
||||
# if it is unknown how OS dependencies were installed on this system.
|
||||
@@ -816,7 +906,11 @@ TempDir() {
|
||||
mktemp -d 2>/dev/null || mktemp -d -t 'le' # Linux || macOS
|
||||
}
|
||||
|
||||
|
||||
# Returns 0 if a letsencrypt installation exists at $OLD_VENV_PATH, otherwise,
|
||||
# returns a non-zero number.
|
||||
OldVenvExists() {
|
||||
[ -n "$OLD_VENV_PATH" -a -f "$OLD_VENV_PATH/bin/letsencrypt" ]
|
||||
}
|
||||
|
||||
if [ "$1" = "--le-auto-phase2" ]; then
|
||||
# Phase 2: Create venv, install LE, and run.
|
||||
@@ -824,14 +918,26 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
shift 1 # the --le-auto-phase2 arg
|
||||
SetPrevBootstrapVersion
|
||||
|
||||
if [ -z "$PHASE_1_VERSION" -a "$USE_PYTHON_3" = 1 ]; then
|
||||
unset LE_PYTHON
|
||||
fi
|
||||
|
||||
INSTALLED_VERSION="none"
|
||||
if [ -d "$VENV_PATH" ]; then
|
||||
if [ -d "$VENV_PATH" ] || OldVenvExists; then
|
||||
# If the selected Bootstrap function isn't a noop and it differs from the
|
||||
# previously used version
|
||||
if [ -n "$BOOTSTRAP_VERSION" -a "$BOOTSTRAP_VERSION" != "$PREV_BOOTSTRAP_VERSION" ]; then
|
||||
# if non-interactive mode or stdin and stdout are connected to a terminal
|
||||
if [ \( "$NONINTERACTIVE" = 1 \) -o \( \( -t 0 \) -a \( -t 1 \) \) ]; then
|
||||
rm -rf "$VENV_PATH"
|
||||
if [ -d "$VENV_PATH" ]; then
|
||||
rm -rf "$VENV_PATH"
|
||||
fi
|
||||
# In the case the old venv was just a symlink to the new one,
|
||||
# OldVenvExists is now false because we deleted the venv at VENV_PATH.
|
||||
if OldVenvExists; then
|
||||
rm -rf "$OLD_VENV_PATH"
|
||||
ln -s "$VENV_PATH" "$OLD_VENV_PATH"
|
||||
fi
|
||||
RerunWithArgs "$@"
|
||||
else
|
||||
error "Skipping upgrade because new OS dependencies may need to be installed."
|
||||
@@ -841,6 +947,10 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
error "install any required packages."
|
||||
# Set INSTALLED_VERSION to be the same so we don't update the venv
|
||||
INSTALLED_VERSION="$LE_AUTO_VERSION"
|
||||
# Continue to use OLD_VENV_PATH if the new venv doesn't exist
|
||||
if [ ! -d "$VENV_PATH" ]; then
|
||||
VENV_BIN="$OLD_VENV_PATH/bin"
|
||||
fi
|
||||
fi
|
||||
elif [ -f "$VENV_BIN/letsencrypt" ]; then
|
||||
# --version output ran through grep due to python-cryptography DeprecationWarnings
|
||||
@@ -858,10 +968,18 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
say "Creating virtual environment..."
|
||||
DeterminePythonVersion
|
||||
rm -rf "$VENV_PATH"
|
||||
if [ "$VERBOSE" = 1 ]; then
|
||||
virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH"
|
||||
if [ "$PYVER" -le 27 ]; then
|
||||
if [ "$VERBOSE" = 1 ]; then
|
||||
virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH"
|
||||
else
|
||||
virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH" > /dev/null
|
||||
fi
|
||||
else
|
||||
virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH" > /dev/null
|
||||
if [ "$VERBOSE" = 1 ]; then
|
||||
"$LE_PYTHON" -m venv "$VENV_PATH"
|
||||
else
|
||||
"$LE_PYTHON" -m venv "$VENV_PATH" > /dev/null
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -n "$BOOTSTRAP_VERSION" ]; then
|
||||
@@ -983,9 +1101,16 @@ idna==2.5 \
|
||||
ipaddress==1.0.16 \
|
||||
--hash=sha256:935712800ce4760701d89ad677666cd52691fd2f6f0b340c8b4239a3c17988a5 \
|
||||
--hash=sha256:5a3182b322a706525c46282ca6f064d27a02cffbd449f9f47416f1dc96aa71b0
|
||||
josepy==1.0.1 \
|
||||
--hash=sha256:354a3513038a38bbcd27c97b7c68a8f3dfaff0a135b20a92c6db4cc4ea72915e \
|
||||
--hash=sha256:9f48b88ca37f0244238b1cc77723989f7c54f7b90b2eee6294390bacfe870acc
|
||||
linecache2==1.0.0 \
|
||||
--hash=sha256:e78be9c0a0dfcbac712fe04fbf92b96cddae80b1b842f24248214c8496f006ef \
|
||||
--hash=sha256:4b26ff4e7110db76eeb6f5a7b64a82623839d595c2038eeda662f2a2db78e97c
|
||||
# Using an older version of mock here prevents regressions of #5276.
|
||||
mock==1.3.0 \
|
||||
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb \
|
||||
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6
|
||||
ordereddict==1.1 \
|
||||
--hash=sha256:1c35b4ac206cef2d24816c89f89cf289dd3d38cf7c449bb3fab7bf6d43f01b1f
|
||||
packaging==16.8 \
|
||||
@@ -1062,9 +1187,6 @@ zope.interface==4.1.3 \
|
||||
--hash=sha256:928138365245a0e8869a5999fbcc2a45475a0a6ed52a494d60dbdc540335fedd \
|
||||
--hash=sha256:0d841ba1bb840eea0e6489dc5ecafa6125554971f53b5acb87764441e61bceba \
|
||||
--hash=sha256:b09c8c1d47b3531c400e0195697f1414a63221de6ef478598a4f1460f7d9a392
|
||||
mock==2.0.0 \
|
||||
--hash=sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1 \
|
||||
--hash=sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba
|
||||
|
||||
# Contains the requirements for the letsencrypt package.
|
||||
#
|
||||
@@ -1077,24 +1199,24 @@ letsencrypt==0.7.0 \
|
||||
--hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
|
||||
--hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9
|
||||
|
||||
certbot==0.19.0 \
|
||||
--hash=sha256:3207ee5319bfc37e855c25a43148275fcfb37869eefde9087405012049734a20 \
|
||||
--hash=sha256:a7230791dff5d085738119fc22d88ad9d8a35d0b6a3d67806fe33990c7c79d53
|
||||
acme==0.19.0 \
|
||||
--hash=sha256:c612eafe234d722d97bb5d3dbc49e5522f44be29611f7577954eb893e5c2d6de \
|
||||
--hash=sha256:1fa23d64d494aaf001e6fe857c461fcfff10f75a1c2c35ec831447f641e1e822
|
||||
certbot-apache==0.19.0 \
|
||||
--hash=sha256:fadb28b33bfabc85cdb962b5b149bef58b98f0606b78581db7895fe38323f37c \
|
||||
--hash=sha256:70306ca2d5be7f542af68d46883c0ae39527cf202f17ef92cd256fb0bc3f1619
|
||||
certbot-nginx==0.19.0 \
|
||||
--hash=sha256:4909cb3db49919fb35590793cac28e1c0b6dbd29cbedf887b9106e5fcef5362c \
|
||||
--hash=sha256:cb5a224a3f277092555c25096d1678fc735306fd3a43447649ebe524c7ca79e1
|
||||
certbot==0.23.0 \
|
||||
--hash=sha256:66c42cf780ddbf582ecc52aa6a61242450a2650227b436ad0d260685c4ef8a49 \
|
||||
--hash=sha256:6cff4c5da1228661ccaf95195064cb29e6cdf80913193bdb2eb20e164c76053e
|
||||
acme==0.23.0 \
|
||||
--hash=sha256:02e9b596bd3bf8f0733d6d43ec2464ac8185a000acb58d2b4fd9e19223bbbf0b \
|
||||
--hash=sha256:08c16635578507f526c338b3418c1147a9f015bf2d366abd51f38918703b4550
|
||||
certbot-apache==0.23.0 \
|
||||
--hash=sha256:50077742d2763b7600dfda618eb89c870aeea5e6a4c00f60157877f7a7d81f7c \
|
||||
--hash=sha256:6b7acec243e224de5268d46c2597277586dffa55e838c252b6931c30d549028e
|
||||
certbot-nginx==0.23.0 \
|
||||
--hash=sha256:f12c21bbe3eb955ca533f1da96d28c6310378b138e844d83253562e18b6cbb32 \
|
||||
--hash=sha256:cadf14e4bd504d9ce5987a5ec6dbd8e136638e55303ad5dc81dcb723ddd64324
|
||||
|
||||
UNLIKELY_EOF
|
||||
# -------------------------------------------------------------------------
|
||||
cat << "UNLIKELY_EOF" > "$TEMP_DIR/pipstrap.py"
|
||||
#!/usr/bin/env python
|
||||
"""A small script that can act as a trust root for installing pip 8
|
||||
"""A small script that can act as a trust root for installing pip >=8
|
||||
|
||||
Embed this in your project, and your VCS checkout is all you have to trust. In
|
||||
a post-peep era, this lets you claw your way to a hash-checking version of pip,
|
||||
@@ -1118,6 +1240,7 @@ anything goes wrong, it will exit with a non-zero status code.
|
||||
from __future__ import print_function
|
||||
from distutils.version import StrictVersion
|
||||
from hashlib import sha256
|
||||
from os import environ
|
||||
from os.path import join
|
||||
from pipes import quote
|
||||
from shutil import rmtree
|
||||
@@ -1151,14 +1274,14 @@ except ImportError:
|
||||
from urllib.parse import urlparse # 3.4
|
||||
|
||||
|
||||
__version__ = 1, 3, 0
|
||||
__version__ = 1, 5, 1
|
||||
PIP_VERSION = '9.0.1'
|
||||
DEFAULT_INDEX_BASE = 'https://pypi.python.org'
|
||||
|
||||
|
||||
# wheel has a conditional dependency on argparse:
|
||||
maybe_argparse = (
|
||||
[('https://pypi.python.org/packages/18/dd/'
|
||||
'e617cfc3f6210ae183374cd9f6a26b20514bbb5a792af97949c5aacddf0f/'
|
||||
[('18/dd/e617cfc3f6210ae183374cd9f6a26b20514bbb5a792af97949c5aacddf0f/'
|
||||
'argparse-1.4.0.tar.gz',
|
||||
'62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4')]
|
||||
if version_info < (2, 7, 0) else [])
|
||||
@@ -1166,18 +1289,14 @@ maybe_argparse = (
|
||||
|
||||
PACKAGES = maybe_argparse + [
|
||||
# Pip has no dependencies, as it vendors everything:
|
||||
('https://pypi.python.org/packages/11/b6/'
|
||||
'abcb525026a4be042b486df43905d6893fb04f05aac21c32c638e939e447/'
|
||||
'pip-{0}.tar.gz'
|
||||
.format(PIP_VERSION),
|
||||
('11/b6/abcb525026a4be042b486df43905d6893fb04f05aac21c32c638e939e447/'
|
||||
'pip-{0}.tar.gz'.format(PIP_VERSION),
|
||||
'09f243e1a7b461f654c26a725fa373211bb7ff17a9300058b205c61658ca940d'),
|
||||
# This version of setuptools has only optional dependencies:
|
||||
('https://pypi.python.org/packages/69/65/'
|
||||
'4c544cde88d4d876cdf5cbc5f3f15d02646477756d89547e9a7ecd6afa76/'
|
||||
'setuptools-20.2.2.tar.gz',
|
||||
'24fcfc15364a9fe09a220f37d2dcedc849795e3de3e4b393ee988e66a9cbd85a'),
|
||||
('https://pypi.python.org/packages/c9/1d/'
|
||||
'bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/'
|
||||
('59/88/2f3990916931a5de6fa9706d6d75eb32ee8b78627bb2abaab7ed9e6d0622/'
|
||||
'setuptools-29.0.1.tar.gz',
|
||||
'b539118819a4857378398891fa5366e090690e46b3e41421a1e07d6e9fd8feb0'),
|
||||
('c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/'
|
||||
'wheel-0.29.0.tar.gz',
|
||||
'1ebb8ad7e26b448e9caa4773d2357849bf80ff9e313964bcaf79cbf0201a1648')
|
||||
]
|
||||
@@ -1198,12 +1317,13 @@ def hashed_download(url, temp, digest):
|
||||
# >=2.7.9 verifies HTTPS certs itself, and, in any case, the cert
|
||||
# authenticity has only privacy (not arbitrary code execution)
|
||||
# implications, since we're checking hashes.
|
||||
def opener():
|
||||
def opener(using_https=True):
|
||||
opener = build_opener(HTTPSHandler())
|
||||
# Strip out HTTPHandler to prevent MITM spoof:
|
||||
for handler in opener.handlers:
|
||||
if isinstance(handler, HTTPHandler):
|
||||
opener.handlers.remove(handler)
|
||||
if using_https:
|
||||
# Strip out HTTPHandler to prevent MITM spoof:
|
||||
for handler in opener.handlers:
|
||||
if isinstance(handler, HTTPHandler):
|
||||
opener.handlers.remove(handler)
|
||||
return opener
|
||||
|
||||
def read_chunks(response, chunk_size):
|
||||
@@ -1213,8 +1333,9 @@ def hashed_download(url, temp, digest):
|
||||
break
|
||||
yield chunk
|
||||
|
||||
response = opener().open(url)
|
||||
path = join(temp, urlparse(url).path.split('/')[-1])
|
||||
parsed_url = urlparse(url)
|
||||
response = opener(using_https=parsed_url.scheme == 'https').open(url)
|
||||
path = join(temp, parsed_url.path.split('/')[-1])
|
||||
actual_hash = sha256()
|
||||
with open(path, 'wb') as file:
|
||||
for chunk in read_chunks(response, 4096):
|
||||
@@ -1227,6 +1348,24 @@ def hashed_download(url, temp, digest):
|
||||
return path
|
||||
|
||||
|
||||
def get_index_base():
|
||||
"""Return the URL to the dir containing the "packages" folder.
|
||||
|
||||
Try to wring something out of PIP_INDEX_URL, if set. Hack "/simple" off the
|
||||
end if it's there; that is likely to give us the right dir.
|
||||
|
||||
"""
|
||||
env_var = environ.get('PIP_INDEX_URL', '').rstrip('/')
|
||||
if env_var:
|
||||
SIMPLE = '/simple'
|
||||
if env_var.endswith(SIMPLE):
|
||||
return env_var[:-len(SIMPLE)]
|
||||
else:
|
||||
return env_var
|
||||
else:
|
||||
return DEFAULT_INDEX_BASE
|
||||
|
||||
|
||||
def main():
|
||||
pip_version = StrictVersion(check_output(['pip', '--version'])
|
||||
.decode('utf-8').split()[1])
|
||||
@@ -1234,11 +1373,13 @@ def main():
|
||||
if pip_version >= min_pip_version:
|
||||
return 0
|
||||
has_pip_cache = pip_version >= StrictVersion('6.0')
|
||||
|
||||
index_base = get_index_base()
|
||||
temp = mkdtemp(prefix='pipstrap-')
|
||||
try:
|
||||
downloads = [hashed_download(url, temp, digest)
|
||||
for url, digest in PACKAGES]
|
||||
downloads = [hashed_download(index_base + '/packages/' + path,
|
||||
temp,
|
||||
digest)
|
||||
for path, digest in PACKAGES]
|
||||
check_output('pip install --no-index --no-deps -U ' +
|
||||
# Disable cache since we're not using it and it otherwise
|
||||
# sometimes throws permission warnings:
|
||||
@@ -1309,6 +1450,12 @@ UNLIKELY_EOF
|
||||
|
||||
say "Installation succeeded."
|
||||
fi
|
||||
|
||||
if [ "$INSTALL_ONLY" = 1 ]; then
|
||||
say "Certbot is installed."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
"$VENV_BIN/letsencrypt" "$@"
|
||||
|
||||
else
|
||||
@@ -1318,9 +1465,10 @@ else
|
||||
# upgrading. Phase 1 checks the version of the latest release of
|
||||
# certbot-auto (which is always the same as that of the certbot
|
||||
# package). Phase 2 checks the version of the locally installed certbot.
|
||||
export PHASE_1_VERSION="$LE_AUTO_VERSION"
|
||||
|
||||
if [ ! -f "$VENV_BIN/letsencrypt" ]; then
|
||||
if [ -z "$OLD_VENV_PATH" -o ! -f "$OLD_VENV_PATH/bin/letsencrypt" ]; then
|
||||
if ! OldVenvExists; then
|
||||
if [ "$HELP" = 1 ]; then
|
||||
echo "$USAGE"
|
||||
exit 0
|
||||
@@ -1352,17 +1500,22 @@ On failure, return non-zero.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
from json import loads
|
||||
from os import devnull, environ
|
||||
from os.path import dirname, join
|
||||
import re
|
||||
import ssl
|
||||
from subprocess import check_call, CalledProcessError
|
||||
from sys import argv, exit
|
||||
from urllib2 import build_opener, HTTPHandler, HTTPSHandler
|
||||
from urllib2 import HTTPError, URLError
|
||||
try:
|
||||
from urllib2 import build_opener, HTTPHandler, HTTPSHandler
|
||||
from urllib2 import HTTPError, URLError
|
||||
except ImportError:
|
||||
from urllib.request import build_opener, HTTPHandler, HTTPSHandler
|
||||
from urllib.error import HTTPError, URLError
|
||||
|
||||
PUBLIC_KEY = environ.get('LE_AUTO_PUBLIC_KEY', """-----BEGIN PUBLIC KEY-----
|
||||
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6MR8W/galdxnpGqBsYbq
|
||||
@@ -1384,8 +1537,11 @@ class HttpsGetter(object):
|
||||
def __init__(self):
|
||||
"""Build an HTTPS opener."""
|
||||
# Based on pip 1.4.1's URLOpener
|
||||
# This verifies certs on only Python >=2.7.9.
|
||||
self._opener = build_opener(HTTPSHandler())
|
||||
# This verifies certs on only Python >=2.7.9, and when NO_CERT_VERIFY isn't set.
|
||||
if environ.get('NO_CERT_VERIFY') == '1' and hasattr(ssl, 'SSLContext'):
|
||||
self._opener = build_opener(HTTPSHandler(context=cert_none_context()))
|
||||
else:
|
||||
self._opener = build_opener(HTTPSHandler())
|
||||
# Strip out HTTPHandler to prevent MITM spoof:
|
||||
for handler in self._opener.handlers:
|
||||
if isinstance(handler, HTTPHandler):
|
||||
@@ -1407,7 +1563,7 @@ class HttpsGetter(object):
|
||||
|
||||
def write(contents, dir, filename):
|
||||
"""Write something to a file in a certain directory."""
|
||||
with open(join(dir, filename), 'w') as file:
|
||||
with open(join(dir, filename), 'wb') as file:
|
||||
file.write(contents)
|
||||
|
||||
|
||||
@@ -1415,13 +1571,13 @@ def latest_stable_version(get):
|
||||
"""Return the latest stable release of letsencrypt."""
|
||||
metadata = loads(get(
|
||||
environ.get('LE_AUTO_JSON_URL',
|
||||
'https://pypi.python.org/pypi/certbot/json')))
|
||||
'https://pypi.python.org/pypi/certbot/json')).decode('UTF-8'))
|
||||
# metadata['info']['version'] actually returns the latest of any kind of
|
||||
# release, contrary to https://wiki.python.org/moin/PyPIJSON.
|
||||
# The regex is a sufficient regex for picking out prereleases for most
|
||||
# packages, LE included.
|
||||
return str(max(LooseVersion(r) for r
|
||||
in metadata['releases'].iterkeys()
|
||||
in metadata['releases'].keys()
|
||||
if re.match('^[0-9.]+$', r)))
|
||||
|
||||
|
||||
@@ -1438,7 +1594,7 @@ def verified_new_le_auto(get, tag, temp_dir):
|
||||
'letsencrypt-auto-source/') % tag
|
||||
write(get(le_auto_dir + 'letsencrypt-auto'), temp_dir, 'letsencrypt-auto')
|
||||
write(get(le_auto_dir + 'letsencrypt-auto.sig'), temp_dir, 'letsencrypt-auto.sig')
|
||||
write(PUBLIC_KEY, temp_dir, 'public_key.pem')
|
||||
write(PUBLIC_KEY.encode('UTF-8'), temp_dir, 'public_key.pem')
|
||||
try:
|
||||
with open(devnull, 'w') as dev_null:
|
||||
check_call(['openssl', 'dgst', '-sha256', '-verify',
|
||||
@@ -1453,6 +1609,14 @@ def verified_new_le_auto(get, tag, temp_dir):
|
||||
"certbot-auto.", exc)
|
||||
|
||||
|
||||
def cert_none_context():
|
||||
"""Create a SSLContext object to not check hostname."""
|
||||
# PROTOCOL_TLS isn't available before 2.7.13 but this code is for 2.7.9+, so use this.
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
|
||||
context.verify_mode = ssl.CERT_NONE
|
||||
return context
|
||||
|
||||
|
||||
def main():
|
||||
get = HttpsGetter().get
|
||||
flag = argv[1]
|
||||
@@ -1474,8 +1638,10 @@ if __name__ == '__main__':
|
||||
|
||||
UNLIKELY_EOF
|
||||
# ---------------------------------------------------------------------------
|
||||
DeterminePythonVersion
|
||||
if ! REMOTE_VERSION=`"$LE_PYTHON" "$TEMP_DIR/fetch.py" --latest-version` ; then
|
||||
DeterminePythonVersion "NOCRASH"
|
||||
if [ "$PYVER" -lt "$MIN_PYVER" ]; then
|
||||
error "WARNING: couldn't find Python $MIN_PYTHON_VERSION+ to check for updates."
|
||||
elif ! REMOTE_VERSION=`"$LE_PYTHON" "$TEMP_DIR/fetch.py" --latest-version` ; then
|
||||
error "WARNING: unable to check for updates."
|
||||
elif [ "$LE_AUTO_VERSION" != "$REMOTE_VERSION" ]; then
|
||||
say "Upgrading certbot-auto $LE_AUTO_VERSION to $REMOTE_VERSION..."
|
||||
|
||||
@@ -10,6 +10,8 @@ import sys
|
||||
|
||||
import OpenSSL
|
||||
|
||||
from six.moves import xrange # pylint: disable=import-error,redefined-builtin
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme import messages
|
||||
|
||||
@@ -6,7 +6,8 @@ import re
|
||||
import shutil
|
||||
import tarfile
|
||||
|
||||
from acme import jose
|
||||
import josepy as jose
|
||||
|
||||
from acme import test_util
|
||||
from certbot import constants
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import requests
|
||||
import zope.interface
|
||||
|
||||
import six
|
||||
from six.moves import xrange # pylint: disable=import-error,redefined-builtin
|
||||
|
||||
from acme import crypto_util
|
||||
from acme import errors as acme_errors
|
||||
|
||||
@@ -8,7 +8,7 @@ from certbot_nginx import nginxparser
|
||||
def roundtrip(stuff):
|
||||
success = True
|
||||
for t in stuff:
|
||||
print t
|
||||
print(t)
|
||||
if not os.path.isfile(t):
|
||||
continue
|
||||
with open(t, "r") as f:
|
||||
|
||||
@@ -4,7 +4,7 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.20.0.dev0'
|
||||
version = '0.24.0.dev0'
|
||||
|
||||
install_requires = [
|
||||
'certbot',
|
||||
@@ -34,16 +34,15 @@ setup(
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
|
||||
classifiers=[
|
||||
'Development Status :: 3 - Alpha',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.6',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.3',
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
|
||||
5  certbot-dns-cloudflare/Dockerfile  (Normal file)
@@ -0,0 +1,5 @@
|
||||
FROM certbot/certbot
|
||||
|
||||
COPY . src/certbot-dns-cloudflare
|
||||
|
||||
RUN pip install --no-cache-dir --editable src/certbot-dns-cloudflare
|
||||
@@ -10,14 +10,14 @@ Welcome to certbot-dns-cloudflare's documentation!
|
||||
:maxdepth: 2
|
||||
:caption: Contents:
|
||||
|
||||
.. automodule:: certbot_dns_cloudflare
|
||||
:members:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
api
|
||||
|
||||
.. automodule:: certbot_dns_cloudflare
|
||||
:members:
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
2  certbot-dns-cloudflare/local-oldest-requirements.txt  (Normal file)
@@ -0,0 +1,2 @@
|
||||
acme[dev]==0.21.1
|
||||
certbot[dev]==0.21.1
|
||||
12  certbot-dns-cloudflare/readthedocs.org.requirements.txt  (Normal file)
@@ -0,0 +1,12 @@
|
||||
# readthedocs.org gives no way to change the install command to "pip
|
||||
# install -e .[docs]" (that would in turn install documentation
|
||||
# dependencies), but it allows to specify a requirements.txt file at
|
||||
# https://readthedocs.org/dashboard/letsencrypt/advanced/ (c.f. #259)
|
||||
|
||||
# Although ReadTheDocs certainly doesn't need to install the project
|
||||
# in --editable mode (-e), just "pip install .[docs]" does not work as
|
||||
# expected and "pip install -e .[docs]" must be used instead
|
||||
|
||||
-e acme
|
||||
-e .
|
||||
-e certbot-dns-cloudflare[docs]
|
||||
@@ -4,17 +4,16 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.20.0.dev0'
|
||||
version = '0.24.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
# Remember to update local-oldest-requirements.txt when changing the minimum
|
||||
# acme/certbot version.
|
||||
install_requires = [
|
||||
'acme=={0}'.format(version),
|
||||
'certbot=={0}'.format(version),
|
||||
'acme>=0.21.1',
|
||||
'certbot>=0.21.1',
|
||||
'cloudflare>=1.5.1',
|
||||
'mock',
|
||||
# For pkg_resources. >=1.0 so pip resolves it to a version cryptography
|
||||
# will tolerate; see #2599:
|
||||
'setuptools>=1.0',
|
||||
'setuptools',
|
||||
'zope.interface',
|
||||
]
|
||||
|
||||
@@ -31,6 +30,7 @@ setup(
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
|
||||
classifiers=[
|
||||
'Development Status :: 3 - Alpha',
|
||||
'Environment :: Plugins',
|
||||
@@ -39,10 +39,8 @@ setup(
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.6',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.3',
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
|
||||
5  certbot-dns-cloudxns/Dockerfile  (Normal file)
@@ -0,0 +1,5 @@
|
||||
FROM certbot/certbot
|
||||
|
||||
COPY . src/certbot-dns-cloudxns
|
||||
|
||||
RUN pip install --no-cache-dir --editable src/certbot-dns-cloudxns
|
||||
@@ -10,14 +10,14 @@ Welcome to certbot-dns-cloudxns's documentation!
|
||||
:maxdepth: 2
|
||||
:caption: Contents:
|
||||
|
||||
.. automodule:: certbot_dns_cloudxns
|
||||
:members:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
api
|
||||
|
||||
.. automodule:: certbot_dns_cloudxns
|
||||
:members:
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
|
||||
2  certbot-dns-cloudxns/local-oldest-requirements.txt  (Normal file)
@@ -0,0 +1,2 @@
|
||||
acme[dev]==0.21.1
|
||||
certbot[dev]==0.21.1
|
||||
12  certbot-dns-cloudxns/readthedocs.org.requirements.txt  (Normal file)
@@ -0,0 +1,12 @@
|
||||
# readthedocs.org gives no way to change the install command to "pip
|
||||
# install -e .[docs]" (that would in turn install documentation
|
||||
# dependencies), but it allows to specify a requirements.txt file at
|
||||
# https://readthedocs.org/dashboard/letsencrypt/advanced/ (c.f. #259)
|
||||
|
||||
# Although ReadTheDocs certainly doesn't need to install the project
|
||||
# in --editable mode (-e), just "pip install .[docs]" does not work as
|
||||
# expected and "pip install -e .[docs]" must be used instead
|
||||
|
||||
-e acme
|
||||
-e .
|
||||
-e certbot-dns-cloudxns[docs]
|
||||
@@ -4,17 +4,16 @@ from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
|
||||
|
||||
version = '0.20.0.dev0'
|
||||
version = '0.24.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
# Remember to update local-oldest-requirements.txt when changing the minimum
|
||||
# acme/certbot version.
|
||||
install_requires = [
|
||||
'acme=={0}'.format(version),
|
||||
'certbot=={0}'.format(version),
|
||||
'dns-lexicon',
|
||||
'acme>=0.21.1',
|
||||
'certbot>=0.21.1',
|
||||
'dns-lexicon>=2.2.1', # Support for >1 TXT record per name
|
||||
'mock',
|
||||
# For pkg_resources. >=1.0 so pip resolves it to a version cryptography
|
||||
# will tolerate; see #2599:
|
||||
'setuptools>=1.0',
|
||||
'setuptools',
|
||||
'zope.interface',
|
||||
]
|
||||
|
||||
@@ -31,6 +30,7 @@ setup(
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
|
||||
classifiers=[
|
||||
'Development Status :: 3 - Alpha',
|
||||
'Environment :: Plugins',
|
||||
@@ -41,7 +41,6 @@ setup(
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.3',
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
|
||||
5  certbot-dns-digitalocean/Dockerfile  (Normal file)
@@ -0,0 +1,5 @@
|
||||
FROM certbot/certbot
|
||||
|
||||
COPY . src/certbot-dns-digitalocean
|
||||
|
||||
RUN pip install --no-cache-dir --editable src/certbot-dns-digitalocean
|
||||
@@ -50,7 +50,8 @@ class AuthenticatorTest(test_util.TempDirTestCase, dns_test_common.BaseAuthentic
|
||||
|
||||
|
||||
class DigitalOceanClientTest(unittest.TestCase):
|
||||
id = 1
|
||||
|
||||
id_num = 1
|
||||
record_prefix = "_acme-challenge"
|
||||
record_name = record_prefix + "." + DOMAIN
|
||||
record_content = "bar"
|
||||
@@ -70,7 +71,7 @@ class DigitalOceanClientTest(unittest.TestCase):
|
||||
|
||||
domain_mock = mock.MagicMock()
|
||||
domain_mock.name = DOMAIN
|
||||
domain_mock.create_new_domain_record.return_value = {'domain_record': {'id': self.id}}
|
||||
domain_mock.create_new_domain_record.return_value = {'domain_record': {'id': self.id_num}}
|
||||
|
||||
self.manager.get_all_domains.return_value = [wrong_domain_mock, domain_mock]
|
||||
|
||||
|
||||
@@ -10,14 +10,14 @@ Welcome to certbot-dns-digitalocean's documentation!
|
||||
:maxdepth: 2
|
||||
:caption: Contents:
|
||||
|
||||
.. automodule:: certbot_dns_digitalocean
|
||||
:members:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
api
|
||||
|
||||
.. automodule:: certbot_dns_digitalocean
|
||||
:members:
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
|
||||
2  certbot-dns-digitalocean/local-oldest-requirements.txt  (Normal file)
@@ -0,0 +1,2 @@
|
||||
acme[dev]==0.21.1
|
||||
certbot[dev]==0.21.1
|
||||
12  certbot-dns-digitalocean/readthedocs.org.requirements.txt  (Normal file)
@@ -0,0 +1,12 @@
|
||||
# readthedocs.org gives no way to change the install command to "pip
|
||||
# install -e .[docs]" (that would in turn install documentation
|
||||
# dependencies), but it allows to specify a requirements.txt file at
|
||||
# https://readthedocs.org/dashboard/letsencrypt/advanced/ (c.f. #259)
|
||||
|
||||
# Although ReadTheDocs certainly doesn't need to install the project
|
||||
# in --editable mode (-e), just "pip install .[docs]" does not work as
|
||||
# expected and "pip install -e .[docs]" must be used instead
|
||||
|
||||
-e acme
|
||||
-e .
|
||||
-e certbot-dns-digitalocean[docs]
|
||||
Some files were not shown because too many files have changed in this diff.