Compare commits
408 commits: nginx-utf8...test-varia
.azure-pipelines/INSTALL.md (new file, 117 lines)
@@ -0,0 +1,117 @@
# Configuring Azure Pipelines with Certbot

All pipelines are defined in `.azure-pipelines`. Currently there are two:
* `.azure-pipelines/main.yml` is the main one, executed on PRs to master and pushes to master,
* `.azure-pipelines/advanced.yml` adds installer testing on top of the main pipeline, and is executed for `test-*` branches, release branches, and nightly runs for master.

Several templates are defined in `.azure-pipelines/templates`. These YAML files aggregate common job configuration that can be reused in several pipelines.
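As a minimal sketch of how a pipeline file reuses those templates (the actual `main.yml` and `advanced.yml` added by this change appear further down in this diff), a pipeline simply lists them under `jobs:`:

```yaml
# Illustrative pipeline: the triggers are placeholders, the template paths are the ones described above.
trigger:
  - master
pr:
  - master

jobs:
  - template: templates/tests-suite.yml      # shared test jobs
  - template: templates/installer-tests.yml  # Windows installer build and test jobs
```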
Unlike Travis, where CodeCov works without any action required, CodeCov supports Azure Pipelines
(using the codecov-bash utility, not python-coverage for now) only if you provide the Codecov repo token
through the `CODECOV_TOKEN` environment variable. So `CODECOV_TOKEN` needs to be set as a secured
environment variable to allow the main pipeline to publish coverage reports to CodeCov.

This INSTALL.md file explains how to configure Azure Pipelines with Certbot in order to execute the CI/CD logic defined in the `.azure-pipelines` folder.
During this installation, warnings describing user access and legal commitments will be displayed like this:
```
!!! ACCESS REQUIRED !!!
```

This document assumes that the Azure DevOps organization is named _certbot_, and that the Azure DevOps project is also _certbot_.

## Useful links

* https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=azure-devops&tabs=schema
* https://www.azuredevopslabs.com/labs/azuredevops/github-integration/
* https://docs.microsoft.com/en-us/azure/devops/pipelines/ecosystems/python?view=azure-devops

## Prerequisites

### Having a GitHub account

Use your GitHub user for a normal GitHub account, or a user that has administrative rights to the GitHub organization if relevant.

### Having an Azure DevOps account

- Go to https://dev.azure.com/ and click "Start free with GitHub"
- Log in to GitHub

```
!!! ACCESS REQUIRED !!!
Personal user data (email + profile info, in read-only)
```

- Microsoft will create a Live account using the email referenced for the GitHub account. This account is also linked to the GitHub account (meaning you can log in to it using GitHub authentication)
- Proceed with account registration (birth date, country), and add details about name and email contact

```
!!! ACCESS REQUIRED !!!
Microsoft proposes to send commercial emails to this address
Azure DevOps terms of service need to be accepted
```

_Logged in to Azure DevOps, the account is ready._

### Installing Azure Pipelines to GitHub

- On GitHub, go to the Marketplace
- Select Azure Pipelines, and "Set up a plan"
- Select Free, then "Install it for free"
- Click "Complete order and begin installation"

```
!!! ACCESS !!!
Azure Pipelines needs RW on code, RO on metadata, RW on checks, commit statuses, deployments, issues, pull requests.
RW access here is required to allow updates of the pipeline YAML files from the Azure DevOps interface, and to
update the status of builds and PRs on the GitHub side when Azure Pipelines are triggered.
Note however that no admin access is granted here: this means that Azure Pipelines cannot do anything with
protected branches, like master, and cannot modify the security context around this on GitHub.
Access can be defined for all or only selected repositories, which is nice.
```

- Redirected to Azure DevOps, select the account created in the _Having an Azure DevOps account_ section.
- Select the organization, and click "Create a new project" (let's name it the same as the targeted GitHub repo)
- Set the visibility to public, to benefit from 10 parallel jobs

```
!!! ACCESS !!!
Azure Pipelines needs access to the GitHub account (in terms of being able to check it is valid), and to the resources shared between the GitHub account and Azure Pipelines.
```

_Done. We can move on to pipeline configuration._

## Import an existing pipeline from the `.azure-pipelines` folder

- On Azure DevOps, go to your organization (e.g. _certbot_) then your project (e.g. _certbot_)
- Click the "Pipelines" tab
- Click "New pipeline"
- Where is your code?: select "__Use the classic editor__"

__Warning: Do not choose the GitHub option in the "Where is your code?" section. Indeed, this option will trigger an OAuth
permission grant from Azure Pipelines to GitHub in order to set up a GitHub OAuth application. The permissions asked
for then are way too large (admin level on almost everything), while the classic approach does not add any more
permissions, and works perfectly well.__

- Select GitHub in the "Select your repository" section, choose certbot/certbot as the Repository, and master as the default branch.
- Click the YAML option for "Select a template"
- Choose a name for the pipeline (e.g. test-pipeline), and browse to the actual pipeline YAML definition in the
  "YAML file path" input (e.g. `.azure-pipelines/test-pipeline.yml`)
- Click "Save & queue", choose the master branch to build the first pipeline, and click the "Save and run" button.

_Done. The pipeline is operational. Repeat to add more pipelines from existing YAML files in `.azure-pipelines`._

## Add a secret variable to a pipeline (like `CODECOV_TOKEN`)

__NB: The following steps assume that you have already set up the YAML pipeline file to
consume the secret variable that these steps will create as an environment variable.
For a variable named `CODECOV_TOKEN` consuming the variable `codecov_token`,
in the YAML file this setup would take the following form:__
```
steps:
  - script: ./do_something_that_consumes_CODECOV_TOKEN # e.g. `codecov -F windows`
    env:
      CODECOV_TOKEN: $(codecov_token)
```

- On Azure DevOps, go to your organization, project, Pipelines tab
- Select the pipeline, click the "Edit" button, then click the "Variables" button
- Set the name (e.g. `codecov_token`) and value, and tick "Keep this value secret"
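For reference, the `templates/tests-suite.yml` file added later in this change consumes the secret exactly this way: the job loads the `certbot-common` variable group and maps `$(codecov_token)` into the environment of its coverage upload step. A trimmed sketch of that wiring (not a complete job definition):

```yaml
variables:
  - group: certbot-common          # variable group made available to the job
steps:
  - bash: ./codecov-bash -F windows || echo "Codecov did not collect coverage reports"
    condition: eq(variables['TOXENV'], 'py37-cover')
    env:
      CODECOV_TOKEN: $(codecov_token)  # secret variables must be mapped into env explicitly
```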
.azure-pipelines/advanced.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
# Advanced pipeline for isolated checks and release purpose
trigger:
- test-*
- '*.x'
pr:
- test-*
- '*.x'
# This pipeline is also nightly run on master
schedules:
- cron: "4 0 * * *"
  displayName: Nightly build
  branches:
    include:
    - master

jobs:
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml
.azure-pipelines/main.yml (new file, 11 lines)
@@ -0,0 +1,11 @@
trigger:
# apache-parser-v2 is a temporary branch for doing work related to
# rewriting the parser in the Apache plugin.
- apache-parser-v2
- master
pr:
- apache-parser-v2
- master

jobs:
- template: templates/tests-suite.yml
.azure-pipelines/templates/installer-tests.yml (new file, 31 lines)
@@ -0,0 +1,31 @@
jobs:
  - job: installer
    pool:
      vmImage: vs2017-win2016
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.7
          architecture: x86
          addToPath: true
      - script: python windows-installer/construct.py
        displayName: Build Certbot installer
      - task: CopyFiles@2
        inputs:
          sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
          contents: '*.exe'
          targetFolder: $(Build.ArtifactStagingDirectory)
      - task: PublishPipelineArtifact@1
        inputs:
          path: $(Build.ArtifactStagingDirectory)
          artifact: WindowsInstaller
      - script: $(Build.ArtifactStagingDirectory)\certbot-installer-win32.exe /S
        displayName: Install Certbot
      - script: |
          python -m venv venv
          venv\Scripts\python tools\pip_install.py -e certbot-ci
        displayName: Prepare Certbot-CI
      - script: |
          set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
          venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
        displayName: Run integration tests
.azure-pipelines/templates/tests-suite.yml (new file, 38 lines)
@@ -0,0 +1,38 @@
jobs:
  - job: test
    pool:
      vmImage: vs2017-win2016
    strategy:
      matrix:
        py35:
          PYTHON_VERSION: 3.5
          TOXENV: py35
        py37-cover:
          PYTHON_VERSION: 3.7
          TOXENV: py37-cover
        integration-certbot:
          PYTHON_VERSION: 3.7
          TOXENV: integration-certbot
          PYTEST_ADDOPTS: --numprocesses 4
    variables:
      - group: certbot-common
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: $(PYTHON_VERSION)
          addToPath: true
      - script: python tools/pip_install.py -U tox coverage
        displayName: Install dependencies
      - script: python -m tox
        displayName: Run tox
      # We do not require the codecov report upload to succeed. So, to avoid breaking the pipeline
      # if something goes wrong, each command is suffixed with a fallback that hides any non-zero
      # exit code and echoes an informative message instead.
      - bash: |
          curl -s https://codecov.io/bash -o codecov-bash || echo "Failed to download codecov-bash"
          chmod +x codecov-bash || echo "Failed to apply execute permissions on codecov-bash"
          ./codecov-bash -F windows || echo "Codecov did not collect coverage reports"
        condition: eq(variables['TOXENV'], 'py37-cover')
        env:
          CODECOV_TOKEN: $(codecov_token)
        displayName: Publish coverage
.codecov.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
coverage:
  status:
    project:
      default: off
      linux:
        flags: linux
        # Fixed target instead of auto set by #7173, can
        # be removed when flags in Codecov are added back.
        target: 97.4
        threshold: 0.1
        base: auto
      windows:
        flags: windows
        # Fixed target instead of auto set by #7173, can
        # be removed when flags in Codecov are added back.
        target: 97.7
        threshold: 0.1
        base: auto
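The `linux` and `windows` entries are keyed on the flag passed when coverage is uploaded: the Azure pipeline's `tests-suite.yml` template uploads with `./codecov-bash -F windows`, and the Travis configuration below uploads Linux coverage with the matching flag, e.g.:

```yaml
# From .travis.yml: upload Linux coverage under the "linux" flag after a successful py27-cover run
after_success: '[ "$TOXENV" == "py27-cover" ] && codecov -F linux'
```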
@@ -1,2 +1,5 @@
[run]
omit = */setup.py

[report]
omit = */setup.py
.github/stale.yml (vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
# Configuration for https://github.com/marketplace/stale

# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 365

# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
# When changing this value, be sure to also update markComment below.
daysUntilClose: 30

# Ignore issues with an assignee (defaults to false)
exemptAssignees: true

# Label to use when marking as stale
staleLabel: needs-update

# Comment to post when marking as stale. Set to `false` to disable
markComment: >
  We've made a lot of changes to Certbot since this issue was opened. If you
  still have this issue with an up-to-date version of Certbot, can you please
  add a comment letting us know? This helps us to better see what issues are
  still affecting our users. If there is no activity in the next 30 days, this
  issue will be automatically closed.

# Comment to post when closing a stale Issue or Pull Request.
closeComment: >
  This issue has been closed due to lack of activity, but if you think it
  should be reopened, please open a new issue with a link to this one and we'll
  take a look.

# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 1

# Don't mark pull requests as stale.
only: issues
.gitignore (vendored, 5 lines changed)
@@ -44,3 +44,8 @@ tests/letstest/venv/

# docker files
.docker

# certbot tests
.certbot_test_workspace
**/assets/pebble*
**/assets/challtestsrv*
@@ -251,7 +251,7 @@ ignored-modules=pkg_resources,confargparse,argparse,six.moves,six.moves.urllib

# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject
ignored-classes=Field,Header,JWS,closing

# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.

@@ -304,7 +304,7 @@ max-args=6

# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
ignored-argument-names=(unused)?_.*|dummy

# Maximum number of locals for function / method body
max-locals=15
.travis.yml (439 lines changed)
@@ -1,185 +1,273 @@
|
||||
language: python
|
||||
dist: xenial
|
||||
|
||||
cache:
|
||||
directories:
|
||||
- $HOME/.cache/pip
|
||||
|
||||
before_script:
|
||||
- 'if [ $TRAVIS_OS_NAME = osx ] ; then ulimit -n 1024 ; fi'
|
||||
- 'if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then ulimit -n 1024 ; fi'
|
||||
# On Travis, the fastest parallelization for integration tests has proved to be 4.
|
||||
- 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
|
||||
# Use Travis retry feature for farm tests since they are flaky
|
||||
- 'if [[ "$TOXENV" == "travis-test-farm"* ]]; then export TRAVIS_RETRY=travis_retry; fi'
|
||||
- export TOX_TESTENV_PASSENV=TRAVIS
|
||||
|
||||
matrix:
|
||||
include:
|
||||
# These environments are always executed
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v1 INTEGRATION_TEST=all TOXENV=py27_install
|
||||
sudo: required
|
||||
services: docker
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v2 INTEGRATION_TEST=all TOXENV=py27_install
|
||||
sudo: required
|
||||
services: docker
|
||||
- python: "2.7"
|
||||
env: TOXENV=py27-cover FYI="py27 tests + code coverage"
|
||||
- sudo: required
|
||||
env: TOXENV=nginx_compat
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
- python: "2.7"
|
||||
env: TOXENV=lint
|
||||
- python: "3.4"
|
||||
env: TOXENV=mypy
|
||||
- python: "3.5"
|
||||
env: TOXENV=mypy
|
||||
- python: "2.7"
|
||||
env: TOXENV='py27-{acme,apache,certbot,dns,nginx,postfix}-oldest'
|
||||
sudo: required
|
||||
services: docker
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34
|
||||
sudo: required
|
||||
services: docker
|
||||
- python: "3.7"
|
||||
dist: xenial
|
||||
env: TOXENV=py37
|
||||
sudo: required
|
||||
services: docker
|
||||
- sudo: required
|
||||
env: TOXENV=apache_compat
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_trusty
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
- python: "2.7"
|
||||
env: TOXENV=apacheconftest-with-pebble
|
||||
sudo: required
|
||||
services: docker
|
||||
- python: "2.7"
|
||||
env: TOXENV=nginxroundtrip
|
||||
|
||||
# These environments are executed on cron events only
|
||||
- python: "3.7"
|
||||
dist: xenial
|
||||
env: TOXENV=py37 CERTBOT_NO_PIN=1
|
||||
if: type = cron
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v1 INTEGRATION_TEST=certbot TOXENV=py27-certbot-oldest
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v2 INTEGRATION_TEST=certbot TOXENV=py27-certbot-oldest
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v1 INTEGRATION_TEST=nginx TOXENV=py27-nginx-oldest
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v2 INTEGRATION_TEST=nginx TOXENV=py27-nginx-oldest
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34 BOULDER_INTEGRATION=v1
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34 BOULDER_INTEGRATION=v2
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "3.5"
|
||||
env: TOXENV=py35 BOULDER_INTEGRATION=v1
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "3.5"
|
||||
env: TOXENV=py35 BOULDER_INTEGRATION=v2
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "3.6"
|
||||
env: TOXENV=py36 BOULDER_INTEGRATION=v1
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "3.6"
|
||||
env: TOXENV=py36 BOULDER_INTEGRATION=v2
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "3.7"
|
||||
dist: xenial
|
||||
env: TOXENV=py37 BOULDER_INTEGRATION=v1
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- python: "3.7"
|
||||
dist: xenial
|
||||
env: TOXENV=py37 BOULDER_INTEGRATION=v2
|
||||
sudo: required
|
||||
services: docker
|
||||
if: type = cron
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_xenial
|
||||
services: docker
|
||||
if: type = cron
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_jessie
|
||||
services: docker
|
||||
if: type = cron
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_centos6
|
||||
services: docker
|
||||
if: type = cron
|
||||
- sudo: required
|
||||
env: TOXENV=docker_dev
|
||||
services: docker
|
||||
addons:
|
||||
apt:
|
||||
packages: # don't install nginx and apache
|
||||
- libaugeas0
|
||||
if: type = cron
|
||||
- language: generic
|
||||
env: TOXENV=py27
|
||||
os: osx
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
- augeas
|
||||
- python2
|
||||
if: type = cron
|
||||
- language: generic
|
||||
env: TOXENV=py3
|
||||
os: osx
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
- augeas
|
||||
- python3
|
||||
if: type = cron
|
||||
|
||||
|
||||
|
||||
# Only build pushes to the master branch, PRs, and branches beginning with
|
||||
# `test-` or of the form `digit(s).digit(s).x`. This reduces the number of
|
||||
# simultaneous Travis runs, which speeds turnaround time on review since there
|
||||
# is a cap on the number of simultaneous runs.
|
||||
branches:
|
||||
only:
|
||||
# apache-parser-v2 is a temporary branch for doing work related to
|
||||
# rewriting the parser in the Apache plugin.
|
||||
- apache-parser-v2
|
||||
- master
|
||||
- /^\d+\.\d+\.x$/
|
||||
- /^test-.*$/
|
||||
|
||||
# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
|
||||
not-on-master: &not-on-master
|
||||
if: NOT (type = push AND branch = master)
|
||||
|
||||
# Jobs for the extended test suite are executed for cron jobs and pushes to
|
||||
# non-development branches. See the explanation for apache-parser-v2 above.
|
||||
extended-test-suite: &extended-test-suite
|
||||
if: type = cron OR (type = push AND branch NOT IN (apache-parser-v2, master))
|
||||
|
||||
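The two entries above are ordinary YAML anchors, and each job in the matrix below opts into one of them through the `<<` merge key; trimmed to its essentials, the pattern is:

```yaml
# Define a mapping once and name it with an anchor...
not-on-master: &not-on-master
  if: NOT (type = push AND branch = master)

matrix:
  include:
    # ...then splice it into any job with the YAML merge key.
    - python: "2.7"
      env: TOXENV=lint
      <<: *not-on-master
```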
matrix:
|
||||
include:
|
||||
# Main test suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=pebble TOXENV=integration
|
||||
<<: *not-on-master
|
||||
|
||||
# This job is always executed, including on master
|
||||
- python: "2.7"
|
||||
env: TOXENV=py27-cover FYI="py27 tests + code coverage"
|
||||
|
||||
- python: "2.7"
|
||||
env: TOXENV=lint
|
||||
<<: *not-on-master
|
||||
- python: "3.4"
|
||||
env: TOXENV=mypy
|
||||
<<: *not-on-master
|
||||
- python: "3.5"
|
||||
env: TOXENV=mypy
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
env: TOXENV='py27-{acme,apache,certbot,dns,nginx}-oldest'
|
||||
<<: *not-on-master
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34
|
||||
<<: *not-on-master
|
||||
- python: "3.7"
|
||||
env: TOXENV=py37
|
||||
<<: *not-on-master
|
||||
- python: "3.8-dev"
|
||||
env: TOXENV=py38
|
||||
<<: *not-on-master
|
||||
- sudo: required
|
||||
env: TOXENV=apache_compat
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
<<: *not-on-master
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_xenial
|
||||
services: docker
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
env: TOXENV=apacheconftest-with-pebble
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
env: TOXENV=nginxroundtrip
|
||||
<<: *not-on-master
|
||||
|
||||
# Extended test suite on cron jobs and pushes to tested branches other than master
|
||||
- sudo: required
|
||||
env: TOXENV=nginx_compat
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-apache2
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-leauto-upgrades
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
git:
|
||||
depth: false # This is needed to have the history to checkout old versions of certbot-auto.
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-certonly-standalone
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-sdists
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
env: TOXENV=py37 CERTBOT_NO_PIN=1
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration-certbot-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration-certbot-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration-nginx-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration-nginx-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34
|
||||
<<: *extended-test-suite
|
||||
- python: "3.5"
|
||||
env: TOXENV=py35
|
||||
<<: *extended-test-suite
|
||||
- python: "3.6"
|
||||
env: TOXENV=py36
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
env: TOXENV=py37
|
||||
<<: *extended-test-suite
|
||||
- python: "3.8-dev"
|
||||
env: TOXENV=py38
|
||||
<<: *extended-test-suite
|
||||
- python: "3.4"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.4"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.5"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.5"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.6"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.6"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.8-dev"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
<<: *extended-test-suite
|
||||
- python: "3.8-dev"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
<<: *extended-test-suite
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_jessie
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_centos6
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- sudo: required
|
||||
env: TOXENV=docker_dev
|
||||
services: docker
|
||||
addons:
|
||||
apt:
|
||||
packages: # don't install nginx and apache
|
||||
- libaugeas0
|
||||
<<: *extended-test-suite
|
||||
- language: generic
|
||||
env: TOXENV=py27
|
||||
os: osx
|
||||
# Using this osx_image is a workaround for
|
||||
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
|
||||
osx_image: xcode10.2
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
- augeas
|
||||
- python2
|
||||
<<: *extended-test-suite
|
||||
- language: generic
|
||||
env: TOXENV=py3
|
||||
os: osx
|
||||
# Using this osx_image is a workaround for
|
||||
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
|
||||
osx_image: xcode10.2
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
- augeas
|
||||
- python3
|
||||
<<: *extended-test-suite
|
||||
|
||||
# container-based infrastructure
|
||||
sudo: false
|
||||
|
||||
@@ -187,7 +275,6 @@ addons:
|
||||
apt:
|
||||
packages: # Keep in sync with letsencrypt-auto-source/pieces/bootstrappers/deb_common.sh and Boulder.
|
||||
- python-dev
|
||||
- python-virtualenv
|
||||
- gcc
|
||||
- libaugeas0
|
||||
- libssl-dev
|
||||
@@ -197,19 +284,29 @@ addons:
|
||||
- nginx-light
|
||||
- openssl
|
||||
|
||||
install: "travis_retry $(command -v pip || command -v pip3) install codecov tox"
|
||||
script:
|
||||
- travis_retry tox
|
||||
- '[ -z "${BOULDER_INTEGRATION+x}" ] || (travis_retry tests/boulder-fetch.sh && tests/tox-boulder-integration.sh)'
|
||||
# tools/pip_install.py is used to pin packages to a known working version
|
||||
# except in tests where the environment variable CERTBOT_NO_PIN is set.
|
||||
# virtualenv is listed here explicitly to make sure it is upgraded when
|
||||
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
|
||||
# version of virtualenv.
|
||||
install: 'tools/pip_install.py -U codecov tox virtualenv'
|
||||
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
|
||||
# script command. It is set only to `travis_retry` during farm tests, in
|
||||
# order to trigger the Travis retry feature, and compensate for the inherent
|
||||
# flakiness of these specific tests.
|
||||
script: '$TRAVIS_RETRY tox'
|
||||
|
||||
after_success: '[ "$TOXENV" == "py27-cover" ] && codecov'
|
||||
after_success: '[ "$TOXENV" == "py27-cover" ] && codecov -F linux'
|
||||
|
||||
notifications:
|
||||
email: false
|
||||
irc:
|
||||
channels:
|
||||
- secure: "SGWZl3ownKx9xKVV2VnGt7DqkTmutJ89oJV9tjKhSs84kLijU6EYdPnllqISpfHMTxXflNZuxtGo0wTDYHXBuZL47w1O32W6nzuXdra5zC+i4sYQwYULUsyfOv9gJX8zWAULiK0Z3r0oho45U+FR5ZN6TPCidi8/eGU+EEPwaAw="
|
||||
# This is set to a secure variable to prevent forks from sending
|
||||
# notifications. This value was created by installing
|
||||
# https://github.com/travis-ci/travis.rb and running
|
||||
# `travis encrypt "chat.freenode.net#certbot-devel"`.
|
||||
- secure: "EWW66E2+KVPZyIPR8ViENZwfcup4Gx3/dlimmAZE0WuLwxDCshBBOd3O8Rf6pBokEoZlXM5eDT6XdyJj8n0DLslgjO62pExdunXpbcMwdY7l1ELxX2/UbnDTE6UnPYa09qVBHNG7156Z6yE0x2lH4M9Ykvp0G0cubjPQHylAwo0="
|
||||
on_cancel: never
|
||||
on_success: never
|
||||
on_failure: always
|
||||
use_notice: true
|
||||
|
||||
@@ -5,6 +5,7 @@ Authors
|
||||
* Aaron Zuehlke
|
||||
* Ada Lovelace
|
||||
* [Adam Woodbeck](https://github.com/awoodbeck)
|
||||
* [Adrien Ferrand](https://github.com/adferrand)
|
||||
* [Aidin Gharibnavaz](https://github.com/aidin36)
|
||||
* [AJ ONeal](https://github.com/coolaj86)
|
||||
* [Alcaro](https://github.com/Alcaro)
|
||||
@@ -14,8 +15,10 @@ Authors
|
||||
* [Alex Gaynor](https://github.com/alex)
|
||||
* [Alex Halderman](https://github.com/jhalderm)
|
||||
* [Alex Jordan](https://github.com/strugee)
|
||||
* [Alex Zorin](https://github.com/alexzorin)
|
||||
* [Amjad Mashaal](https://github.com/TheNavigat)
|
||||
* [Andrew Murray](https://github.com/radarhere)
|
||||
* [Andrzej Górski](https://github.com/andrzej3393)
|
||||
* [Anselm Levskaya](https://github.com/levskaya)
|
||||
* [Antoine Jacoutot](https://github.com/ajacoutot)
|
||||
* [asaph](https://github.com/asaph)
|
||||
@@ -75,6 +78,7 @@ Authors
|
||||
* [Fabian](https://github.com/faerbit)
|
||||
* [Faidon Liambotis](https://github.com/paravoid)
|
||||
* [Fan Jiang](https://github.com/tcz001)
|
||||
* [Felix Lechner](https://github.com/lechner)
|
||||
* [Felix Schwarz](https://github.com/FelixSchwarz)
|
||||
* [Felix Yan](https://github.com/felixonmars)
|
||||
* [Filip Ochnik](https://github.com/filipochnik)
|
||||
@@ -124,6 +128,7 @@ Authors
|
||||
* [Joubin Jabbari](https://github.com/joubin)
|
||||
* [Juho Juopperi](https://github.com/jkjuopperi)
|
||||
* [Kane York](https://github.com/riking)
|
||||
* [Kenichi Maehashi](https://github.com/kmaehashi)
|
||||
* [Kenneth Skovhede](https://github.com/kenkendk)
|
||||
* [Kevin Burke](https://github.com/kevinburke)
|
||||
* [Kevin London](https://github.com/kevinlondon)
|
||||
@@ -159,6 +164,7 @@ Authors
|
||||
* [Michael Schumacher](https://github.com/schumaml)
|
||||
* [Michael Strache](https://github.com/Jarodiv)
|
||||
* [Michael Sverdlin](https://github.com/sveder)
|
||||
* [Michael Watters](https://github.com/blackknight36)
|
||||
* [Michal Moravec](https://github.com/Majkl578)
|
||||
* [Michal Papis](https://github.com/mpapis)
|
||||
* [Minn Soe](https://github.com/MinnSoe)
|
||||
|
||||
CHANGELOG.md (355 lines changed)
@@ -2,17 +2,365 @@
|
||||
|
||||
Certbot adheres to [Semantic Versioning](https://semver.org/).
|
||||
|
||||
## 0.31.0 - master
|
||||
## 0.40.0 - master
|
||||
|
||||
### Added
|
||||
|
||||
* Avoid reprocessing challenges that are already validated
|
||||
when a certificate is issued.
|
||||
*
|
||||
|
||||
### Changed
|
||||
|
||||
* Removed `--fast` flag from the test farm tests
|
||||
|
||||
### Fixed
|
||||
|
||||
*
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.39.0 - 2019-10-01
|
||||
|
||||
### Added
|
||||
|
||||
* Support for Python 3.8 was added to Certbot and all of its components.
|
||||
* Support for CentOS 8 was added to certbot-auto.
|
||||
|
||||
### Changed
|
||||
|
||||
* Don't send OCSP requests for expired certificates
|
||||
* Return to using platform.linux_distribution instead of distro.linux_distribution in OS fingerprinting for Python < 3.8
|
||||
* Updated the Nginx plugin's TLS configuration to keep support for some versions of IE11.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fixed OS detection in the Apache plugin on RHEL 6.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.38.0 - 2019-09-03
|
||||
|
||||
### Added
|
||||
|
||||
* Disable session tickets for Nginx users when appropriate.
|
||||
|
||||
### Changed
|
||||
|
||||
* If Certbot fails to rollback your server configuration, the error message
|
||||
links to the Let's Encrypt forum. Change the link to the Help category now
|
||||
that the Server category has been closed.
|
||||
* Replace platform.linux_distribution with distro.linux_distribution as a step
|
||||
towards Python 3.8 support in Certbot.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fixed OS detection in the Apache plugin on Scientific Linux.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.37.2 - 2019-08-21
|
||||
|
||||
* Stop disabling TLS session tickets in Nginx as it caused TLS failures on
|
||||
some systems.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.37.1 - 2019-08-08
|
||||
|
||||
### Fixed
|
||||
|
||||
* Stop disabling TLS session tickets in Apache as it caused TLS failures on
|
||||
some systems.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.37.0 - 2019-08-07
|
||||
|
||||
### Added
|
||||
|
||||
* Turn off session tickets for apache plugin by default
|
||||
* acme: Authz deactivation added to `acme` module.
|
||||
|
||||
### Changed
|
||||
|
||||
* Follow updated Mozilla recommendations for Nginx ssl_protocols, ssl_ciphers,
|
||||
and ssl_prefer_server_ciphers
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix certbot-auto failures on RHEL 8.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.36.0 - 2019-07-11
|
||||
|
||||
### Added
|
||||
|
||||
* Turn off session tickets for nginx plugin by default
|
||||
* Added missing error types from RFC8555 to acme
|
||||
|
||||
### Changed
|
||||
|
||||
* Support for Ubuntu 14.04 Trusty has been removed.
|
||||
* Update the 'manage your account' help to be more generic.
|
||||
* The error message when Certbot's Apache plugin is unable to modify your
|
||||
Apache configuration has been improved.
|
||||
* Certbot's config_changes subcommand has been deprecated and will be
|
||||
removed in a future release.
|
||||
* `certbot config_changes` no longer accepts a --num parameter.
|
||||
* The functions `certbot.plugins.common.Installer.view_config_changes` and
|
||||
`certbot.reverter.Reverter.view_config_changes` have been deprecated and will
|
||||
be removed in a future release.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Replace some unnecessary platform-specific line separation.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.35.1 - 2019-06-10
|
||||
|
||||
### Fixed
|
||||
|
||||
* Support for specifying an authoritative base domain in our dns-rfc2136 plugin
|
||||
has been removed. This feature was added in our last release but had a bug
|
||||
which caused the plugin to fail so the feature has been removed until it can
|
||||
be added properly.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
package with changes other than its version number was:
|
||||
|
||||
* certbot-dns-rfc2136
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.35.0 - 2019-06-05
|
||||
|
||||
### Added
|
||||
|
||||
* dns_rfc2136 plugin now supports explicitly specifying an authoritative
|
||||
base domain for cases when the automatic method does not work (e.g.
|
||||
Split horizon DNS)
|
||||
|
||||
### Changed
|
||||
|
||||
*
|
||||
|
||||
### Fixed
|
||||
|
||||
* Renewal parameter `webroot_path` is always saved, avoiding some regressions
|
||||
when `webroot` authenticator plugin is invoked with no challenge to perform.
|
||||
* Certbot now accepts OCSP responses when an explicit authorized
|
||||
responder, different from the issuer, is used to sign OCSP
|
||||
responses.
|
||||
* Scripts in Certbot hook directories are no longer executed when their
|
||||
filenames end in a tilde.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
package with changes other than its version number was:
|
||||
|
||||
* certbot
|
||||
* certbot-dns-rfc2136
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.34.2 - 2019-05-07
|
||||
|
||||
### Fixed
|
||||
|
||||
* certbot-auto no longer writes a check_permissions.py script at the root
|
||||
of the filesystem.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
changes in this release were to certbot-auto.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.34.1 - 2019-05-06
|
||||
|
||||
### Fixed
|
||||
|
||||
* certbot-auto no longer prints a blank line when there are no permissions
|
||||
problems.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
changes in this release were to certbot-auto.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.34.0 - 2019-05-01
|
||||
|
||||
### Changed
|
||||
|
||||
* Apache plugin now tries to restart httpd on Fedora using systemctl if a
|
||||
configuration test error is detected. This has to be done due to the way
|
||||
Fedora now generates the self signed certificate files upon first
|
||||
restart.
|
||||
* Updated Certbot and its plugins to improve the handling of file system permissions
|
||||
on Windows as a step towards adding proper Windows support to Certbot.
|
||||
* Updated urllib3 to 1.24.2 in certbot-auto.
|
||||
* Removed the fallback introduced with 0.32.0 in `acme` to retry a challenge response
|
||||
with a `keyAuthorization` if sending the response without this field caused a
|
||||
`malformed` error to be received from the ACME server.
|
||||
* Linode DNS plugin now supports api keys created from their new panel
|
||||
at [cloud.linode.com](https://cloud.linode.com)
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fixed Google DNS Challenge issues when private zones exist
|
||||
* Adding a warning noting that future versions of Certbot will automatically configure the
|
||||
webserver so that all requests redirect to secure HTTPS access. You can control this
|
||||
behavior and disable this warning with the --redirect and --no-redirect flags.
|
||||
* certbot-auto now prints warnings when run as root with insecure file system
|
||||
permissions. If you see these messages, you should fix the problem by
|
||||
following the instructions at
|
||||
https://community.letsencrypt.org/t/certbot-auto-deployment-best-practices/91979/,
|
||||
however, these warnings can be disabled as necessary with the flag
|
||||
--no-permissions-check.
|
||||
* `acme` module uses now a POST-as-GET request to retrieve the registration
|
||||
from an ACME v2 server
|
||||
* Convert the tsig algorithm specified in the certbot_dns_rfc2136 configuration file to
|
||||
all uppercase letters before validating. This makes the value in the config case
|
||||
insensitive.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
package with changes other than its version number was:
|
||||
|
||||
* acme
|
||||
* certbot
|
||||
* certbot-apache
|
||||
* certbot-dns-cloudflare
|
||||
* certbot-dns-cloudxns
|
||||
* certbot-dns-digitalocean
|
||||
* certbot-dns-dnsimple
|
||||
* certbot-dns-dnsmadeeasy
|
||||
* certbot-dns-gehirn
|
||||
* certbot-dns-google
|
||||
* certbot-dns-linode
|
||||
* certbot-dns-luadns
|
||||
* certbot-dns-nsone
|
||||
* certbot-dns-ovh
|
||||
* certbot-dns-rfc2136
|
||||
* certbot-dns-route53
|
||||
* certbot-dns-sakuracloud
|
||||
* certbot-nginx
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.33.1 - 2019-04-04
|
||||
|
||||
### Fixed
|
||||
|
||||
* A bug causing certbot-auto to print warnings or crash on some RHEL based
|
||||
systems has been resolved.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
changes in this release were to certbot-auto.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.33.0 - 2019-04-03
|
||||
|
||||
### Added
|
||||
|
||||
* Fedora 29+ is now supported by certbot-auto. Since Python 2.x is on a deprecation
|
||||
path in Fedora, certbot-auto will install and use Python 3.x on Fedora 29+.
|
||||
* CLI flag `--https-port` has been added for Nginx plugin exclusively, and replaces
|
||||
`--tls-sni-01-port`. It defines the HTTPS port the Nginx plugin will use while
|
||||
setting up a new SSL vhost. By default the HTTPS port is 443.
|
||||
|
||||
### Changed
|
||||
|
||||
* Support for TLS-SNI-01 has been removed from all official Certbot plugins.
|
||||
* Attributes related to the TLS-SNI-01 challenge in `acme.challenges` and `acme.standalone`
|
||||
modules are deprecated and will be removed soon.
|
||||
* CLI flags `--tls-sni-01-port` and `--tls-sni-01-address` are now no-op, will
|
||||
generate a deprecation warning if used, and will be removed soon.
|
||||
* Options `tls-sni` and `tls-sni-01` in `--preferred-challenges` flag are now no-op,
|
||||
will generate a deprecation warning if used, and will be removed soon.
|
||||
* CLI flag `--standalone-supported-challenges` has been removed.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Certbot uses the Python library cryptography for OCSP when cryptography>=2.5
|
||||
is installed. We fixed a bug in Certbot causing it to interpret timestamps in
|
||||
the OCSP response as being in the local timezone rather than UTC.
|
||||
* Issue causing the default CentOS 6 TLS configuration to ignore some of the
|
||||
HTTPS VirtualHosts created by Certbot. mod_ssl loading is now moved to main
|
||||
http.conf for this environment where possible.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
package with changes other than its version number was:
|
||||
|
||||
* acme
|
||||
* certbot
|
||||
* certbot-apache
|
||||
* certbot-nginx
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.32.0 - 2019-03-06
|
||||
|
||||
### Added
|
||||
|
||||
* If possible, Certbot uses built-in support for OCSP from recent cryptography
|
||||
versions instead of the OpenSSL binary: as a consequence Certbot does not need
|
||||
the OpenSSL binary to be installed anymore if cryptography>=2.5 is installed.
|
||||
|
||||
### Changed
|
||||
|
||||
* Certbot and its acme module now depend on josepy>=1.1.0 to avoid printing the
|
||||
warnings described at https://github.com/certbot/josepy/issues/13.
|
||||
* Apache plugin now respects CERTBOT_DOCS environment variable when adding
|
||||
command line defaults.
|
||||
* The running of manual plugin hooks is now always included in Certbot's log
|
||||
output.
|
||||
* Tests execution for certbot, certbot-apache and certbot-nginx packages now relies on pytest.
|
||||
* An ACME CA server may return a "Retry-After" HTTP header on authorization polling, as
|
||||
specified in the ACME protocol, to indicate when the next polling should occur. Certbot now
|
||||
reads this header if set and respects its value.
|
||||
* The `acme` module avoids sending the `keyAuthorization` field in the JWS
|
||||
payload when responding to a challenge as the field is not included in the
|
||||
current ACME protocol. To ease the migration path for ACME CA servers,
|
||||
Certbot and its `acme` module will first try the request without the
|
||||
`keyAuthorization` field but will temporarily retry the request with the
|
||||
field included if a `malformed` error is received. This fallback will be
|
||||
removed in version 0.34.0.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
package with changes other than its version number was:
|
||||
|
||||
* acme
|
||||
* certbot
|
||||
* certbot-apache
|
||||
* certbot-nginx
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.31.0 - 2019-02-07
|
||||
|
||||
### Added
|
||||
|
||||
* Avoid reprocessing challenges that are already validated
|
||||
when a certificate is issued.
|
||||
* Support for initiating (but not solving end-to-end) TLS-ALPN-01 challenges
|
||||
with the `acme` module.
|
||||
|
||||
### Changed
|
||||
|
||||
* Certbot's official Docker images are now based on Alpine Linux 3.9 rather
|
||||
than 3.7. The new version comes with OpenSSL 1.1.1.
|
||||
* Lexicon-based DNS plugins are now fully compatible with Lexicon 3.x (support
|
||||
on 2.x branch is maintained).
|
||||
* Apache plugin now attempts to configure all VirtualHosts matching requested
|
||||
domain name instead of only a single one when answering the HTTP-01 challenge.
|
||||
|
||||
### Fixed
|
||||
|
||||
@@ -26,6 +374,7 @@ package with changes other than its version number was:
|
||||
|
||||
* acme
|
||||
* certbot
|
||||
* certbot-apache
|
||||
* certbot-dns-cloudxns
|
||||
* certbot-dns-dnsimple
|
||||
* certbot-dns-dnsmadeeasy
|
||||
|
||||
CODE_OF_CONDUCT.md (new file, 1 line)
@@ -0,0 +1 @@
This project is governed by [EFF's Public Projects Code of Conduct](https://www.eff.org/pages/eppcode).
@@ -33,3 +33,5 @@ started. In particular, we recommend you read these sections
- [Finding issues to work on](https://certbot.eff.org/docs/contributing.html#find-issues-to-work-on)
- [Coding style](https://certbot.eff.org/docs/contributing.html#coding-style)
- [Submitting a pull request](https://certbot.eff.org/docs/contributing.html#submitting-a-pull-request)
- [EFF's Public Projects Code of Conduct](https://www.eff.org/pages/eppcode)
Dockerfile (29 lines removed)
@@ -1,29 +0,0 @@
FROM python:2-alpine3.7

ENTRYPOINT [ "certbot" ]
EXPOSE 80 443
VOLUME /etc/letsencrypt /var/lib/letsencrypt
WORKDIR /opt/certbot

COPY CHANGELOG.md README.rst setup.py src/
COPY letsencrypt-auto-source/pieces/dependency-requirements.txt .
COPY acme src/acme
COPY certbot src/certbot

RUN apk add --no-cache --virtual .certbot-deps \
        libffi \
        libssl1.0 \
        openssl \
        ca-certificates \
        binutils
RUN apk add --no-cache --virtual .build-deps \
        gcc \
        linux-headers \
        openssl-dev \
        musl-dev \
        libffi-dev \
    && pip install -r /opt/certbot/dependency-requirements.txt \
    && pip install --no-cache-dir \
        --editable /opt/certbot/src/acme \
        --editable /opt/certbot/src \
    && apk del .build-deps
@@ -1,5 +1,5 @@
|
||||
# This Dockerfile builds an image for development.
|
||||
FROM ubuntu:xenial
|
||||
FROM debian:buster
|
||||
|
||||
# Note: this only exposes the port to other docker containers.
|
||||
EXPOSE 80 443
|
||||
|
||||
@@ -1,75 +0,0 @@
|
||||
# https://github.com/letsencrypt/letsencrypt/pull/431#issuecomment-103659297
|
||||
# it is more likely developers will already have ubuntu:trusty rather
|
||||
# than e.g. debian:jessie and image size differences are negligible
|
||||
FROM ubuntu:trusty
|
||||
MAINTAINER Jakub Warmuz <jakub@warmuz.org>
|
||||
MAINTAINER William Budington <bill@eff.org>
|
||||
|
||||
# Note: this only exposes the port to other docker containers. You
|
||||
# still have to bind to 443@host at runtime, as per the ACME spec.
|
||||
EXPOSE 443
|
||||
|
||||
# TODO: make sure --config-dir and --work-dir cannot be changed
|
||||
# through the CLI (certbot-docker wrapper that uses standalone
|
||||
# authenticator and text mode only?)
|
||||
VOLUME /etc/letsencrypt /var/lib/letsencrypt
|
||||
|
||||
WORKDIR /opt/certbot
|
||||
|
||||
# no need to mkdir anything:
|
||||
# https://docs.docker.com/reference/builder/#copy
|
||||
# If <dest> doesn't exist, it is created along with all missing
|
||||
# directories in its path.
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
COPY letsencrypt-auto-source/letsencrypt-auto /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto
|
||||
RUN /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto --os-packages-only && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/* \
|
||||
/tmp/* \
|
||||
/var/tmp/*
|
||||
|
||||
# the above is not likely to change, so by putting it further up the
|
||||
# Dockerfile we make sure we cache as much as possible
|
||||
|
||||
|
||||
COPY setup.py README.rst CHANGELOG.md MANIFEST.in letsencrypt-auto-source/pieces/pipstrap.py /opt/certbot/src/
|
||||
|
||||
# all above files are necessary for setup.py and venv setup, however,
|
||||
# package source code directory has to be copied separately to a
|
||||
# subdirectory...
|
||||
# https://docs.docker.com/reference/builder/#copy: "If <src> is a
|
||||
# directory, the entire contents of the directory are copied,
|
||||
# including filesystem metadata. Note: The directory itself is not
|
||||
# copied, just its contents." Order again matters, three files are far
|
||||
# more likely to be cached than the whole project directory
|
||||
|
||||
COPY certbot /opt/certbot/src/certbot/
|
||||
COPY acme /opt/certbot/src/acme/
|
||||
COPY certbot-apache /opt/certbot/src/certbot-apache/
|
||||
COPY certbot-nginx /opt/certbot/src/certbot-nginx/
|
||||
|
||||
|
||||
RUN VIRTUALENV_NO_DOWNLOAD=1 virtualenv --no-site-packages -p python2 /opt/certbot/venv
|
||||
|
||||
# PATH is set now so pipstrap upgrades the correct (v)env
|
||||
ENV PATH /opt/certbot/venv/bin:$PATH
|
||||
RUN /opt/certbot/venv/bin/python /opt/certbot/src/pipstrap.py && \
|
||||
/opt/certbot/venv/bin/pip install \
|
||||
-e /opt/certbot/src/acme \
|
||||
-e /opt/certbot/src \
|
||||
-e /opt/certbot/src/certbot-apache \
|
||||
-e /opt/certbot/src/certbot-nginx
|
||||
|
||||
# install in editable mode (-e) to save space: it's not possible to
|
||||
# "rm -rf /opt/certbot/src" (it's stays in the underlaying image);
|
||||
# this might also help in debugging: you can "docker run --entrypoint
|
||||
# bash" and investigate, apply patches, etc.
|
||||
|
||||
# set up certbot/letsencrypt wrapper to warn people about Dockerfile changes
|
||||
COPY tools/docker-warning.sh /opt/certbot/bin/certbot
|
||||
RUN ln -s /opt/certbot/bin/certbot /opt/certbot/bin/letsencrypt
|
||||
ENV PATH /opt/certbot/bin:$PATH
|
||||
|
||||
ENTRYPOINT [ "certbot" ]
|
||||
40
README.rst
@@ -28,45 +28,19 @@ Contributing
|
||||
If you'd like to contribute to this project please read `Developer Guide
|
||||
<https://certbot.eff.org/docs/contributing.html>`_.
|
||||
|
||||
This project is governed by `EFF's Public Projects Code of Conduct <https://www.eff.org/pages/eppcode>`_.
|
||||
|
||||
.. _installation:
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
The easiest way to install Certbot is by visiting `certbot.eff.org`_, where you can
|
||||
find the correct installation instructions for many web server and OS combinations.
|
||||
For more information, see `Get Certbot <https://certbot.eff.org/docs/install.html>`_.
|
||||
|
||||
.. _certbot.eff.org: https://certbot.eff.org/
|
||||
|
||||
How to run the client
|
||||
---------------------
|
||||
|
||||
In many cases, you can just run ``certbot-auto`` or ``certbot``, and the
|
||||
client will guide you through the process of obtaining and installing certs
|
||||
interactively.
|
||||
|
||||
For full command line help, you can type::
|
||||
|
||||
./certbot-auto --help all
|
||||
|
||||
|
||||
You can also tell it exactly what you want it to do from the command line.
|
||||
For instance, if you want to obtain a cert for ``example.com``,
|
||||
``www.example.com``, and ``other.example.net``, using the Apache plugin to both
|
||||
obtain and install the certs, you could do this::
|
||||
|
||||
./certbot-auto --apache -d example.com -d www.example.com -d other.example.net
|
||||
|
||||
(The first time you run the command, it will make an account, and ask for an
|
||||
email and agreement to the Let's Encrypt Subscriber Agreement; you can
|
||||
automate those with ``--email`` and ``--agree-tos``)
|
||||
|
||||
If you want to use a webserver that doesn't have full plugin support yet, you
|
||||
can still use "standalone" or "webroot" plugins to obtain a certificate::
|
||||
|
||||
./certbot-auto certonly --standalone --email admin@example.com -d example.com -d www.example.com -d other.example.net
|
||||
The easiest way to install and run Certbot is by visiting `certbot.eff.org`_,
|
||||
where you can find the correct instructions for many web server and OS
|
||||
combinations. For more information, see `Get Certbot
|
||||
<https://certbot.eff.org/docs/install.html>`_.
|
||||
|
||||
.. _certbot.eff.org: https://certbot.eff.org/
|
||||
|
||||
Understanding the client in more depth
|
||||
--------------------------------------
|
||||
|
||||
@@ -6,6 +6,7 @@ This module is an implementation of the `ACME protocol`_.
|
||||
|
||||
"""
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# This code exists to keep backwards compatibility with people using acme.jose
|
||||
# before it became the standalone josepy package.
|
||||
@@ -20,3 +21,30 @@ for mod in list(sys.modules):
|
||||
# preserved (acme.jose.* is josepy.*)
|
||||
if mod == 'josepy' or mod.startswith('josepy.'):
|
||||
sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]
|
||||
|
||||
|
||||
# This class takes a similar approach to the cryptography project to deprecate attributes
|
||||
# in public modules. See the _ModuleWithDeprecation class here:
|
||||
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
|
||||
class _TLSSNI01DeprecationModule(object):
|
||||
"""
|
||||
Internal class delegating to a module, and displaying warnings when
|
||||
attributes related to TLS-SNI-01 are accessed.
|
||||
"""
|
||||
def __init__(self, module):
|
||||
self.__dict__['_module'] = module
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if 'TLSSNI01' in attr:
|
||||
warnings.warn('{0} attribute is deprecated, and will be removed soon.'.format(attr),
|
||||
DeprecationWarning, stacklevel=2)
|
||||
return getattr(self._module, attr)
|
||||
|
||||
def __setattr__(self, attr, value): # pragma: no cover
|
||||
setattr(self._module, attr, value)
|
||||
|
||||
def __delattr__(self, attr): # pragma: no cover
|
||||
delattr(self._module, attr)
|
||||
|
||||
def __dir__(self): # pragma: no cover
|
||||
return ['_module'] + dir(self._module)
|
||||
|
||||
@@ -4,7 +4,7 @@ import functools
|
||||
import hashlib
|
||||
import logging
|
||||
import socket
|
||||
import warnings
|
||||
import sys
|
||||
|
||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||
import josepy as jose
|
||||
@@ -15,15 +15,13 @@ import six
|
||||
from acme import errors
|
||||
from acme import crypto_util
|
||||
from acme import fields
|
||||
from acme import _TLSSNI01DeprecationModule
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
|
||||
class Challenge(jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json | pylint: disable=abstract-method
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge."""
|
||||
TYPES = {} # type: dict
|
||||
|
||||
@@ -37,7 +35,7 @@ class Challenge(jose.TypedJSONObjectWithFields):
|
||||
|
||||
|
||||
class ChallengeResponse(jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json | pylint: disable=abstract-method
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge response."""
|
||||
TYPES = {} # type: dict
|
||||
resource_type = 'challenge'
|
||||
@@ -96,6 +94,7 @@ class _TokenChallenge(Challenge):
|
||||
"""
|
||||
# TODO: check that path combined with uri does not go above
|
||||
# URI_ROOT_PATH!
|
||||
# pylint: disable=unsupported-membership-test
|
||||
return b'..' not in self.token and b'/' not in self.token
|
||||
|
||||
|
||||
@@ -140,10 +139,14 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
|
||||
return True
|
||||
|
||||
def to_partial_json(self):
|
||||
jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
|
||||
jobj.pop('keyAuthorization', None)
|
||||
return jobj
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class KeyAuthorizationChallenge(_TokenChallenge):
|
||||
# pylint: disable=abstract-class-little-used,too-many-ancestors
|
||||
"""Challenge based on Key Authorization.
|
||||
|
||||
:param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
|
||||
@@ -175,7 +178,7 @@ class KeyAuthorizationChallenge(_TokenChallenge):
|
||||
:rtype: KeyAuthorizationChallengeResponse
|
||||
|
||||
"""
|
||||
return self.response_cls(
|
||||
return self.response_cls( # pylint: disable=not-callable
|
||||
key_authorization=self.key_authorization(account_key))
|
||||
|
||||
@abc.abstractmethod
|
||||
@@ -212,7 +215,7 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
|
||||
"""ACME dns-01 challenge response."""
|
||||
typ = "dns-01"
|
||||
|
||||
def simple_verify(self, chall, domain, account_public_key):
|
||||
def simple_verify(self, chall, domain, account_public_key): # pylint: disable=unused-argument
|
||||
"""Simple verify.
|
||||
|
||||
This method no longer checks DNS records and is a simple wrapper
|
||||
@@ -228,7 +231,6 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
# pylint: disable=unused-argument
|
||||
verified = self.verify(chall, account_public_key)
|
||||
if not verified:
|
||||
logger.debug("Verification of key authorization in response failed")
|
||||
@@ -433,7 +435,6 @@ class TLSSNI01Response(KeyAuthorizationChallengeResponse):
|
||||
kwargs.setdefault("port", self.PORT)
|
||||
kwargs["name"] = self.z_domain
|
||||
# TODO: try different methods?
|
||||
# pylint: disable=protected-access
|
||||
return crypto_util.probe_sni(**kwargs)
|
||||
|
||||
def verify_cert(self, cert):
|
||||
@@ -494,11 +495,6 @@ class TLSSNI01(KeyAuthorizationChallenge):
|
||||
# boulder#962, ietf-wg-acme#22
|
||||
#n = jose.Field("n", encoder=int, decoder=int)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
warnings.warn("TLS-SNI-01 is deprecated, and will stop working soon.",
|
||||
DeprecationWarning, stacklevel=2)
|
||||
super(TLSSNI01, self).__init__(*args, **kwargs)
|
||||
|
||||
def validation(self, account_key, **kwargs):
|
||||
"""Generate validation.
|
||||
|
||||
@@ -513,6 +509,17 @@ class TLSSNI01(KeyAuthorizationChallenge):
|
||||
return self.response(account_key).gen_cert(key=kwargs.get('cert_key'))
|
||||
|
||||
|
||||
@ChallengeResponse.register
|
||||
class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
"""ACME TLS-ALPN-01 challenge response.
|
||||
|
||||
This class only allows initiating a TLS-ALPN-01 challenge returned from the
|
||||
CA. Full support for responding to TLS-ALPN-01 challenges by generating and
|
||||
serving the expected response certificate is not currently provided.
|
||||
"""
|
||||
typ = "tls-alpn-01"
|
||||
|
||||
|
||||
@Challenge.register # pylint: disable=too-many-ancestors
|
||||
class TLSALPN01(KeyAuthorizationChallenge):
|
||||
"""ACME tls-alpn-01 challenge.
|
||||
@@ -522,13 +529,14 @@ class TLSALPN01(KeyAuthorizationChallenge):
|
||||
|
||||
"""
|
||||
typ = "tls-alpn-01"
|
||||
response_cls = TLSALPN01Response
|
||||
|
||||
def validation(self, account_key, **kwargs):
|
||||
"""Generate validation for the challenge."""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@Challenge.register # pylint: disable=too-many-ancestors
|
||||
@Challenge.register
|
||||
class DNS(_TokenChallenge):
|
||||
"""ACME "dns" challenge."""
|
||||
typ = "dns"
|
||||
@@ -609,3 +617,7 @@ class DNSResponse(ChallengeResponse):
|
||||
|
||||
"""
|
||||
return chall.check_validation(self.validation, account_public_key)
|
||||
|
||||
|
||||
# Patching ourselves to warn about TLS-SNI challenge deprecation and removal.
|
||||
sys.modules[__name__] = _TLSSNI01DeprecationModule(sys.modules[__name__])
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
"""Tests for acme.challenges."""
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
import josepy as jose
|
||||
import mock
|
||||
import OpenSSL
|
||||
import requests
|
||||
|
||||
from six.moves.urllib import parse as urllib_parse # pylint: disable=import-error
|
||||
from six.moves.urllib import parse as urllib_parse # pylint: disable=relative-import
|
||||
|
||||
from acme import errors
|
||||
from acme import test_util
|
||||
@@ -94,7 +93,8 @@ class DNS01ResponseTest(unittest.TestCase):
|
||||
self.response = self.chall.response(KEY)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.msg.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import DNS01Response
|
||||
@@ -165,7 +165,8 @@ class HTTP01ResponseTest(unittest.TestCase):
|
||||
self.response = self.chall.response(KEY)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.msg.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import HTTP01Response
|
||||
@@ -285,7 +286,8 @@ class TLSSNI01ResponseTest(unittest.TestCase):
|
||||
self.assertEqual(self.z_domain, self.response.z_domain)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg, self.response.to_partial_json())
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.response.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import TLSSNI01Response
|
||||
@@ -365,25 +367,16 @@ class TLSSNI01Test(unittest.TestCase):
|
||||
'type': 'tls-sni-01',
|
||||
'token': 'a82d5ff8ef740d12881f6d3c2277ab2e',
|
||||
}
|
||||
|
||||
def _msg(self):
|
||||
from acme.challenges import TLSSNI01
|
||||
with warnings.catch_warnings(record=True) as warn:
|
||||
warnings.simplefilter("always")
|
||||
msg = TLSSNI01(
|
||||
token=jose.b64decode('a82d5ff8ef740d12881f6d3c2277ab2e'))
|
||||
assert warn is not None # using a raw assert for mypy
|
||||
self.assertTrue(len(warn) == 1)
|
||||
self.assertTrue(issubclass(warn[-1].category, DeprecationWarning))
|
||||
self.assertTrue('deprecated' in str(warn[-1].message))
|
||||
return msg
|
||||
self.msg = TLSSNI01(
|
||||
token=jose.b64decode('a82d5ff8ef740d12881f6d3c2277ab2e'))
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg, self._msg().to_partial_json())
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import TLSSNI01
|
||||
self.assertEqual(self._msg(), TLSSNI01.from_json(self.jmsg))
|
||||
self.assertEqual(self.msg, TLSSNI01.from_json(self.jmsg))
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import TLSSNI01
|
||||
@@ -398,10 +391,46 @@ class TLSSNI01Test(unittest.TestCase):
|
||||
@mock.patch('acme.challenges.TLSSNI01Response.gen_cert')
|
||||
def test_validation(self, mock_gen_cert):
|
||||
mock_gen_cert.return_value = ('cert', 'key')
|
||||
self.assertEqual(('cert', 'key'), self._msg().validation(
|
||||
self.assertEqual(('cert', 'key'), self.msg.validation(
|
||||
KEY, cert_key=mock.sentinel.cert_key))
|
||||
mock_gen_cert.assert_called_once_with(key=mock.sentinel.cert_key)
|
||||
|
||||
def test_deprecation_message(self):
|
||||
with mock.patch('acme.warnings.warn') as mock_warn:
|
||||
from acme.challenges import TLSSNI01
|
||||
assert TLSSNI01
|
||||
self.assertEqual(mock_warn.call_count, 1)
|
||||
self.assertTrue('deprecated' in mock_warn.call_args[0][0])
|
||||
|
||||
|
||||
class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
def setUp(self):
|
||||
from acme.challenges import TLSALPN01Response
|
||||
self.msg = TLSALPN01Response(key_authorization=u'foo')
|
||||
self.jmsg = {
|
||||
'resource': 'challenge',
|
||||
'type': 'tls-alpn-01',
|
||||
'keyAuthorization': u'foo',
|
||||
}
|
||||
|
||||
from acme.challenges import TLSALPN01
|
||||
self.chall = TLSALPN01(token=(b'x' * 16))
|
||||
self.response = self.chall.response(KEY)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.msg.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import TLSALPN01Response
|
||||
self.assertEqual(self.msg, TLSALPN01Response.from_json(self.jmsg))
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import TLSALPN01Response
|
||||
hash(TLSALPN01Response.from_json(self.jmsg))
|
||||
|
||||
|
||||
class TLSALPN01Test(unittest.TestCase):
|
||||
|
||||
|
||||
@@ -6,16 +6,16 @@ from email.utils import parsedate_tz
|
||||
import heapq
|
||||
import logging
|
||||
import time
|
||||
import re
|
||||
import sys
|
||||
|
||||
import six
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
import re
|
||||
from requests_toolbelt.adapters.source import SourceAddressAdapter
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
import sys
|
||||
from requests_toolbelt.adapters.source import SourceAddressAdapter
|
||||
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
@@ -123,14 +123,20 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
|
||||
"""
|
||||
return self.update_registration(regr, update={'status': 'deactivated'})
|
||||
|
||||
def query_registration(self, regr):
|
||||
"""Query server about registration.
|
||||
def deactivate_authorization(self, authzr):
|
||||
# type: (messages.AuthorizationResource) -> messages.AuthorizationResource
|
||||
"""Deactivate authorization.
|
||||
|
||||
:param messages.RegistrationResource: Existing Registration
|
||||
Resource.
|
||||
:param messages.AuthorizationResource authzr: The Authorization resource
|
||||
to be deactivated.
|
||||
|
||||
:returns: The Authorization resource that was deactivated.
|
||||
:rtype: `.AuthorizationResource`
|
||||
|
||||
"""
|
||||
return self._send_recv_regr(regr, messages.UpdateRegistration())
|
||||
body = messages.UpdateAuthorization(status='deactivated')
|
||||
response = self._post(authzr.uri, body)
|
||||
return self._authzr_from_response(response)
|
||||
|
||||
def _authzr_from_response(self, response, identifier=None, uri=None):
|
||||
authzr = messages.AuthorizationResource(
|
||||
@@ -276,6 +282,15 @@ class Client(ClientBase):
|
||||
# pylint: disable=no-member
|
||||
return self._regr_from_response(response)
|
||||
|
||||
def query_registration(self, regr):
|
||||
"""Query server about registration.
|
||||
|
||||
:param messages.RegistrationResource: Existing Registration
|
||||
Resource.
|
||||
|
||||
"""
|
||||
return self._send_recv_regr(regr, messages.UpdateRegistration())
|
||||
|
||||
def agree_to_tos(self, regr):
|
||||
"""Agree to the terms-of-service.
|
||||
|
||||
@@ -603,10 +618,13 @@ class ClientV2(ClientBase):
|
||||
Resource.
|
||||
|
||||
"""
|
||||
self.net.account = regr
|
||||
updated_regr = super(ClientV2, self).query_registration(regr)
|
||||
self.net.account = updated_regr
|
||||
return updated_regr
|
||||
self.net.account = regr # See certbot/certbot#6258
|
||||
# ACME v2 requires to use a POST-as-GET request (POST an empty JWS) here.
|
||||
# This is done by passing None instead of an empty UpdateRegistration to _post().
|
||||
response = self._post(regr.uri, None)
|
||||
self.net.account = self._regr_from_response(response, uri=regr.uri,
|
||||
terms_of_service=regr.terms_of_service)
|
||||
return self.net.account
|
||||
|
||||
def update_registration(self, regr, update=None):
|
||||
"""Update registration.
|
||||
@@ -652,7 +670,7 @@ class ClientV2(ClientBase):
|
||||
response = self._post(self.directory['newOrder'], order)
|
||||
body = messages.Order.from_json(response.json())
|
||||
authorizations = []
|
||||
for url in body.authorizations:
|
||||
for url in body.authorizations: # pylint: disable=not-an-iterable
|
||||
authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
|
||||
return messages.OrderResource(
|
||||
body=body,
|
||||
@@ -714,7 +732,7 @@ class ClientV2(ClientBase):
|
||||
for chall in authzr.body.challenges:
|
||||
if chall.error != None:
|
||||
failed.append(authzr)
|
||||
if len(failed) > 0:
|
||||
if failed:
|
||||
raise errors.ValidationError(failed)
|
||||
return orderr.update(authorizations=responses)
|
||||
|
||||
@@ -739,8 +757,7 @@ class ClientV2(ClientBase):
|
||||
if body.error is not None:
|
||||
raise errors.IssuanceError(body.error)
|
||||
if body.certificate is not None:
|
||||
certificate_response = self._post_as_get(body.certificate,
|
||||
content_type=DER_CONTENT_TYPE).text
|
||||
certificate_response = self._post_as_get(body.certificate).text
|
||||
return orderr.update(body=body, fullchain_pem=certificate_response)
|
||||
raise errors.TimeoutError()
|
||||
|
||||
@@ -759,10 +776,7 @@ class ClientV2(ClientBase):
|
||||
|
||||
def external_account_required(self):
|
||||
"""Checks if ACME server requires External Account Binding authentication."""
|
||||
if hasattr(self.directory, 'meta') and self.directory.meta.external_account_required:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return hasattr(self.directory, 'meta') and self.directory.meta.external_account_required
|
||||
|
||||
def _post_as_get(self, *args, **kwargs):
|
||||
"""
|
||||
@@ -778,11 +792,11 @@ class ClientV2(ClientBase):
|
||||
# We add an empty payload for POST-as-GET requests
|
||||
new_args = args[:1] + (None,) + args[1:]
|
||||
try:
|
||||
return self._post(*new_args, **kwargs) # pylint: disable=star-args
|
||||
return self._post(*new_args, **kwargs)
|
||||
except messages.Error as error:
|
||||
if error.code == 'malformed':
|
||||
logger.debug('Error during a POST-as-GET request, '
|
||||
'your ACME CA may not support it:\n%s', error)
|
||||
'your ACME CA server may not support it:\n%s', error)
|
||||
logger.debug('Retrying request with GET.')
|
||||
else: # pragma: no cover
|
||||
raise
|
||||
@@ -866,8 +880,7 @@ class BackwardsCompatibleClientV2(object):
|
||||
for domain in dnsNames:
|
||||
authorizations.append(self.client.request_domain_challenges(domain))
|
||||
return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
|
||||
else:
|
||||
return self.client.new_order(csr_pem)
|
||||
return self.client.new_order(csr_pem)
|
||||
|
||||
def finalize_order(self, orderr, deadline):
|
||||
"""Finalize an order and obtain a certificate.
|
||||
@@ -904,8 +917,7 @@ class BackwardsCompatibleClientV2(object):
|
||||
chain = crypto_util.dump_pyopenssl_chain(chain).decode()
|
||||
|
||||
return orderr.update(fullchain_pem=(cert + chain))
|
||||
else:
|
||||
return self.client.finalize_order(orderr, deadline)
|
||||
return self.client.finalize_order(orderr, deadline)
|
||||
|
||||
def revoke(self, cert, rsn):
|
||||
"""Revoke certificate.
|
||||
@@ -923,8 +935,7 @@ class BackwardsCompatibleClientV2(object):
|
||||
def _acme_version_from_directory(self, directory):
|
||||
if hasattr(directory, 'newNonce'):
|
||||
return 2
|
||||
else:
|
||||
return 1
|
||||
return 1
|
||||
|
||||
def external_account_required(self):
|
||||
"""Checks if the server requires an external account for ACMEv2 servers.
|
||||
@@ -932,8 +943,7 @@ class BackwardsCompatibleClientV2(object):
|
||||
Always return False for ACMEv1 servers, as it doesn't use External Account Binding."""
|
||||
if self.acme_version == 1:
|
||||
return False
|
||||
else:
|
||||
return self.client.external_account_required()
|
||||
return self.client.external_account_required()
|
||||
|
||||
|
||||
class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
@@ -1011,7 +1021,6 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
if self.account is not None:
|
||||
kwargs["kid"] = self.account["uri"]
|
||||
kwargs["key"] = self.key
|
||||
# pylint: disable=star-args
|
||||
return jws.JWS.sign(jobj, **kwargs).json_dumps(indent=2)
|
||||
|
||||
@classmethod
|
||||
@@ -1192,10 +1201,7 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
|
||||
def _post_once(self, url, obj, content_type=JOSE_CONTENT_TYPE,
|
||||
acme_version=1, **kwargs):
|
||||
try:
|
||||
new_nonce_url = kwargs.pop('new_nonce_url')
|
||||
except KeyError:
|
||||
new_nonce_url = None
|
||||
new_nonce_url = kwargs.pop('new_nonce_url', None)
|
||||
data = self._wrap_in_jws(obj, self._get_nonce(url, new_nonce_url), url, acme_version)
|
||||
kwargs.setdefault('headers', {'Content-Type': content_type})
|
||||
response = self._send_request('POST', url, data=data, **kwargs)
|
||||
|
||||
@@ -64,7 +64,7 @@ class ClientTestBase(unittest.TestCase):
|
||||
reg = messages.Registration(
|
||||
contact=self.contact, key=KEY.public_key())
|
||||
the_arg = dict(reg) # type: Dict
|
||||
self.new_reg = messages.NewRegistration(**the_arg) # pylint: disable=star-args
|
||||
self.new_reg = messages.NewRegistration(**the_arg)
|
||||
self.regr = messages.RegistrationResource(
|
||||
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
|
||||
|
||||
@@ -358,7 +358,6 @@ class ClientTest(ClientTestBase):
|
||||
|
||||
def test_register(self):
|
||||
# "Instance of 'Field' has no to_json/update member" bug:
|
||||
# pylint: disable=no-member
|
||||
self.response.status_code = http_client.CREATED
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
@@ -371,7 +370,6 @@ class ClientTest(ClientTestBase):
|
||||
|
||||
def test_update_registration(self):
|
||||
# "Instance of 'Field' has no to_json/update member" bug:
|
||||
# pylint: disable=no-member
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.assertEqual(self.regr, self.client.update_registration(self.regr))
|
||||
@@ -639,6 +637,14 @@ class ClientTest(ClientTestBase):
|
||||
errors.PollError, self.client.poll_and_request_issuance,
|
||||
csr, authzrs, mintime=mintime, max_attempts=2)
|
||||
|
||||
def test_deactivate_authorization(self):
|
||||
authzb = self.authzr.body.update(status=messages.STATUS_DEACTIVATED)
|
||||
self.response.json.return_value = authzb.to_json()
|
||||
authzr = self.client.deactivate_authorization(self.authzr)
|
||||
self.assertEqual(authzb, authzr.body)
|
||||
self.assertEqual(self.client.net.post.call_count, 1)
|
||||
self.assertTrue(self.authzr.uri in self.net.post.call_args_list[0][0])
|
||||
|
||||
def test_check_cert(self):
|
||||
self.response.headers['Location'] = self.certr.uri
|
||||
self.response.content = CERT_DER
|
||||
@@ -844,7 +850,6 @@ class ClientV2Test(ClientTestBase):
|
||||
|
||||
def test_update_registration(self):
|
||||
# "Instance of 'Field' has no to_json/update member" bug:
|
||||
# pylint: disable=no-member
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.assertEqual(self.regr, self.client.update_registration(self.regr))
|
||||
@@ -906,7 +911,7 @@ class MockJSONDeSerializable(jose.JSONDeSerializable):
|
||||
return {'foo': self.value}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, value):
|
||||
def from_json(cls, jobj):
|
||||
pass # pragma: no cover
|
||||
|
||||
|
||||
|
||||
@@ -18,17 +18,14 @@ from acme.magic_typing import Callable, Union, Tuple, Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# TLSSNI01 certificate serving and probing is not affected by SSL
|
||||
# vulnerabilities: prober needs to check certificate for expected
|
||||
# contents anyway. Working SNI is the only thing that's necessary for
|
||||
# the challenge and thus scoping down SSL/TLS method (version) would
|
||||
# cause interoperability issues: TLSv1_METHOD is only compatible with
|
||||
# Default SSL method selected here is the most compatible, while secure
|
||||
# SSL method: TLSv1_METHOD is only compatible with
|
||||
# TLSv1_METHOD, while SSLv23_METHOD is compatible with all other
|
||||
# methods, including TLSv2_METHOD (read more at
|
||||
# https://www.openssl.org/docs/ssl/SSLv23_method.html). _serve_sni
|
||||
# should be changed to use "set_options" to disable SSLv2 and SSLv3,
|
||||
# in case it's used for things other than probing/serving!
|
||||
_DEFAULT_TLSSNI01_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
|
||||
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
|
||||
|
||||
|
||||
class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
@@ -40,7 +37,7 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
:ivar method: See `OpenSSL.SSL.Context` for allowed values.
|
||||
|
||||
"""
|
||||
def __init__(self, sock, certs, method=_DEFAULT_TLSSNI01_SSL_METHOD):
|
||||
def __init__(self, sock, certs, method=_DEFAULT_SSL_METHOD):
|
||||
self.sock = sock
|
||||
self.certs = certs
|
||||
self.method = method
|
||||
@@ -112,7 +109,7 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
|
||||
|
||||
def probe_sni(name, host, port=443, timeout=300,
|
||||
method=_DEFAULT_TLSSNI01_SSL_METHOD, source_address=('', 0)):
|
||||
method=_DEFAULT_SSL_METHOD, source_address=('', 0)):
|
||||
"""Probe SNI server for SSL certificate.
|
||||
|
||||
:param bytes name: Byte string to send as the server name in the
|
||||
@@ -137,7 +134,6 @@ def probe_sni(name, host, port=443, timeout=300,
|
||||
socket_kwargs = {'source_address': source_address}
|
||||
|
||||
try:
|
||||
# pylint: disable=star-args
|
||||
logger.debug(
|
||||
"Attempting to connect to %s:%d%s.", host, port,
|
||||
" from {0}:{1}".format(
|
||||
@@ -198,8 +194,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
|
||||
|
||||
if common_name is None:
|
||||
return sans
|
||||
else:
|
||||
return [common_name] + [d for d in sans if d != common_name]
|
||||
return [common_name] + [d for d in sans if d != common_name]
|
||||
|
||||
def _pyopenssl_cert_or_req_san(cert_or_req):
|
||||
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
"""ACME protocol messages."""
|
||||
import six
|
||||
import json
|
||||
import six
|
||||
try:
|
||||
from collections.abc import Hashable # pylint: disable=no-name-in-module
|
||||
except ImportError:
|
||||
except ImportError: # pragma: no cover
|
||||
from collections import Hashable
|
||||
|
||||
import josepy as jose
|
||||
@@ -18,20 +18,35 @@ OLD_ERROR_PREFIX = "urn:acme:error:"
|
||||
ERROR_PREFIX = "urn:ietf:params:acme:error:"
|
||||
|
||||
ERROR_CODES = {
|
||||
'accountDoesNotExist': 'The request specified an account that does not exist',
|
||||
'alreadyRevoked': 'The request specified a certificate to be revoked that has' \
|
||||
' already been revoked',
|
||||
'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
|
||||
'badNonce': 'The client sent an unacceptable anti-replay nonce',
|
||||
'badPublicKey': 'The JWS was signed by a public key the server does not support',
|
||||
'badRevocationReason': 'The revocation reason provided is not allowed by the server',
|
||||
'badSignatureAlgorithm': 'The JWS was signed with an algorithm the server does not support',
|
||||
'caa': 'Certification Authority Authorization (CAA) records forbid the CA from issuing' \
|
||||
' a certificate',
|
||||
'compound': 'Specific error conditions are indicated in the "subproblems" array',
|
||||
'connection': ('The server could not connect to the client to verify the'
|
||||
' domain'),
|
||||
'dns': 'There was a problem with a DNS query during identifier validation',
|
||||
'dnssec': 'The server could not validate a DNSSEC signed domain',
|
||||
'incorrectResponse': 'Response received didn\'t match the challenge\'s requirements',
|
||||
# deprecate invalidEmail
|
||||
'invalidEmail': 'The provided email for a registration was invalid',
|
||||
'invalidContact': 'The provided contact URI was invalid',
|
||||
'malformed': 'The request message was malformed',
|
||||
'rejectedIdentifier': 'The server will not issue certificates for the identifier',
|
||||
'orderNotReady': 'The request attempted to finalize an order that is not ready to be finalized',
|
||||
'rateLimited': 'There were too many requests of a given type',
|
||||
'serverInternal': 'The server experienced an internal error',
|
||||
'tls': 'The server experienced a TLS error during domain verification',
|
||||
'unauthorized': 'The client lacks sufficient authorization',
|
||||
'unsupportedContact': 'A contact URL for an account used an unsupported protocol scheme',
|
||||
'unknownHost': 'The server could not resolve a domain name',
|
||||
'unsupportedIdentifier': 'An identifier is of an unsupported type',
|
||||
'externalAccountRequired': 'The server requires external account binding',
|
||||
}
|
||||
|
||||
@@ -46,8 +61,7 @@ def is_acme_error(err):
|
||||
"""Check if argument is an ACME error."""
|
||||
if isinstance(err, Error) and (err.typ is not None):
|
||||
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
|
||||
else:
|
||||
return False
|
||||
return False
|
||||
|
||||
|
||||
@six.python_2_unicode_compatible
|
||||
@@ -102,6 +116,7 @@ class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
code = str(self.typ).split(':')[-1]
|
||||
if code in ERROR_CODES:
|
||||
return code
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
return b' :: '.join(
|
||||
@@ -116,18 +131,19 @@ class _Constant(jose.JSONDeSerializable, Hashable): # type: ignore
|
||||
POSSIBLE_NAMES = NotImplemented
|
||||
|
||||
def __init__(self, name):
|
||||
self.POSSIBLE_NAMES[name] = self
|
||||
super(_Constant, self).__init__()
|
||||
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
|
||||
self.name = name
|
||||
|
||||
def to_partial_json(self):
|
||||
return self.name
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, value):
|
||||
if value not in cls.POSSIBLE_NAMES:
|
||||
def from_json(cls, jobj):
|
||||
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
|
||||
raise jose.DeserializationError(
|
||||
'{0} not recognized'.format(cls.__name__))
|
||||
return cls.POSSIBLE_NAMES[value]
|
||||
return cls.POSSIBLE_NAMES[jobj] # pylint: disable=unsubscriptable-object
|
||||
|
||||
def __repr__(self):
|
||||
return '{0}({1})'.format(self.__class__.__name__, self.name)
|
||||
@@ -152,6 +168,7 @@ STATUS_VALID = Status('valid')
|
||||
STATUS_INVALID = Status('invalid')
|
||||
STATUS_REVOKED = Status('revoked')
|
||||
STATUS_READY = Status('ready')
|
||||
STATUS_DEACTIVATED = Status('deactivated')
|
||||
|
||||
|
||||
class IdentifierType(_Constant):
|
||||
@@ -186,7 +203,6 @@ class Directory(jose.JSONDeSerializable):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
|
||||
# pylint: disable=star-args
|
||||
super(Directory.Meta, self).__init__(**kwargs)
|
||||
|
||||
@property
|
||||
@@ -322,7 +338,7 @@ class Registration(ResourceBody):
|
||||
|
||||
def _filter_contact(self, prefix):
|
||||
return tuple(
|
||||
detail[len(prefix):] for detail in self.contact
|
||||
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
|
||||
if detail.startswith(prefix))
|
||||
|
||||
@property
|
||||
@@ -394,7 +410,6 @@ class ChallengeBody(ResourceBody):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
|
||||
# pylint: disable=star-args
|
||||
super(ChallengeBody, self).__init__(**kwargs)
|
||||
|
||||
def encode(self, name):
|
||||
@@ -457,7 +472,7 @@ class Authorization(ResourceBody):
|
||||
:ivar datetime.datetime expires:
|
||||
|
||||
"""
|
||||
identifier = jose.Field('identifier', decoder=Identifier.from_json)
|
||||
identifier = jose.Field('identifier', decoder=Identifier.from_json, omitempty=True)
|
||||
challenges = jose.Field('challenges', omitempty=True)
|
||||
combinations = jose.Field('combinations', omitempty=True)
|
||||
|
||||
@@ -477,7 +492,7 @@ class Authorization(ResourceBody):
|
||||
def resolved_combinations(self):
|
||||
"""Combinations with challenges instead of indices."""
|
||||
return tuple(tuple(self.challenges[idx] for idx in combo)
|
||||
for combo in self.combinations)
|
||||
for combo in self.combinations) # pylint: disable=not-an-iterable
|
||||
|
||||
|
||||
@Directory.register
|
||||
@@ -487,6 +502,12 @@ class NewAuthorization(Authorization):
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class UpdateAuthorization(Authorization):
|
||||
"""Update authorization."""
|
||||
resource_type = 'authz'
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class AuthorizationResource(ResourceWithURI):
|
||||
"""Authorization Resource.
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ import OpenSSL
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
from acme import _TLSSNI01DeprecationModule
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -37,7 +38,7 @@ class TLSServer(socketserver.TCPServer):
|
||||
self.certs = kwargs.pop("certs", {})
|
||||
self.method = kwargs.pop(
|
||||
# pylint: disable=protected-access
|
||||
"method", crypto_util._DEFAULT_TLSSNI01_SSL_METHOD)
|
||||
"method", crypto_util._DEFAULT_SSL_METHOD)
|
||||
self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
|
||||
socketserver.TCPServer.__init__(self, *args, **kwargs)
|
||||
|
||||
@@ -82,7 +83,7 @@ class BaseDualNetworkedServers(object):
|
||||
kwargs["ipv6"] = ip_version
|
||||
new_address = (server_address[0],) + (port,) + server_address[2:]
|
||||
new_args = (new_address,) + remaining_args
|
||||
server = ServerClass(*new_args, **kwargs) # pylint: disable=star-args
|
||||
server = ServerClass(*new_args, **kwargs)
|
||||
logger.debug(
|
||||
"Successfully bound to %s:%s using %s", new_address[0],
|
||||
new_address[1], "IPv6" if ip_version else "IPv4")
|
||||
@@ -90,8 +91,8 @@ class BaseDualNetworkedServers(object):
|
||||
if self.servers:
|
||||
# Already bound using IPv6.
|
||||
logger.debug(
|
||||
"Certbot wasn't able to bind to %s:%s using %s, this " +
|
||||
"is often expected due to the dual stack nature of " +
|
||||
"Certbot wasn't able to bind to %s:%s using %s, this "
|
||||
"is often expected due to the dual stack nature of "
|
||||
"IPv6 socket implementations.",
|
||||
new_address[0], new_address[1],
|
||||
"IPv6" if ip_version else "IPv4")
|
||||
@@ -104,7 +105,7 @@ class BaseDualNetworkedServers(object):
|
||||
# If two servers are set up and port 0 was passed in, ensure we always
|
||||
# bind to the same port for both servers.
|
||||
port = server.socket.getsockname()[1]
|
||||
if len(self.servers) == 0:
|
||||
if not self.servers:
|
||||
raise socket.error("Could not bind to IPv4 or IPv6.")
|
||||
|
||||
def serve_forever(self):
|
||||
@@ -296,5 +297,9 @@ def simple_tls_sni_01_server(cli_args, forever=True):
|
||||
server.handle_request()
|
||||
|
||||
|
||||
# Patching ourselves to warn about TLS-SNI challenge deprecation and removal.
|
||||
sys.modules[__name__] = _TLSSNI01DeprecationModule(sys.modules[__name__])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(simple_tls_sni_01_server(sys.argv)) # pragma: no cover
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
"""Tests for acme.standalone."""
|
||||
import multiprocessing
|
||||
import os
|
||||
import shutil
|
||||
import socket
|
||||
import threading
|
||||
import tempfile
|
||||
import unittest
|
||||
import time
|
||||
from contextlib import closing
|
||||
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
from six.moves import queue # pylint: disable=import-error
|
||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||
|
||||
import josepy as jose
|
||||
@@ -16,6 +18,7 @@ import requests
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import test_util
|
||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
@@ -28,14 +31,14 @@ class TLSServerTest(unittest.TestCase):
|
||||
from acme.standalone import TLSServer
|
||||
server = TLSServer(
|
||||
('', 0), socketserver.BaseRequestHandler, bind_and_activate=True)
|
||||
server.server_close() # pylint: disable=no-member
|
||||
server.server_close()
|
||||
|
||||
def test_ipv6(self):
|
||||
if socket.has_ipv6:
|
||||
from acme.standalone import TLSServer
|
||||
server = TLSServer(
|
||||
('', 0), socketserver.BaseRequestHandler, bind_and_activate=True, ipv6=True)
|
||||
server.server_close() # pylint: disable=no-member
|
||||
server.server_close()
|
||||
|
||||
|
||||
class TLSSNI01ServerTest(unittest.TestCase):
|
||||
@@ -49,12 +52,11 @@ class TLSSNI01ServerTest(unittest.TestCase):
|
||||
)}
|
||||
from acme.standalone import TLSSNI01Server
|
||||
self.server = TLSSNI01Server(('localhost', 0), certs=self.certs)
|
||||
# pylint: disable=no-member
|
||||
self.thread = threading.Thread(target=self.server.serve_forever)
|
||||
self.thread.start()
|
||||
|
||||
def tearDown(self):
|
||||
self.server.shutdown() # pylint: disable=no-member
|
||||
self.server.shutdown()
|
||||
self.thread.join()
|
||||
|
||||
def test_it(self):
|
||||
@@ -77,13 +79,12 @@ class HTTP01ServerTest(unittest.TestCase):
|
||||
from acme.standalone import HTTP01Server
|
||||
self.server = HTTP01Server(('', 0), resources=self.resources)
|
||||
|
||||
# pylint: disable=no-member
|
||||
self.port = self.server.socket.getsockname()[1]
|
||||
self.thread = threading.Thread(target=self.server.serve_forever)
|
||||
self.thread.start()
|
||||
|
||||
def tearDown(self):
|
||||
self.server.shutdown() # pylint: disable=no-member
|
||||
self.server.shutdown()
|
||||
self.thread.join()
|
||||
|
||||
def test_index(self):
|
||||
@@ -136,7 +137,6 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
# NB: On Windows, socket.IPPROTO_IPV6 constant may be missing.
|
||||
# We use the corresponding value (41) instead.
|
||||
level = getattr(socket, "IPPROTO_IPV6", 41)
|
||||
# pylint: disable=no-member
|
||||
self.socket.setsockopt(level, socket.IPV6_V6ONLY, 1)
|
||||
try:
|
||||
self.server_bind()
|
||||
@@ -209,7 +209,6 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
from acme.standalone import HTTP01DualNetworkedServers
|
||||
self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources)
|
||||
|
||||
# pylint: disable=no-member
|
||||
self.port = self.servers.getsocknames()[0][1]
|
||||
self.servers.serve_forever()
|
||||
|
||||
@@ -248,7 +247,6 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
self.assertFalse(self._test_http01(add=False))
|
||||
|
||||
|
||||
@test_util.broken_on_windows
|
||||
class TestSimpleTLSSNI01Server(unittest.TestCase):
|
||||
"""Tests for acme.standalone.simple_tls_sni_01_server."""
|
||||
|
||||
@@ -263,35 +261,45 @@ class TestSimpleTLSSNI01Server(unittest.TestCase):
|
||||
shutil.copy(test_util.vector_path('rsa2048_key.pem'),
|
||||
os.path.join(localhost_dir, 'key.pem'))
|
||||
|
||||
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
|
||||
sock.bind(('', 0))
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
self.port = sock.getsockname()[1]
|
||||
|
||||
from acme.standalone import simple_tls_sni_01_server
|
||||
self.thread = threading.Thread(
|
||||
target=simple_tls_sni_01_server, kwargs={
|
||||
'cli_args': ('filename',),
|
||||
'forever': False,
|
||||
},
|
||||
)
|
||||
self.process = multiprocessing.Process(target=simple_tls_sni_01_server,
|
||||
args=(['path', '-p', str(self.port)],))
|
||||
self.old_cwd = os.getcwd()
|
||||
os.chdir(self.test_cwd)
|
||||
|
||||
def tearDown(self):
|
||||
os.chdir(self.old_cwd)
|
||||
self.thread.join()
|
||||
if self.process.is_alive():
|
||||
self.process.terminate()
|
||||
self.process.join(timeout=5)
|
||||
# Check that we didn't timeout waiting for the process to
|
||||
# terminate.
|
||||
self.assertNotEqual(self.process.exitcode, None)
|
||||
shutil.rmtree(self.test_cwd)
|
||||
|
||||
@mock.patch('acme.standalone.logger')
|
||||
def test_it(self, mock_logger):
|
||||
# Use a Queue because mock objects aren't thread safe.
|
||||
q = queue.Queue() # type: queue.Queue[int]
|
||||
# Add port number to the queue.
|
||||
mock_logger.info.side_effect = lambda *args: q.put(args[-1])
|
||||
self.thread.start()
|
||||
@mock.patch('acme.standalone.TLSSNI01Server.handle_request')
|
||||
def test_mock(self, handle):
|
||||
from acme.standalone import simple_tls_sni_01_server
|
||||
simple_tls_sni_01_server(cli_args=['path', '-p', str(self.port)], forever=False)
|
||||
self.assertEqual(handle.call_count, 1)
|
||||
|
||||
# After the timeout, an exception is raised if the queue is empty.
|
||||
port = q.get(timeout=5)
|
||||
cert = crypto_util.probe_sni(b'localhost', b'0.0.0.0', port)
|
||||
def test_live(self):
|
||||
self.process.start()
|
||||
cert = None
|
||||
for _ in range(50):
|
||||
time.sleep(0.1)
|
||||
try:
|
||||
cert = crypto_util.probe_sni(b'localhost', b'127.0.0.1', self.port)
|
||||
break
|
||||
except errors.Error: # pragma: no cover
|
||||
pass
|
||||
self.assertEqual(jose.ComparableX509(cert),
|
||||
test_util.load_comparable_cert(
|
||||
'rsa2048_cert.pem'))
|
||||
test_util.load_comparable_cert('rsa2048_cert.pem'))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -4,9 +4,8 @@
|
||||
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
import pkg_resources
|
||||
import unittest
|
||||
import pkg_resources
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
@@ -93,13 +92,4 @@ def skip_unless(condition, reason): # pragma: no cover
|
||||
return unittest.skipUnless(condition, reason)
|
||||
elif condition:
|
||||
return lambda cls: cls
|
||||
else:
|
||||
return lambda cls: None
|
||||
|
||||
def broken_on_windows(function):
|
||||
"""Decorator to skip temporarily a broken test on Windows."""
|
||||
reason = 'Test is broken and ignored on windows but should be fixed.'
|
||||
return unittest.skipIf(
|
||||
sys.platform == 'win32'
|
||||
and os.environ.get('SKIP_BROKEN_TESTS_ON_WINDOWS', 'true') == 'true',
|
||||
reason)(function)
|
||||
return lambda cls: None
|
||||
|
||||
240
acme/examples/http01_example.py
Normal file
@@ -0,0 +1,240 @@
|
||||
"""Example ACME-V2 API for HTTP-01 challenge.
|
||||
|
||||
Brief:
|
||||
|
||||
This a complete usage example of the python-acme API.
|
||||
|
||||
Limitations of this example:
|
||||
- Works for only one Domain name
|
||||
- Performs only HTTP-01 challenge
|
||||
- Uses ACME-v2
|
||||
|
||||
Workflow:
|
||||
(Account creation)
|
||||
- Create account key
|
||||
- Register account and accept TOS
|
||||
(Certificate actions)
|
||||
- Select HTTP-01 within offered challenges by the CA server
|
||||
- Set up http challenge resource
|
||||
- Set up standalone web server
|
||||
- Create domain private key and CSR
|
||||
- Issue certificate
|
||||
- Renew certificate
|
||||
- Revoke certificate
|
||||
(Account update actions)
|
||||
- Change contact information
|
||||
- Deactivate Account
|
||||
"""
|
||||
from contextlib import contextmanager
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||
import OpenSSL
|
||||
|
||||
from acme import challenges
|
||||
from acme import client
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import messages
|
||||
from acme import standalone
|
||||
import josepy as jose
|
||||
|
||||
# Constants:
|
||||
|
||||
# This is the staging point for ACME-V2 within Let's Encrypt.
|
||||
DIRECTORY_URL = 'https://acme-staging-v02.api.letsencrypt.org/directory'
|
||||
|
||||
USER_AGENT = 'python-acme-example'
|
||||
|
||||
# Account key size
|
||||
ACC_KEY_BITS = 2048
|
||||
|
||||
# Certificate private key size
|
||||
CERT_PKEY_BITS = 2048
|
||||
|
||||
# Domain name for the certificate.
|
||||
DOMAIN = 'client.example.com'
|
||||
|
||||
# If you are running Boulder locally, it is possible to configure any port
|
||||
# number to execute the challenge, but real CA servers will always use port
|
||||
# 80, as described in the ACME specification.
|
||||
PORT = 80
|
||||
|
||||
|
||||
# Useful methods and classes:
|
||||
|
||||
|
||||
def new_csr_comp(domain_name, pkey_pem=None):
|
||||
"""Create certificate signing request."""
|
||||
if pkey_pem is None:
|
||||
# Create private key.
|
||||
pkey = OpenSSL.crypto.PKey()
|
||||
pkey.generate_key(OpenSSL.crypto.TYPE_RSA, CERT_PKEY_BITS)
|
||||
pkey_pem = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
|
||||
pkey)
|
||||
csr_pem = crypto_util.make_csr(pkey_pem, [domain_name])
|
||||
return pkey_pem, csr_pem
|
||||
|
||||
|
||||
def select_http01_chall(orderr):
|
||||
"""Extract authorization resource from within order resource."""
|
||||
# Authorization Resource: authz.
|
||||
# This object holds the offered challenges by the server and their status.
|
||||
authz_list = orderr.authorizations
|
||||
|
||||
for authz in authz_list:
|
||||
# Choosing challenge.
|
||||
# authz.body.challenges is a set of ChallengeBody objects.
|
||||
for i in authz.body.challenges:
|
||||
# Find the supported challenge.
|
||||
if isinstance(i.chall, challenges.HTTP01):
|
||||
return i
|
||||
|
||||
raise Exception('HTTP-01 challenge was not offered by the CA server.')
|
||||
|
||||
|
||||
@contextmanager
|
||||
def challenge_server(http_01_resources):
|
||||
"""Manage standalone server set up and shutdown."""
|
||||
|
||||
# Setting up a fake server that binds at PORT and any address.
|
||||
address = ('', PORT)
|
||||
try:
|
||||
servers = standalone.HTTP01DualNetworkedServers(address,
|
||||
http_01_resources)
|
||||
# Start client standalone web server.
|
||||
servers.serve_forever()
|
||||
yield servers
|
||||
finally:
|
||||
# Shutdown client web server and unbind from PORT
|
||||
servers.shutdown_and_server_close()
|
||||
|
||||
|
||||
def perform_http01(client_acme, challb, orderr):
|
||||
"""Set up standalone webserver and perform HTTP-01 challenge."""
|
||||
|
||||
response, validation = challb.response_and_validation(client_acme.net.key)
|
||||
|
||||
resource = standalone.HTTP01RequestHandler.HTTP01Resource(
|
||||
chall=challb.chall, response=response, validation=validation)
|
||||
|
||||
with challenge_server({resource}):
|
||||
# Let the CA server know that we are ready for the challenge.
|
||||
client_acme.answer_challenge(challb, response)
|
||||
|
||||
# Wait for challenge status and then issue a certificate.
|
||||
# It is possible to set a deadline time.
|
||||
finalized_orderr = client_acme.poll_and_finalize(orderr)
|
||||
|
||||
return finalized_orderr.fullchain_pem
|
||||
|
||||
|
||||
# Main examples:
|
||||
|
||||
|
||||
def example_http():
|
||||
"""This example executes the whole process of fulfilling a HTTP-01
|
||||
challenge for one specific domain.
|
||||
|
||||
The workflow consists of:
|
||||
(Account creation)
|
||||
- Create account key
|
||||
- Register account and accept TOS
|
||||
(Certificate actions)
|
||||
- Select HTTP-01 within offered challenges by the CA server
|
||||
- Set up http challenge resource
|
||||
- Set up standalone web server
|
||||
- Create domain private key and CSR
|
||||
- Issue certificate
|
||||
- Renew certificate
|
||||
- Revoke certificate
|
||||
(Account update actions)
|
||||
- Change contact information
|
||||
- Deactivate Account
|
||||
|
||||
"""
|
||||
# Create account key
|
||||
|
||||
acc_key = jose.JWKRSA(
|
||||
key=rsa.generate_private_key(public_exponent=65537,
|
||||
key_size=ACC_KEY_BITS,
|
||||
backend=default_backend()))
|
||||
|
||||
# Register account and accept TOS
|
||||
|
||||
net = client.ClientNetwork(acc_key, user_agent=USER_AGENT)
|
||||
directory = messages.Directory.from_json(net.get(DIRECTORY_URL).json())
|
||||
client_acme = client.ClientV2(directory, net=net)
|
||||
|
||||
# Terms of Service URL is in client_acme.directory.meta.terms_of_service
|
||||
# Registration Resource: regr
|
||||
# Creates account with contact information.
|
||||
email = ('fake@example.com')
|
||||
regr = client_acme.new_account(
|
||||
messages.NewRegistration.from_data(
|
||||
email=email, terms_of_service_agreed=True))
|
||||
|
||||
# Create domain private key and CSR
|
||||
pkey_pem, csr_pem = new_csr_comp(DOMAIN)
|
||||
|
||||
# Issue certificate
|
||||
|
||||
orderr = client_acme.new_order(csr_pem)
|
||||
|
||||
# Select HTTP-01 within offered challenges by the CA server
|
||||
challb = select_http01_chall(orderr)
|
||||
|
||||
# The certificate is ready to be used in the variable "fullchain_pem".
|
||||
fullchain_pem = perform_http01(client_acme, challb, orderr)
|
||||
|
||||
# Renew certificate
|
||||
|
||||
_, csr_pem = new_csr_comp(DOMAIN, pkey_pem)
|
||||
|
||||
orderr = client_acme.new_order(csr_pem)
|
||||
|
||||
challb = select_http01_chall(orderr)
|
||||
|
||||
# Performing challenge
|
||||
fullchain_pem = perform_http01(client_acme, challb, orderr)
|
||||
|
||||
# Revoke certificate
|
||||
|
||||
fullchain_com = jose.ComparableX509(
|
||||
OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM, fullchain_pem))
|
||||
|
||||
try:
|
||||
client_acme.revoke(fullchain_com, 0) # revocation reason = 0
|
||||
except errors.ConflictError:
|
||||
# Certificate already revoked.
|
||||
pass
|
||||
|
||||
# Query registration status.
|
||||
client_acme.net.account = regr
|
||||
try:
|
||||
regr = client_acme.query_registration(regr)
|
||||
except errors.Error as err:
|
||||
if err.typ == messages.OLD_ERROR_PREFIX + 'unauthorized' \
|
||||
or err.typ == messages.ERROR_PREFIX + 'unauthorized':
|
||||
# Status is deactivated.
|
||||
pass
|
||||
raise
|
||||
|
||||
# Change contact information
|
||||
|
||||
email = 'newfake@example.com'
|
||||
regr = client_acme.update_registration(
|
||||
regr.update(
|
||||
body=regr.body.update(
|
||||
contact=('mailto:' + email,)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# Deactivate account/registration
|
||||
|
||||
regr = client_acme.deactivate_registration(regr)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
example_http()
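If you want to keep the issued material around, a minimal sketch of writing it to disk (assumes the pkey_pem bytes and fullchain_pem string produced inside example_http(); the file names are arbitrary):
# Hedged sketch: persist the key and chain where a web server could read them.
with open("privkey.pem", "wb") as key_file:
    key_file.write(pkey_pem)
with open("fullchain.pem", "w") as chain_file:
    chain_file.write(fullchain_pem)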
|
||||
@@ -3,7 +3,7 @@ from setuptools import find_packages
|
||||
from setuptools.command.test import test as TestCommand
|
||||
import sys
|
||||
|
||||
version = '0.31.0.dev0'
|
||||
version = '0.40.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
@@ -11,7 +11,9 @@ install_requires = [
|
||||
# rsa_recover_prime_factors (>=0.8)
|
||||
'cryptography>=1.2.3',
|
||||
# formerly known as acme.jose:
|
||||
'josepy>=1.0.0',
|
||||
# 1.1.0+ is required to avoid the warnings described at
|
||||
# https://github.com/certbot/josepy/issues/13.
|
||||
'josepy>=1.1.0',
|
||||
# Connection.set_tlsext_host_name (>=0.13)
|
||||
'mock',
|
||||
'PyOpenSSL>=0.13.1',
|
||||
@@ -34,6 +36,7 @@ docs_extras = [
|
||||
'sphinx_rtd_theme',
|
||||
]
|
||||
|
||||
|
||||
class PyTest(TestCommand):
|
||||
user_options = []
|
||||
|
||||
@@ -48,6 +51,7 @@ class PyTest(TestCommand):
|
||||
errno = pytest.main(shlex.split(self.pytest_args))
|
||||
sys.exit(errno)
|
||||
|
||||
|
||||
setup(
|
||||
name='acme',
|
||||
version=version,
|
||||
@@ -69,6 +73,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
],
|
||||
@@ -80,7 +85,7 @@ setup(
|
||||
'dev': dev_extras,
|
||||
'docs': docs_extras,
|
||||
},
|
||||
tests_require=["pytest"],
|
||||
test_suite='acme',
|
||||
tests_require=["pytest"],
|
||||
cmdclass={"test": PyTest},
|
||||
)
|
||||
|
||||
appveyor.yml
@@ -4,22 +4,37 @@ environment:
|
||||
matrix:
|
||||
- TOXENV: py35
|
||||
- TOXENV: py37-cover
|
||||
- TOXENV: integration-certbot
|
||||
|
||||
branches:
|
||||
only:
|
||||
# apache-parser-v2 is a temporary branch for doing work related to
|
||||
# rewriting the parser in the Apache plugin.
|
||||
- apache-parser-v2
|
||||
- master
|
||||
- /^\d+\.\d+\.x$/ # Version branches like X.X.X
|
||||
- /^test-.*$/
|
||||
|
||||
init:
|
||||
# Since master can only receive commits from PRs that have already been tested, the following
# condition avoids launching all jobs except the coverage one for commits pushed to master.
|
||||
- ps: |
|
||||
if (-Not $Env:APPVEYOR_PULL_REQUEST_NUMBER -And $Env:APPVEYOR_REPO_BRANCH -Eq 'master' `
|
||||
-And -Not ($Env:TOXENV -Like '*-cover'))
|
||||
{ $Env:APPVEYOR_SKIP_FINALIZE_ON_EXIT = 'true'; Exit-AppVeyorBuild }
|
||||
|
||||
install:
|
||||
# Use Python 3.7 by default
|
||||
- "SET PATH=C:\\Python37;C:\\Python37\\Scripts;%PATH%"
|
||||
- SET PATH=C:\\Python37;C:\\Python37\\Scripts;%PATH%
|
||||
# Using 4 processes has proven to be the most efficient configuration for integration tests on AppVeyor
|
||||
- IF %TOXENV%==integration-certbot SET PYTEST_ADDOPTS=--numprocesses=4
|
||||
# Check env
|
||||
- "python --version"
|
||||
- python --version
|
||||
# Upgrade pip to avoid warnings
|
||||
- "python -m pip install --upgrade pip"
|
||||
- python -m pip install --upgrade pip
|
||||
# Ready to install tox and coverage
|
||||
- "pip install tox codecov"
|
||||
# tools/pip_install.py is used to pin packages to a known working version.
|
||||
- python tools\\pip_install.py tox codecov
|
||||
|
||||
build: off
|
||||
|
||||
@@ -29,4 +44,4 @@ test_script:
|
||||
- tox
|
||||
|
||||
on_success:
|
||||
- if exist .coverage codecov
|
||||
- if exist .coverage codecov -F windows
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
""" Utility functions for certbot-apache plugin """
|
||||
import binascii
|
||||
import os
|
||||
|
||||
from certbot import util
|
||||
from certbot.compat import os
|
||||
|
||||
|
||||
def get_mod_deps(mod_name):
|
||||
"""Get known module dependencies.
|
||||
|
||||
@@ -1,207 +0,0 @@
|
||||
"""Class of Augeas Configurators."""
|
||||
import logging
|
||||
|
||||
|
||||
from certbot import errors
|
||||
from certbot.plugins import common
|
||||
|
||||
from certbot_apache import constants
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AugeasConfigurator(common.Installer):
|
||||
"""Base Augeas Configurator class.
|
||||
|
||||
:ivar config: Configuration.
|
||||
:type config: :class:`~certbot.interfaces.IConfig`
|
||||
|
||||
:ivar aug: Augeas object
|
||||
:type aug: :class:`augeas.Augeas`
|
||||
|
||||
:ivar str save_notes: Human-readable configuration change notes
|
||||
:ivar reverter: saves and reverts checkpoints
|
||||
:type reverter: :class:`certbot.reverter.Reverter`
|
||||
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(AugeasConfigurator, self).__init__(*args, **kwargs)
|
||||
|
||||
# Placeholder for augeas
|
||||
self.aug = None
|
||||
|
||||
self.save_notes = ""
|
||||
|
||||
|
||||
def init_augeas(self):
|
||||
""" Initialize the actual Augeas instance """
|
||||
import augeas
|
||||
self.aug = augeas.Augeas(
|
||||
# specify a directory to load our preferred lens from
|
||||
loadpath=constants.AUGEAS_LENS_DIR,
|
||||
# Do not save backup (we do it ourselves), do not load
|
||||
# anything by default
|
||||
flags=(augeas.Augeas.NONE |
|
||||
augeas.Augeas.NO_MODL_AUTOLOAD |
|
||||
augeas.Augeas.ENABLE_SPAN))
|
||||
# See if any temporary changes need to be recovered
|
||||
# This needs to occur before VirtualHost objects are setup...
|
||||
# because this will change the underlying configuration and potential
|
||||
# vhosts
|
||||
self.recovery_routine()
|
||||
|
||||
def check_parsing_errors(self, lens):
|
||||
"""Verify Augeas can parse all of the lens files.
|
||||
|
||||
:param str lens: lens to check for errors
|
||||
|
||||
:raises .errors.PluginError: If there has been an error in parsing with
|
||||
the specified lens.
|
||||
|
||||
"""
|
||||
error_files = self.aug.match("/augeas//error")
|
||||
|
||||
for path in error_files:
|
||||
# Check to see if it was an error resulting from the use of
|
||||
# the httpd lens
|
||||
lens_path = self.aug.get(path + "/lens")
|
||||
# As aug.get may return null
|
||||
if lens_path and lens in lens_path:
|
||||
msg = (
|
||||
"There has been an error in parsing the file {0} on line {1}: "
|
||||
"{2}".format(
|
||||
# Strip off /augeas/files and /error
|
||||
path[13:len(path) - 6],
|
||||
self.aug.get(path + "/line"),
|
||||
self.aug.get(path + "/message")))
|
||||
raise errors.PluginError(msg)
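The slice arithmetic above depends on the fixed "/augeas/files" prefix and "/error" suffix of Augeas error nodes; a small standalone sketch with a hypothetical path:
# Hedged sketch: map an Augeas error node path back to the file that failed to parse.
error_path = "/augeas/files/etc/apache2/sites-enabled/broken.conf/error"
# "/augeas/files" is 13 characters and "/error" is 6, hence path[13:len(path) - 6].
filename = error_path[13:len(error_path) - 6]
assert filename == "/etc/apache2/sites-enabled/broken.conf"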
|
||||
|
||||
def ensure_augeas_state(self):
|
||||
"""Makes sure that all Augeas dom changes are written to files to avoid
|
||||
loss of configuration directives when doing additional augeas parsing,
|
||||
causing a possible augeas.load() resulting dom reset
|
||||
"""
|
||||
|
||||
if self.unsaved_files():
|
||||
self.save_notes += "(autosave)"
|
||||
self.save()
|
||||
|
||||
def unsaved_files(self):
|
||||
"""Lists files that have modified Augeas DOM but the changes have not
|
||||
been written to the filesystem yet, used by `self.save()` and
|
||||
ApacheConfigurator to check the file state.
|
||||
|
||||
:raises .errors.PluginError: If there was an error in Augeas, in
|
||||
an attempt to save the configuration, or an error creating a
|
||||
checkpoint
|
||||
|
||||
:returns: `set` of unsaved files
|
||||
"""
|
||||
save_state = self.aug.get("/augeas/save")
|
||||
self.aug.set("/augeas/save", "noop")
|
||||
# Existing Errors
|
||||
ex_errs = self.aug.match("/augeas//error")
|
||||
try:
|
||||
# This is a noop save
|
||||
self.aug.save()
|
||||
except (RuntimeError, IOError):
|
||||
self._log_save_errors(ex_errs)
|
||||
# Erase Save Notes
|
||||
self.save_notes = ""
|
||||
raise errors.PluginError(
|
||||
"Error saving files, check logs for more info.")
|
||||
|
||||
# Return the original save method
|
||||
self.aug.set("/augeas/save", save_state)
|
||||
|
||||
# Retrieve list of modified files
|
||||
# Note: Noop saves can cause the file to be listed twice, I used a
|
||||
# set to remove this possibility. This is a known augeas 0.10 error.
|
||||
save_paths = self.aug.match("/augeas/events/saved")
|
||||
|
||||
save_files = set()
|
||||
if save_paths:
|
||||
for path in save_paths:
|
||||
save_files.add(self.aug.get(path)[6:])
|
||||
return save_files
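The method above relies on Augeas' "noop" save mode; a minimal standalone sketch of the same trick with python-augeas (the /etc/hosts edit is purely illustrative and assumes the default lenses are loaded):
import augeas

# Hedged sketch: pretend-save, then read /augeas/events/saved to learn which
# files *would* be written, without touching the filesystem.
aug = augeas.Augeas()
aug.set("/files/etc/hosts/1/canonical", "example.internal")  # hypothetical edit
previous_mode = aug.get("/augeas/save")
aug.set("/augeas/save", "noop")
aug.save()  # nothing is written in noop mode
pending = {aug.get(p)[len("/files"):] for p in aug.match("/augeas/events/saved")}
aug.set("/augeas/save", previous_mode)  # restore the real save mode
print(pending)  # e.g. {'/etc/hosts'}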
|
||||
|
||||
def save(self, title=None, temporary=False):
|
||||
"""Saves all changes to the configuration files.
|
||||
|
||||
This function first checks for save errors, if none are found,
|
||||
all configuration changes made will be saved. According to the
|
||||
function parameters. If an exception is raised, a new checkpoint
|
||||
was not created.
|
||||
|
||||
:param str title: The title of the save. If a title is given, the
|
||||
configuration will be saved as a new checkpoint and put in a
|
||||
timestamped directory.
|
||||
|
||||
:param bool temporary: Indicates whether the changes made will
|
||||
be quickly reversed in the future (ie. challenges)
|
||||
|
||||
"""
|
||||
save_files = self.unsaved_files()
|
||||
if save_files:
|
||||
self.add_to_checkpoint(save_files,
|
||||
self.save_notes, temporary=temporary)
|
||||
|
||||
self.save_notes = ""
|
||||
self.aug.save()
|
||||
|
||||
# Force reload if files were modified
|
||||
# This is needed to recalculate augeas directive span
|
||||
if save_files:
|
||||
for sf in save_files:
|
||||
self.aug.remove("/files/"+sf)
|
||||
self.aug.load()
|
||||
if title and not temporary:
|
||||
self.finalize_checkpoint(title)
|
||||
|
||||
def _log_save_errors(self, ex_errs):
|
||||
"""Log errors due to bad Augeas save.
|
||||
|
||||
:param list ex_errs: Existing errors before save
|
||||
|
||||
"""
|
||||
# Check for the root of save problems
|
||||
new_errs = self.aug.match("/augeas//error")
|
||||
# logger.error("During Save - %s", mod_conf)
|
||||
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
|
||||
", ".join(err[13:len(err) - 6] for err in new_errs
|
||||
# Only new errors caused by recent save
|
||||
if err not in ex_errs), self.save_notes)
|
||||
|
||||
# Wrapper functions for Reverter class
|
||||
def recovery_routine(self):
|
||||
"""Revert all previously modified files.
|
||||
|
||||
Reverts all modified files that have not been saved as a checkpoint
|
||||
|
||||
:raises .errors.PluginError: If unable to recover the configuration
|
||||
|
||||
"""
|
||||
super(AugeasConfigurator, self).recovery_routine()
|
||||
# Need to reload configuration after these changes take effect
|
||||
self.aug.load()
|
||||
|
||||
def revert_challenge_config(self):
|
||||
"""Used to cleanup challenge configurations.
|
||||
|
||||
:raises .errors.PluginError: If unable to revert the challenge config.
|
||||
|
||||
"""
|
||||
self.revert_temporary_config()
|
||||
self.aug.load()
|
||||
|
||||
def rollback_checkpoints(self, rollback=1):
|
||||
"""Rollback saved checkpoints.
|
||||
|
||||
:param int rollback: Number of checkpoints to revert
|
||||
|
||||
:raises .errors.PluginError: If there is a problem with the input or
|
||||
the function is unable to correctly revert the configuration
|
||||
|
||||
"""
|
||||
super(AugeasConfigurator, self).rollback_checkpoints(rollback)
|
||||
self.aug.load()
|
||||
@@ -1,40 +1,40 @@
|
||||
"""Apache Configuration based off of Augeas Configurator."""
|
||||
"""Apache Configurator."""
|
||||
# pylint: disable=too-many-lines
|
||||
import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import os
|
||||
import pkg_resources
|
||||
import re
|
||||
import six
|
||||
import socket
|
||||
import time
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
import pkg_resources
|
||||
import six
|
||||
|
||||
import zope.component
|
||||
import zope.interface
|
||||
|
||||
from acme import challenges
|
||||
from acme.magic_typing import Any, DefaultDict, Dict, List, Set, Union # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import DefaultDict, Dict, List, Set, Union # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
|
||||
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge # pylint: disable=unused-import
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
from certbot.plugins import common
|
||||
from certbot.plugins.util import path_surgery
|
||||
from certbot.plugins.enhancements import AutoHSTSEnhancement
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import augeas_configurator
|
||||
from certbot_apache import constants
|
||||
from certbot_apache import display_ops
|
||||
from certbot_apache import http_01
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import parser
|
||||
from certbot_apache import tls_sni_01
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -70,13 +70,10 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
@zope.interface.implementer(interfaces.IAuthenticator, interfaces.IInstaller)
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
class ApacheConfigurator(common.Installer):
|
||||
# pylint: disable=too-many-instance-attributes,too-many-public-methods
|
||||
"""Apache configurator.
|
||||
|
||||
State of Configurator: This code has been tested and built for Ubuntu
|
||||
14.04 Apache 2.4 and it works for Ubuntu 12.04 Apache 2.2
|
||||
|
||||
:ivar config: Configuration.
|
||||
:type config: :class:`~certbot.interfaces.IConfig`
|
||||
|
||||
@@ -92,6 +89,11 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
|
||||
description = "Apache Web Server plugin"
|
||||
if os.environ.get("CERTBOT_DOCS") == "1":
|
||||
description += ( # pragma: no cover
|
||||
" (Please note that the default values of the Apache plugin options"
|
||||
" change depending on the operating system Certbot is run on.)"
|
||||
)
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/apache2",
|
||||
@@ -141,28 +143,36 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# When adding, modifying or deleting command line arguments, be sure to
|
||||
# include the changes in the list used in method _prepare_options() to
|
||||
# ensure consistent behavior.
|
||||
add("enmod", default=cls.OS_DEFAULTS["enmod"],
|
||||
|
||||
# Respect CERTBOT_DOCS environment variable and use default values from
|
||||
# base class regardless of the underlying distribution (overrides).
|
||||
if os.environ.get("CERTBOT_DOCS") == "1":
|
||||
DEFAULTS = ApacheConfigurator.OS_DEFAULTS
|
||||
else:
|
||||
# cls.OS_DEFAULTS can be distribution specific, see override classes
|
||||
DEFAULTS = cls.OS_DEFAULTS
|
||||
add("enmod", default=DEFAULTS["enmod"],
|
||||
help="Path to the Apache 'a2enmod' binary")
|
||||
add("dismod", default=cls.OS_DEFAULTS["dismod"],
|
||||
add("dismod", default=DEFAULTS["dismod"],
|
||||
help="Path to the Apache 'a2dismod' binary")
|
||||
add("le-vhost-ext", default=cls.OS_DEFAULTS["le_vhost_ext"],
|
||||
add("le-vhost-ext", default=DEFAULTS["le_vhost_ext"],
|
||||
help="SSL vhost configuration extension")
|
||||
add("server-root", default=cls.OS_DEFAULTS["server_root"],
|
||||
add("server-root", default=DEFAULTS["server_root"],
|
||||
help="Apache server root directory")
|
||||
add("vhost-root", default=None,
|
||||
help="Apache server VirtualHost configuration root")
|
||||
add("logs-root", default=cls.OS_DEFAULTS["logs_root"],
|
||||
add("logs-root", default=DEFAULTS["logs_root"],
|
||||
help="Apache server logs directory")
|
||||
add("challenge-location",
|
||||
default=cls.OS_DEFAULTS["challenge_location"],
|
||||
default=DEFAULTS["challenge_location"],
|
||||
help="Directory path for challenge configuration")
|
||||
add("handle-modules", default=cls.OS_DEFAULTS["handle_modules"],
|
||||
add("handle-modules", default=DEFAULTS["handle_modules"],
|
||||
help="Let installer handle enabling required modules for you " +
|
||||
"(Only Ubuntu/Debian currently)")
|
||||
add("handle-sites", default=cls.OS_DEFAULTS["handle_sites"],
|
||||
add("handle-sites", default=DEFAULTS["handle_sites"],
|
||||
help="Let installer handle enabling sites for you " +
|
||||
"(Only Ubuntu/Debian currently)")
|
||||
add("ctl", default=cls.OS_DEFAULTS["ctl"],
|
||||
add("ctl", default=DEFAULTS["ctl"],
|
||||
help="Full path to Apache control script")
|
||||
util.add_deprecated_argument(
|
||||
add, argument_name="init-script", nargs=1)
|
||||
@@ -188,6 +198,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
self._enhanced_vhosts = defaultdict(set) # type: DefaultDict[str, Set[obj.VirtualHost]]
|
||||
# Temporary state for AutoHSTS enhancement
|
||||
self._autohsts = {} # type: Dict[str, Dict[str, Union[int, float]]]
|
||||
# Reverter save notes
|
||||
self.save_notes = ""
|
||||
|
||||
# These will be set in the prepare function
|
||||
self._prepared = False
|
||||
@@ -202,15 +214,13 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
@property
|
||||
def mod_ssl_conf(self):
|
||||
"""Full absolute path to SSL configuration file."""
|
||||
return os.path.join(self.config.config_dir,
|
||||
constants.MOD_SSL_CONF_DEST)
|
||||
return os.path.join(self.config.config_dir, constants.MOD_SSL_CONF_DEST)
|
||||
|
||||
@property
|
||||
def updated_mod_ssl_conf_digest(self):
|
||||
"""Full absolute path to digest of updated SSL configuration file."""
|
||||
return os.path.join(self.config.config_dir, constants.UPDATED_MOD_SSL_CONF_DIGEST)
|
||||
|
||||
|
||||
def prepare(self):
|
||||
"""Prepare the authenticator/installer.
|
||||
|
||||
@@ -220,12 +230,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
:raises .errors.PluginError: If there is any other error
|
||||
|
||||
"""
|
||||
# Perform the actual Augeas initialization to be able to react
|
||||
try:
|
||||
self.init_augeas()
|
||||
except ImportError:
|
||||
raise errors.NoInstallationError("Problem in Augeas installation")
|
||||
|
||||
self._prepare_options()
|
||||
|
||||
# Verify Apache is installed
|
||||
@@ -241,18 +245,16 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
'.'.join(str(i) for i in self.version))
|
||||
if self.version < (2, 2):
|
||||
raise errors.NotSupportedError(
|
||||
"Apache Version %s not supported.", str(self.version))
|
||||
|
||||
if not self._check_aug_version():
|
||||
raise errors.NotSupportedError(
|
||||
"Apache plugin support requires libaugeas0 and augeas-lenses "
|
||||
"version 1.2.0 or higher, please make sure you have you have "
|
||||
"those installed.")
|
||||
"Apache Version {0} not supported.".format(str(self.version)))
|
||||
|
||||
# Recover from previous crash before Augeas initialization to have the
|
||||
# correct parse tree from the get go.
|
||||
self.recovery_routine()
|
||||
# Perform the actual Augeas initialization to be able to react
|
||||
self.parser = self.get_parser()
|
||||
|
||||
# Check for errors in parsing files with Augeas
|
||||
self.check_parsing_errors("httpd.aug")
|
||||
self.parser.check_parsing_errors("httpd.aug")
|
||||
|
||||
# Get all of the available vhosts
|
||||
self.vhosts = self.get_virtual_hosts()
|
||||
@@ -266,9 +268,72 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
except (OSError, errors.LockError):
|
||||
logger.debug("Encountered error:", exc_info=True)
|
||||
raise errors.PluginError(
|
||||
"Unable to lock %s", self.option("server_root"))
|
||||
"Unable to create a lock file in {0}. Are you running"
|
||||
" Certbot with sufficient privileges to modify your"
|
||||
" Apache configuration?".format(self.option("server_root")))
|
||||
self._prepared = True
|
||||
|
||||
def save(self, title=None, temporary=False):
|
||||
"""Saves all changes to the configuration files.
|
||||
|
||||
This function first checks for save errors and, if none are found,
saves all configuration changes according to the function parameters.
If an exception is raised, a new checkpoint was not created.
|
||||
|
||||
:param str title: The title of the save. If a title is given, the
|
||||
configuration will be saved as a new checkpoint and put in a
|
||||
timestamped directory.
|
||||
|
||||
:param bool temporary: Indicates whether the changes made will
|
||||
be quickly reversed in the future (ie. challenges)
|
||||
|
||||
"""
|
||||
save_files = self.parser.unsaved_files()
|
||||
if save_files:
|
||||
self.add_to_checkpoint(save_files,
|
||||
self.save_notes, temporary=temporary)
|
||||
# Handle the parser specific tasks
|
||||
self.parser.save(save_files)
|
||||
if title and not temporary:
|
||||
self.finalize_checkpoint(title)
|
||||
|
||||
def recovery_routine(self):
|
||||
"""Revert all previously modified files.
|
||||
|
||||
Reverts all modified files that have not been saved as a checkpoint
|
||||
|
||||
:raises .errors.PluginError: If unable to recover the configuration
|
||||
|
||||
"""
|
||||
super(ApacheConfigurator, self).recovery_routine()
|
||||
# Reload configuration after these changes take effect if needed
|
||||
# ie. ApacheParser has been initialized.
|
||||
if self.parser:
|
||||
# TODO: wrap into non-implementation specific parser interface
|
||||
self.parser.aug.load()
|
||||
|
||||
def revert_challenge_config(self):
|
||||
"""Used to cleanup challenge configurations.
|
||||
|
||||
:raises .errors.PluginError: If unable to revert the challenge config.
|
||||
|
||||
"""
|
||||
self.revert_temporary_config()
|
||||
self.parser.aug.load()
|
||||
|
||||
def rollback_checkpoints(self, rollback=1):
|
||||
"""Rollback saved checkpoints.
|
||||
|
||||
:param int rollback: Number of checkpoints to revert
|
||||
|
||||
:raises .errors.PluginError: If there is a problem with the input or
|
||||
the function is unable to correctly revert the configuration
|
||||
|
||||
"""
|
||||
super(ApacheConfigurator, self).rollback_checkpoints(rollback)
|
||||
self.parser.aug.load()
|
||||
|
||||
def _verify_exe_availability(self, exe):
|
||||
"""Checks availability of Apache executable"""
|
||||
if not util.exe_exists(exe):
|
||||
@@ -276,26 +341,11 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
raise errors.NoInstallationError(
|
||||
'Cannot find Apache executable {0}'.format(exe))
|
||||
|
||||
def _check_aug_version(self):
|
||||
""" Checks that we have recent enough version of libaugeas.
|
||||
If augeas version is recent enough, it will support case insensitive
|
||||
regexp matching"""
|
||||
|
||||
self.aug.set("/test/path/testing/arg", "aRgUMeNT")
|
||||
try:
|
||||
matches = self.aug.match(
|
||||
"/test//*[self::arg=~regexp('argument', 'i')]")
|
||||
except RuntimeError:
|
||||
self.aug.remove("/test/path")
|
||||
return False
|
||||
self.aug.remove("/test/path")
|
||||
return matches
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
# If user provided vhost_root value in command line, use it
|
||||
return parser.ApacheParser(
|
||||
self.aug, self.option("server_root"), self.conf("vhost-root"),
|
||||
self.option("server_root"), self.conf("vhost-root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _wildcard_domain(self, domain):
|
||||
@@ -382,7 +432,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
if len(name.split(".")) == len(domain.split(".")):
|
||||
return fnmatch.fnmatch(name, domain)
|
||||
|
||||
return None
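Because fnmatch's "*" also matches dots, the equal-label-count guard above is what stops a wildcard from spanning extra subdomain levels; a short sketch:
import fnmatch

# Hedged sketch of the label-count rule: only attempt the wildcard comparison
# when both names have the same number of dot-separated labels.
def label_aware_match(name, pattern):
    if len(name.split(".")) == len(pattern.split(".")):
        return fnmatch.fnmatch(name, pattern)
    return False

print(label_aware_match("www.example.com", "*.example.com"))    # True
print(label_aware_match("a.b.example.com", "*.example.com"))    # False, extra label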
|
||||
|
||||
def _choose_vhosts_wildcard(self, domain, create_ssl=True):
|
||||
"""Prompts user to choose vhosts to install a wildcard certificate for"""
|
||||
@@ -428,7 +478,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
self._wildcard_vhosts[domain] = return_vhosts
|
||||
return return_vhosts
|
||||
|
||||
|
||||
def _deploy_cert(self, vhost, cert_path, key_path, chain_path, fullchain_path):
|
||||
"""
|
||||
Helper function for deploy_cert() that handles the actual deployment
|
||||
@@ -436,8 +485,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
domain originally passed for deploy_cert(). This is especially true
|
||||
with wildcard certificates
|
||||
"""
|
||||
|
||||
|
||||
# This is done first so that ssl module is enabled and cert_path,
|
||||
# cert_key... can all be parsed appropriately
|
||||
self.prepare_server_https("443")
|
||||
@@ -477,8 +524,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# install SSLCertificateFile, SSLCertificateKeyFile,
|
||||
# and SSLCertificateChainFile directives
|
||||
set_cert_path = cert_path
|
||||
self.aug.set(path["cert_path"][-1], cert_path)
|
||||
self.aug.set(path["cert_key"][-1], key_path)
|
||||
self.parser.aug.set(path["cert_path"][-1], cert_path)
|
||||
self.parser.aug.set(path["cert_key"][-1], key_path)
|
||||
if chain_path is not None:
|
||||
self.parser.add_dir(vhost.path,
|
||||
"SSLCertificateChainFile", chain_path)
|
||||
@@ -490,8 +537,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
raise errors.PluginError("Please provide the --fullchain-path "
|
||||
"option pointing to your full chain file")
|
||||
set_cert_path = fullchain_path
|
||||
self.aug.set(path["cert_path"][-1], fullchain_path)
|
||||
self.aug.set(path["cert_key"][-1], key_path)
|
||||
self.parser.aug.set(path["cert_path"][-1], fullchain_path)
|
||||
self.parser.aug.set(path["cert_key"][-1], key_path)
|
||||
|
||||
# Enable the new vhost if needed
|
||||
if not vhost.enabled:
|
||||
@@ -577,8 +624,9 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
self.assoc[target_name] = vhost
|
||||
return vhost
|
||||
|
||||
def included_in_wildcard(self, names, target_name):
|
||||
"""Is target_name covered by a wildcard?
|
||||
def domain_in_names(self, names, target_name):
|
||||
"""Checks if target domain is covered by one or more of the provided
|
||||
names. The target name is matched by wildcard as well as exact match.
|
||||
|
||||
:param names: server aliases
|
||||
:type names: `collections.Iterable` of `str`
|
||||
@@ -649,7 +697,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
names = vhost.get_names()
|
||||
if target_name in names:
|
||||
points = 3
|
||||
elif self.included_in_wildcard(names, target_name):
|
||||
elif self.domain_in_names(names, target_name):
|
||||
points = 2
|
||||
elif any(addr.get_addr() == target_name for addr in vhost.addrs):
|
||||
points = 1
|
||||
@@ -708,7 +756,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if name:
|
||||
all_names.add(name)
|
||||
|
||||
if len(vhost_macro) > 0:
|
||||
if vhost_macro:
|
||||
zope.component.getUtility(interfaces.IDisplay).notification(
|
||||
"Apache mod_macro seems to be in use in file(s):\n{0}"
|
||||
"\n\nUnfortunately mod_macro is not yet supported".format(
|
||||
@@ -790,7 +838,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
addrs = set()
|
||||
try:
|
||||
args = self.aug.match(path + "/arg")
|
||||
args = self.parser.aug.match(path + "/arg")
|
||||
except RuntimeError:
|
||||
logger.warning("Encountered a problem while parsing file: %s, skipping", path)
|
||||
return None
|
||||
@@ -808,7 +856,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
is_ssl = True
|
||||
|
||||
filename = apache_util.get_file_path(
|
||||
self.aug.get("/augeas/files%s/path" % apache_util.get_file_path(path)))
|
||||
self.parser.aug.get("/augeas/files%s/path" % apache_util.get_file_path(path)))
|
||||
if filename is None:
|
||||
return None
|
||||
|
||||
@@ -838,7 +886,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Make a list of parser paths because the parser_paths
|
||||
# dictionary may be modified during the loop.
|
||||
for vhost_path in list(self.parser.parser_paths):
|
||||
paths = self.aug.match(
|
||||
paths = self.parser.aug.match(
|
||||
("/files%s//*[label()=~regexp('%s')]" %
|
||||
(vhost_path, parser.case_i("VirtualHost"))))
|
||||
paths = [path for path in paths if
|
||||
@@ -848,7 +896,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if not new_vhost:
|
||||
continue
|
||||
internal_path = apache_util.get_internal_aug_path(new_vhost.path)
|
||||
realpath = os.path.realpath(new_vhost.filep)
|
||||
realpath = filesystem.realpath(new_vhost.filep)
|
||||
if realpath not in file_paths:
|
||||
file_paths[realpath] = new_vhost.filep
|
||||
internal_paths[realpath].add(internal_path)
|
||||
@@ -1054,6 +1102,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Ugly but takes care of protocol def, eg: 1.1.1.1:443 https
|
||||
if listen.split(":")[-1].split(" ")[0] == port:
|
||||
return True
|
||||
return None
|
||||
|
||||
def prepare_https_modules(self, temp):
|
||||
"""Helper method for prepare_server_https, taking care of enabling
|
||||
@@ -1069,23 +1118,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if "ssl_module" not in self.parser.modules:
|
||||
self.enable_mod("ssl", temp=temp)
|
||||
|
||||
def make_addrs_sni_ready(self, addrs):
|
||||
"""Checks to see if the server is ready for SNI challenges.
|
||||
|
||||
:param addrs: Addresses to check SNI compatibility
|
||||
:type addrs: :class:`~certbot_apache.obj.Addr`
|
||||
|
||||
"""
|
||||
# Version 2.4 and later are automatically SNI ready.
|
||||
if self.version >= (2, 4):
|
||||
return
|
||||
|
||||
for addr in addrs:
|
||||
if not self.is_name_vhost(addr):
|
||||
logger.debug("Setting VirtualHost at %s to be a name "
|
||||
"based virtual host", addr)
|
||||
self.add_name_vhost(addr)
|
||||
|
||||
def make_vhost_ssl(self, nonssl_vhost): # pylint: disable=too-many-locals
|
||||
"""Makes an ssl_vhost version of a nonssl_vhost.
|
||||
|
||||
@@ -1108,16 +1140,16 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
avail_fp = nonssl_vhost.filep
|
||||
ssl_fp = self._get_ssl_vhost_path(avail_fp)
|
||||
|
||||
orig_matches = self.aug.match("/files%s//* [label()=~regexp('%s')]" %
|
||||
orig_matches = self.parser.aug.match("/files%s//* [label()=~regexp('%s')]" %
|
||||
(self._escape(ssl_fp),
|
||||
parser.case_i("VirtualHost")))
|
||||
|
||||
self._copy_create_ssl_vhost_skeleton(nonssl_vhost, ssl_fp)
|
||||
|
||||
# Reload augeas to take into account the new vhost
|
||||
self.aug.load()
|
||||
self.parser.aug.load()
|
||||
# Get Vhost augeas path for new vhost
|
||||
new_matches = self.aug.match("/files%s//* [label()=~regexp('%s')]" %
|
||||
new_matches = self.parser.aug.match("/files%s//* [label()=~regexp('%s')]" %
|
||||
(self._escape(ssl_fp),
|
||||
parser.case_i("VirtualHost")))
|
||||
|
||||
@@ -1128,7 +1160,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Make Augeas aware of the new vhost
|
||||
self.parser.parse_file(ssl_fp)
|
||||
# Try to search again
|
||||
new_matches = self.aug.match(
|
||||
new_matches = self.parser.aug.match(
|
||||
"/files%s//* [label()=~regexp('%s')]" %
|
||||
(self._escape(ssl_fp),
|
||||
parser.case_i("VirtualHost")))
|
||||
@@ -1190,16 +1222,15 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
|
||||
if self.conf("vhost-root") and os.path.exists(self.conf("vhost-root")):
|
||||
fp = os.path.join(os.path.realpath(self.option("vhost_root")),
|
||||
fp = os.path.join(filesystem.realpath(self.option("vhost_root")),
|
||||
os.path.basename(non_ssl_vh_fp))
|
||||
else:
|
||||
# Use non-ssl filepath
|
||||
fp = os.path.realpath(non_ssl_vh_fp)
|
||||
fp = filesystem.realpath(non_ssl_vh_fp)
|
||||
|
||||
if fp.endswith(".conf"):
|
||||
return fp[:-(len(".conf"))] + self.option("le_vhost_ext")
|
||||
else:
|
||||
return fp + self.option("le_vhost_ext")
|
||||
return fp + self.option("le_vhost_ext")
|
||||
|
||||
def _sift_rewrite_rule(self, line):
|
||||
"""Decides whether a line should be copied to a SSL vhost.
|
||||
@@ -1279,8 +1310,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"vhost for your HTTPS site located at {1} because they have "
|
||||
"the potential to create redirection loops.".format(
|
||||
vhost.filep, ssl_fp), reporter.MEDIUM_PRIORITY)
|
||||
self.aug.set("/augeas/files%s/mtime" % (self._escape(ssl_fp)), "0")
|
||||
self.aug.set("/augeas/files%s/mtime" % (self._escape(vhost.filep)), "0")
|
||||
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(ssl_fp)), "0")
|
||||
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(vhost.filep)), "0")
|
||||
|
||||
def _sift_rewrite_rules(self, contents):
|
||||
""" Helper function for _copy_create_ssl_vhost_skeleton to prepare the
|
||||
@@ -1355,7 +1386,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
|
||||
try:
|
||||
span_val = self.aug.span(vhost.path)
|
||||
span_val = self.parser.aug.span(vhost.path)
|
||||
except ValueError:
|
||||
logger.critical("Error while reading the VirtualHost %s from "
|
||||
"file %s", vhost.name, vhost.filep, exc_info=True)
|
||||
@@ -1390,13 +1421,13 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
|
||||
def _update_ssl_vhosts_addrs(self, vh_path):
|
||||
ssl_addrs = set()
|
||||
ssl_addr_p = self.aug.match(vh_path + "/arg")
|
||||
ssl_addr_p = self.parser.aug.match(vh_path + "/arg")
|
||||
|
||||
for addr in ssl_addr_p:
|
||||
old_addr = obj.Addr.fromstring(
|
||||
str(self.parser.get_arg(addr)))
|
||||
ssl_addr = old_addr.get_addr_obj("443")
|
||||
self.aug.set(addr, str(ssl_addr))
|
||||
self.parser.aug.set(addr, str(ssl_addr))
|
||||
ssl_addrs.add(ssl_addr)
|
||||
|
||||
return ssl_addrs
|
||||
@@ -1415,15 +1446,14 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
vh_path, False)) > 1:
|
||||
directive_path = self.parser.find_dir(directive, None,
|
||||
vh_path, False)
|
||||
self.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
|
||||
self.parser.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
|
||||
|
||||
def _remove_directives(self, vh_path, directives):
|
||||
for directive in directives:
|
||||
while len(self.parser.find_dir(directive, None,
|
||||
vh_path, False)) > 0:
|
||||
while self.parser.find_dir(directive, None, vh_path, False):
|
||||
directive_path = self.parser.find_dir(directive, None,
|
||||
vh_path, False)
|
||||
self.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
|
||||
self.parser.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
|
||||
|
||||
def _add_dummy_ssl_directives(self, vh_path):
|
||||
self.parser.add_dir(vh_path, "SSLCertificateFile",
|
||||
@@ -1462,8 +1492,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
matches = self.parser.find_dir(
|
||||
"ServerAlias", start=vh_path, exclude=False)
|
||||
aliases = (self.aug.get(match) for match in matches)
|
||||
return self.included_in_wildcard(aliases, target_name)
|
||||
aliases = (self.parser.aug.get(match) for match in matches)
|
||||
return self.domain_in_names(aliases, target_name)
|
||||
|
||||
def _add_name_vhost_if_necessary(self, vhost):
|
||||
"""Add NameVirtualHost Directives if necessary for new vhost.
|
||||
@@ -1645,7 +1675,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if header_path:
|
||||
pat = '(?:[ "]|^)(strict-transport-security)(?:[ "]|$)'
|
||||
for match in header_path:
|
||||
if re.search(pat, self.aug.get(match).lower()):
|
||||
if re.search(pat, self.parser.aug.get(match).lower()):
|
||||
hsts_dirpath = match
|
||||
if not hsts_dirpath:
|
||||
err_msg = ("Certbot was unable to find the existing HSTS header "
|
||||
@@ -1659,7 +1689,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Our match statement was for string strict-transport-security, but
|
||||
# we need to update the value instead. The next index is for the value
|
||||
hsts_dirpath = hsts_dirpath.replace("arg[3]", "arg[4]")
|
||||
self.aug.set(hsts_dirpath, hsts_maxage)
|
||||
self.parser.aug.set(hsts_dirpath, hsts_maxage)
|
||||
note_msg = ("Increasing HSTS max-age value to {0} for VirtualHost "
|
||||
"in {1}\n".format(nextstep_value, vhost.filep))
|
||||
logger.debug(note_msg)
|
||||
@@ -1741,7 +1771,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# We'll simply delete the directive, so that we'll have a
|
||||
# consistent OCSP cache path.
|
||||
if stapling_cache_aug_path:
|
||||
self.aug.remove(
|
||||
self.parser.aug.remove(
|
||||
re.sub(r"/\w*$", "", stapling_cache_aug_path[0]))
|
||||
|
||||
self.parser.add_dir_to_ifmodssl(ssl_vhost_aug_path,
|
||||
@@ -1818,7 +1848,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# "Existing Header directive for virtualhost"
|
||||
pat = '(?:[ "]|^)(%s)(?:[ "]|$)' % (header_substring.lower())
|
||||
for match in header_path:
|
||||
if re.search(pat, self.aug.get(match).lower()):
|
||||
if re.search(pat, self.parser.aug.get(match).lower()):
|
||||
raise errors.PluginEnhancementAlreadyPresent(
|
||||
"Existing %s header" % (header_substring))
|
||||
|
||||
@@ -1911,7 +1941,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
self.parser.add_dir(vhost.path, "RewriteRule",
|
||||
constants.REWRITE_HTTPS_ARGS)
|
||||
|
||||
|
||||
def _verify_no_certbot_redirect(self, vhost):
|
||||
"""Checks to see if a redirect was already installed by certbot.
|
||||
|
||||
@@ -1946,11 +1975,11 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
constants.REWRITE_HTTPS_ARGS_WITH_END]
|
||||
|
||||
for dir_path, args_paths in rewrite_args_dict.items():
|
||||
arg_vals = [self.aug.get(x) for x in args_paths]
|
||||
arg_vals = [self.parser.aug.get(x) for x in args_paths]
|
||||
|
||||
# Search for past redirection rule, delete it, set the new one
|
||||
if arg_vals in constants.OLD_REWRITE_HTTPS_ARGS:
|
||||
self.aug.remove(dir_path)
|
||||
self.parser.aug.remove(dir_path)
|
||||
self._set_https_redirection_rewrite_rule(vhost)
|
||||
self.save()
|
||||
raise errors.PluginEnhancementAlreadyPresent(
|
||||
@@ -2006,7 +2035,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
|
||||
redirect_filepath = self._write_out_redirect(ssl_vhost, text)
|
||||
|
||||
self.aug.load()
|
||||
self.parser.aug.load()
|
||||
# Make a new vhost data structure and add it to the lists
|
||||
new_vhost = self._create_vhost(parser.get_aug_path(self._escape(redirect_filepath)))
|
||||
self.vhosts.append(new_vhost)
|
||||
@@ -2142,7 +2171,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
vhost.enabled = True
|
||||
return
|
||||
|
||||
def enable_mod(self, mod_name, temp=False): # pylint: disable=unused-argument
|
||||
def enable_mod(self, mod_name, temp=False): # pylint: disable=unused-argument
|
||||
"""Enables module in Apache.
|
||||
|
||||
Both enables and reloads Apache so module is active.
|
||||
@@ -2179,7 +2208,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
:raises .errors.MisconfigurationError: If reload fails
|
||||
|
||||
"""
|
||||
error = ""
|
||||
try:
|
||||
util.run_script(self.option("restart_cmd"))
|
||||
except errors.SubprocessError as err:
|
||||
@@ -2253,7 +2281,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
###########################################################################
|
||||
def get_chall_pref(self, unused_domain): # pylint: disable=no-self-use
|
||||
"""Return list of challenge preferences."""
|
||||
return [challenges.HTTP01, challenges.TLSSNI01]
|
||||
return [challenges.HTTP01]
|
||||
|
||||
def perform(self, achalls):
|
||||
"""Perform the configuration related challenge.
|
||||
@@ -2266,20 +2294,15 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
self._chall_out.update(achalls)
|
||||
responses = [None] * len(achalls)
|
||||
http_doer = http_01.ApacheHttp01(self)
|
||||
sni_doer = tls_sni_01.ApacheTlsSni01(self)
|
||||
|
||||
for i, achall in enumerate(achalls):
|
||||
# Currently also have chall_doer hold associated index of the
|
||||
# challenge. This helps to put all of the responses back together
|
||||
# when they are all complete.
|
||||
if isinstance(achall.chall, challenges.HTTP01):
|
||||
http_doer.add_chall(achall, i)
|
||||
else: # tls-sni-01
|
||||
sni_doer.add_chall(achall, i)
|
||||
http_doer.add_chall(achall, i)
|
||||
|
||||
http_response = http_doer.perform()
|
||||
sni_response = sni_doer.perform()
|
||||
if http_response or sni_response:
|
||||
if http_response:
|
||||
# Must reload in order to activate the challenges.
|
||||
# Handled here because we may be able to load up other challenge
|
||||
# types
|
||||
@@ -2290,7 +2313,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
time.sleep(3)
|
||||
|
||||
self._update_responses(responses, http_response, http_doer)
|
||||
self._update_responses(responses, sni_response, sni_doer)
|
||||
|
||||
return responses
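The index handed to add_chall() is what lets responses land back in their original order; a toy sketch of that bookkeeping with stand-in strings instead of real achall and response objects:
# Hedged sketch: each challenge keeps its original index so the response list
# lines up with the order in which achalls were received.
achalls = ["chall-a", "chall-b", "chall-c"]
responses = [None] * len(achalls)
queued = [(achall, i) for i, achall in enumerate(achalls)]      # add_chall(achall, i)
performed = ["response-for-" + achall for achall, _ in queued]  # doer.perform()
for (_, i), resp in zip(queued, performed):                     # _update_responses()
    responses[i] = resp
print(responses)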
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
|
||||
UPDATED_MOD_SSL_CONF_DIGEST = ".updated-options-ssl-apache-conf-digest.txt"
|
||||
"""Name of the hash of the updated or informed mod_ssl_conf as saved in `IConfig.config_dir`."""
|
||||
|
||||
# NEVER REMOVE A SINGLE HASH FROM THIS LIST UNLESS YOU KNOW EXACTLY WHAT YOU ARE DOING!
|
||||
ALL_SSL_OPTIONS_HASHES = [
|
||||
'2086bca02db48daf93468332543c60ac6acdb6f0b58c7bfdf578a5d47092f82a',
|
||||
'4844d36c9a0f587172d9fa10f4f1c9518e3bcfa1947379f155e16a70a728c21a',
|
||||
@@ -18,6 +19,10 @@ ALL_SSL_OPTIONS_HASHES = [
|
||||
'cfdd7c18d2025836ea3307399f509cfb1ebf2612c87dd600a65da2a8e2f2797b',
|
||||
'80720bd171ccdc2e6b917ded340defae66919e4624962396b992b7218a561791',
|
||||
'c0c022ea6b8a51ecc8f1003d0a04af6c3f2bc1c3ce506b3c2dfc1f11ef931082',
|
||||
'717b0a89f5e4c39b09a42813ac6e747cfbdeb93439499e73f4f70a1fe1473f20',
|
||||
'0fcdc81280cd179a07ec4d29d3595068b9326b455c488de4b09f585d5dafc137',
|
||||
'86cc09ad5415cd6d5f09a947fe2501a9344328b1e8a8b458107ea903e80baa6c',
|
||||
'06675349e457eae856120cdebb564efe546f0b87399f2264baeb41e442c724c7',
|
||||
]
|
||||
"""SHA256 hashes of the contents of previous versions of all versions of MOD_SSL_CONF_SRC"""
|
||||
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
"""Contains UI methods for Apache operations."""
|
||||
import logging
|
||||
import os
|
||||
|
||||
import zope.component
|
||||
|
||||
import certbot.display.util as display_util
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
|
||||
import certbot.display.util as display_util
|
||||
|
||||
from certbot.compat import os
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -26,7 +24,7 @@ def select_vhost_multiple(vhosts):
|
||||
return list()
|
||||
tags_list = [vhost.display_repr()+"\n" for vhost in vhosts]
|
||||
# Remove the extra newline from the last entry
|
||||
if len(tags_list):
|
||||
if tags_list:
|
||||
tags_list[-1] = tags_list[-1][:-1]
|
||||
code, names = zope.component.getUtility(interfaces.IDisplay).checklist(
|
||||
"Which VirtualHosts would you like to install the wildcard certificate for?",
|
||||
@@ -62,8 +60,7 @@ def select_vhost(domain, vhosts):
|
||||
code, tag = _vhost_menu(domain, vhosts)
|
||||
if code == display_util.OK:
|
||||
return vhosts[tag]
|
||||
else:
|
||||
return None
|
||||
return None
|
||||
|
||||
def _vhost_menu(domain, vhosts):
|
||||
"""Select an appropriate Apache Vhost.
|
||||
@@ -93,7 +90,7 @@ def _vhost_menu(domain, vhosts):
|
||||
for vhost in vhosts:
|
||||
if len(vhost.get_names()) == 1:
|
||||
disp_name = next(iter(vhost.get_names()))
|
||||
elif len(vhost.get_names()) == 0:
|
||||
elif not vhost.get_names():
|
||||
disp_name = ""
|
||||
else:
|
||||
disp_name = "Multiple Names"
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
""" Entry point for Apache Plugin """
|
||||
# Pylint does not like distutils.version when running inside a venv.
|
||||
# See: https://github.com/PyCQA/pylint/issues/73
|
||||
from distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error
|
||||
|
||||
from certbot import util
|
||||
|
||||
from certbot_apache import configurator
|
||||
from certbot_apache import override_arch
|
||||
from certbot_apache import override_fedora
|
||||
from certbot_apache import override_darwin
|
||||
from certbot_apache import override_debian
|
||||
from certbot_apache import override_centos
|
||||
@@ -16,8 +21,10 @@ OVERRIDE_CLASSES = {
|
||||
"ubuntu": override_debian.DebianConfigurator,
|
||||
"centos": override_centos.CentOSConfigurator,
|
||||
"centos linux": override_centos.CentOSConfigurator,
|
||||
"fedora": override_centos.CentOSConfigurator,
|
||||
"fedora_old": override_centos.CentOSConfigurator,
|
||||
"fedora": override_fedora.FedoraConfigurator,
|
||||
"ol": override_centos.CentOSConfigurator,
|
||||
"redhatenterpriseserver": override_centos.CentOSConfigurator,
|
||||
"red hat enterprise linux server": override_centos.CentOSConfigurator,
|
||||
"rhel": override_centos.CentOSConfigurator,
|
||||
"amazon": override_centos.CentOSConfigurator,
|
||||
@@ -25,14 +32,23 @@ OVERRIDE_CLASSES = {
|
||||
"gentoo base system": override_gentoo.GentooConfigurator,
|
||||
"opensuse": override_suse.OpenSUSEConfigurator,
|
||||
"suse": override_suse.OpenSUSEConfigurator,
|
||||
"scientific": override_centos.CentOSConfigurator,
|
||||
"scientific linux": override_centos.CentOSConfigurator,
|
||||
}
|
||||
|
||||
|
||||
def get_configurator():
|
||||
""" Get correct configurator class based on the OS fingerprint """
|
||||
os_info = util.get_os_info()
|
||||
os_name, os_version = util.get_os_info()
|
||||
os_name = os_name.lower()
|
||||
override_class = None
|
||||
|
||||
# Special case for older Fedora versions
|
||||
if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):
|
||||
os_name = 'fedora_old'
|
||||
|
||||
try:
|
||||
override_class = OVERRIDE_CLASSES[os_info[0].lower()]
|
||||
override_class = OVERRIDE_CLASSES[os_name]
|
||||
except KeyError:
|
||||
# OS not found in the list
|
||||
os_like = util.get_systemd_os_like()
|
||||
@@ -45,4 +61,5 @@ def get_configurator():
|
||||
override_class = configurator.ApacheConfigurator
|
||||
return override_class
|
||||
|
||||
|
||||
ENTRYPOINT = get_configurator()
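A short illustration of how the LooseVersion gate above behaves: Fedora releases older than 29 keep the CentOS-style override ("fedora_old"), newer ones get the dedicated FedoraConfigurator:
from distutils.version import LooseVersion  # pylint: disable=no-name-in-module,import-error

# Hedged sketch, purely illustrative version strings.
for release in ("27", "28", "29", "30"):
    key = "fedora_old" if LooseVersion(release) < LooseVersion("29") else "fedora"
    print(release, "->", key)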
|
||||
|
||||
@@ -1,15 +1,19 @@
|
||||
"""A class that performs HTTP-01 challenges for Apache"""
|
||||
import logging
|
||||
import os
|
||||
|
||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import List, Set # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
from certbot.compat import filesystem
|
||||
from certbot.plugins import common
|
||||
|
||||
from certbot_apache.obj import VirtualHost # pylint: disable=unused-import
|
||||
from certbot_apache.parser import get_aug_path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApacheHttp01(common.TLSSNI01):
|
||||
"""Class that performs HTTP-01 challenges within the Apache configurator."""
|
||||
|
||||
@@ -89,15 +93,27 @@ class ApacheHttp01(common.TLSSNI01):
|
||||
self.configurator.enable_mod(mod, temp=True)
|
||||
|
||||
def _mod_config(self):
|
||||
selected_vhosts = [] # type: List[VirtualHost]
|
||||
http_port = str(self.configurator.config.http01_port)
|
||||
for chall in self.achalls:
|
||||
vh = self.configurator.find_best_http_vhost(
|
||||
chall.domain, filter_defaults=False,
|
||||
port=str(self.configurator.config.http01_port))
|
||||
if vh:
|
||||
self._set_up_include_directives(vh)
|
||||
else:
|
||||
for vh in self._relevant_vhosts():
|
||||
self._set_up_include_directives(vh)
|
||||
# Search for matching VirtualHosts
|
||||
for vh in self._matching_vhosts(chall.domain):
|
||||
selected_vhosts.append(vh)
|
||||
|
||||
# Ensure that we have one or more VirtualHosts that we can continue
|
||||
# with. (one that listens to port configured with --http-01-port)
|
||||
found = False
|
||||
for vhost in selected_vhosts:
|
||||
if any(a.is_wildcard() or a.get_port() == http_port for a in vhost.addrs):
|
||||
found = True
|
||||
|
||||
if not found:
|
||||
for vh in self._relevant_vhosts():
|
||||
selected_vhosts.append(vh)
|
||||
|
||||
# Add the challenge configuration
|
||||
for vh in selected_vhosts:
|
||||
self._set_up_include_directives(vh)
|
||||
|
||||
self.configurator.reverter.register_file_creation(
|
||||
True, self.challenge_conf_pre)
|
||||
@@ -121,6 +137,20 @@ class ApacheHttp01(common.TLSSNI01):
|
||||
with open(self.challenge_conf_post, "w") as new_conf:
|
||||
new_conf.write(config_text_post)
|
||||
|
||||
def _matching_vhosts(self, domain):
|
||||
"""Return all VirtualHost objects that have the requested domain name or
|
||||
a wildcard name that would match the domain in ServerName or ServerAlias
|
||||
directive.
|
||||
"""
|
||||
matching_vhosts = []
|
||||
for vhost in self.configurator.vhosts:
|
||||
if self.configurator.domain_in_names(vhost.get_names(), domain):
|
||||
# domain_in_names also matches the exact names, so no need
|
||||
# to check "domain in vhost.get_names()" explicitly here
|
||||
matching_vhosts.append(vhost)
|
||||
|
||||
return matching_vhosts
|
||||
|
||||
def _relevant_vhosts(self):
|
||||
http01_port = str(self.configurator.config.http01_port)
|
||||
relevant_vhosts = []
|
||||
@@ -139,8 +169,7 @@ class ApacheHttp01(common.TLSSNI01):
|
||||
|
||||
def _set_up_challenges(self):
|
||||
if not os.path.isdir(self.challenge_dir):
|
||||
os.makedirs(self.challenge_dir)
|
||||
os.chmod(self.challenge_dir, 0o755)
|
||||
filesystem.makedirs(self.challenge_dir, 0o755)
|
||||
|
||||
responses = []
|
||||
for achall in self.achalls:
|
||||
@@ -156,7 +185,7 @@ class ApacheHttp01(common.TLSSNI01):
|
||||
self.configurator.reverter.register_file_creation(True, name)
|
||||
with open(name, 'wb') as f:
|
||||
f.write(validation.encode())
|
||||
os.chmod(name, 0o644)
|
||||
filesystem.chmod(name, 0o644)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@ class Addr(common.Addr):
|
||||
def __repr__(self):
|
||||
return "certbot_apache.obj.Addr(" + repr(self.tup) + ")"
|
||||
|
||||
def __hash__(self):
|
||||
def __hash__(self): # pylint: disable=useless-super-delegation
|
||||
# Python 3 requires explicit overridden for __hash__ if __eq__ or
|
||||
# __cmp__ is overridden. See https://bugs.python.org/issue2235
|
||||
return super(Addr, self).__hash__()
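The comment refers to a general Python 3 rule: a class that defines __eq__ gets __hash__ set to None unless it is overridden explicitly. A minimal sketch of that behaviour with illustrative classes:
# Hedged sketch: defining __eq__ silently disables hashing in Python 3.
class WithEqOnly:
    def __eq__(self, other):
        return isinstance(other, WithEqOnly)

class WithEqAndHash(WithEqOnly):
    def __hash__(self):  # re-enable hashing explicitly
        return 0

print(WithEqOnly.__hash__)    # None -> instances are unhashable
print(hash(WithEqAndHash()))  # 0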
|
||||
@@ -47,8 +47,7 @@ class Addr(common.Addr):
|
||||
return 0
|
||||
elif self.get_addr() == "*":
|
||||
return 1
|
||||
else:
|
||||
return 2
|
||||
return 2
|
||||
|
||||
def conflicts(self, addr):
|
||||
r"""Returns if address could conflict with correct function of self.
|
||||
|
||||
@@ -1,14 +1,24 @@
|
||||
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
|
||||
import pkg_resources
|
||||
import logging
|
||||
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.errors import MisconfigurationError
|
||||
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import configurator
|
||||
from certbot_apache import parser
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
"""CentOS specific ApacheConfigurator override class"""
|
||||
@@ -33,6 +43,38 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
"certbot_apache", "centos-options-ssl-apache.conf")
|
||||
)
|
||||
|
||||
def config_test(self):
|
||||
"""
|
||||
Override config_test to mitigate a configtest error in a vanilla installation
of mod_ssl in Fedora. The error is caused by non-existent self-signed
certificates referenced by the configuration, which would be autogenerated
during the first (re)start of httpd.
|
||||
"""
|
||||
|
||||
os_info = util.get_os_info()
|
||||
fedora = os_info[0].lower() == "fedora"
|
||||
|
||||
try:
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
except errors.MisconfigurationError:
|
||||
if fedora:
|
||||
self._try_restart_fedora()
|
||||
else:
|
||||
raise
|
||||
|
||||
def _try_restart_fedora(self):
|
||||
"""
|
||||
Tries to restart httpd using systemctl to generate the self-signed key pair.
|
||||
"""
|
||||
|
||||
try:
|
||||
util.run_script(['systemctl', 'restart', 'httpd'])
|
||||
except errors.SubprocessError as err:
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization in order to support
|
||||
@@ -44,9 +86,86 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return CentOSParser(
|
||||
self.aug, self.option("server_root"), self.option("vhost_root"),
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _deploy_cert(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
"""
|
||||
Override _deploy_cert in order to ensure that the Apache configuration
|
||||
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
|
||||
that was created by Certbot
|
||||
"""
|
||||
super(CentOSConfigurator, self)._deploy_cert(*args, **kwargs)
|
||||
if self.version < (2, 4, 0):
|
||||
self._deploy_loadmodule_ssl_if_needed()
|
||||
|
||||
def _deploy_loadmodule_ssl_if_needed(self):
|
||||
"""
|
||||
Add "LoadModule ssl_module <pre-existing path>" to main httpd.conf if
|
||||
it doesn't exist there already.
|
||||
"""
|
||||
|
||||
loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
correct_ifmods = [] # type: List[str]
|
||||
loadmod_args = [] # type: List[str]
|
||||
loadmod_paths = [] # type: List[str]
|
||||
for m in loadmods:
|
||||
noarg_path = m.rpartition("/")[0]
|
||||
path_args = self.parser.get_all_args(noarg_path)
|
||||
if loadmod_args:
|
||||
if loadmod_args != path_args:
|
||||
msg = ("Certbot encountered multiple LoadModule directives "
|
||||
"for LoadModule ssl_module with differing library paths. "
|
||||
"Please remove or comment out the one(s) that are not in "
|
||||
"use, and run Certbot again.")
|
||||
raise MisconfigurationError(msg)
|
||||
else:
|
||||
loadmod_args = path_args
|
||||
|
||||
if self.parser.not_modssl_ifmodule(noarg_path): # pylint: disable=no-member
|
||||
if self.parser.loc["default"] in noarg_path:
|
||||
# LoadModule already in the main configuration file
|
||||
if ("ifmodule/" in noarg_path.lower() or
|
||||
"ifmodule[1]" in noarg_path.lower()):
|
||||
# It's the first or only IfModule in the file
|
||||
return
|
||||
# Populate the list of known !mod_ssl.c IfModules
|
||||
nodir_path = noarg_path.rpartition("/directive")[0]
|
||||
correct_ifmods.append(nodir_path)
|
||||
else:
|
||||
loadmod_paths.append(noarg_path)
|
||||
|
||||
if not loadmod_args:
|
||||
# Do not try to enable mod_ssl
|
||||
return
|
||||
|
||||
# Force creation as the directive wasn't found from the beginning of
|
||||
# httpd.conf
|
||||
rootconf_ifmod = self.parser.create_ifmod(
|
||||
parser.get_aug_path(self.parser.loc["default"]),
|
||||
"!mod_ssl.c", beginning=True)
|
||||
# parser.get_ifmod returns a path postfixed with "/", remove that
|
||||
self.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", loadmod_args)
|
||||
correct_ifmods.append(rootconf_ifmod[:-1])
|
||||
self.save_notes += "Added LoadModule ssl_module to main configuration.\n"
|
||||
|
||||
# Wrap LoadModule mod_ssl inside of <IfModule !mod_ssl.c> if it's not
|
||||
# configured like this already.
|
||||
for loadmod_path in loadmod_paths:
|
||||
nodir_path = loadmod_path.split("/directive")[0]
|
||||
# Remove the old LoadModule directive
|
||||
self.parser.aug.remove(loadmod_path)
|
||||
|
||||
# Create a new IfModule !mod_ssl.c if not already found on path
|
||||
ssl_ifmod = self.parser.get_ifmod(nodir_path, "!mod_ssl.c",
|
||||
beginning=True)[:-1]
|
||||
if ssl_ifmod not in correct_ifmods:
|
||||
self.parser.add_dir(ssl_ifmod, "LoadModule", loadmod_args)
|
||||
correct_ifmods.append(ssl_ifmod)
|
||||
self.save_notes += ("Wrapped pre-existing LoadModule ssl_module "
|
||||
"inside of <IfModule !mod_ssl> block.\n")
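After this method runs, every remaining "LoadModule ssl_module" directive should sit inside an <IfModule !mod_ssl.c> block. A minimal sketch of that invariant (object names illustrative; it mirrors the assertions in the CentOS tests further down in this diff):
loadmods = config.parser.find_dir("LoadModule", "ssl_module", exclude=False)
for lm in loadmods:
    # rpartition strips the trailing "/arg[#]" so we check the directive node
    assert config.parser.not_modssl_ifmodule(lm.rpartition("/")[0])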
|
||||
|
||||
|
||||
class CentOSParser(parser.ApacheParser):
|
||||
"""CentOS specific ApacheParser override class"""
|
||||
@@ -64,5 +183,35 @@ class CentOSParser(parser.ApacheParser):
|
||||
def parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from CentOS configuration file """
|
||||
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
|
||||
for k in defines.keys():
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
|
||||
def not_modssl_ifmodule(self, path):
|
||||
"""Checks if the provided Augeas path has argument !mod_ssl"""
|
||||
|
||||
if "ifmodule" not in path.lower():
|
||||
return False
|
||||
|
||||
# Trim the path to the last ifmodule
|
||||
workpath = path.lower()
|
||||
while workpath:
|
||||
# Get path to the last IfModule (ignore the tail)
|
||||
parts = workpath.rpartition("ifmodule")
|
||||
|
||||
if not parts[0]:
|
||||
# IfModule not found
|
||||
break
|
||||
ifmod_path = parts[0] + parts[1]
|
||||
# Check if ifmodule had an index
|
||||
if parts[2].startswith("["):
|
||||
# Append the index from tail
|
||||
ifmod_path += parts[2].partition("/")[0]
|
||||
# Get the original path trimmed to correct length
|
||||
# This is required to preserve cases
|
||||
ifmod_real_path = path[0:len(ifmod_path)]
|
||||
if "!mod_ssl.c" in self.get_all_args(ifmod_real_path):
|
||||
return True
|
||||
# Set the workpath to the heading part
|
||||
workpath = parts[0]
|
||||
|
||||
return False
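A worked illustration of the trimming above, using a hypothetical Augeas path:
path = "/files/etc/httpd/conf/httpd.conf/IfModule[2]/directive[1]"
parts = path.lower().rpartition("ifmodule")
# parts == ("/files/etc/httpd/conf/httpd.conf/", "ifmodule", "[2]/directive[1]")
ifmod_path = parts[0] + parts[1] + parts[2].partition("/")[0]
# ifmod_path == ".../ifmodule[2]"; the original casing is restored via
# path[0:len(ifmod_path)] before calling get_all_args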
|
||||
|
||||
@@ -1,19 +1,21 @@
|
||||
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
|
||||
import logging
|
||||
import os
|
||||
import pkg_resources
|
||||
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import configurator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
"""Debian specific ApacheConfigurator override class"""
|
||||
@@ -51,7 +53,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
|
||||
"""
|
||||
if vhost.enabled:
|
||||
return
|
||||
return None
|
||||
|
||||
enabled_path = ("%s/sites-enabled/%s" %
|
||||
(self.parser.root,
|
||||
@@ -64,11 +66,11 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
try:
|
||||
os.symlink(vhost.filep, enabled_path)
|
||||
except OSError as err:
|
||||
if os.path.islink(enabled_path) and os.path.realpath(
|
||||
if os.path.islink(enabled_path) and filesystem.realpath(
|
||||
enabled_path) == vhost.filep:
|
||||
# Already in shape
|
||||
vhost.enabled = True
|
||||
return
|
||||
return None
|
||||
else:
|
||||
logger.warning(
|
||||
"Could not symlink %s to %s, got error: %s", enabled_path,
|
||||
@@ -81,9 +83,9 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
vhost.enabled = True
|
||||
logger.info("Enabling available site: %s", vhost.filep)
|
||||
self.save_notes += "Enabled site %s\n" % vhost.filep
|
||||
return None
|
||||
|
||||
def enable_mod(self, mod_name, temp=False):
|
||||
# pylint: disable=unused-argument
|
||||
"""Enables module in Apache.
|
||||
|
||||
Both enables and reloads Apache so module is active.
|
||||
|
||||
certbot-apache/certbot_apache/override_fedora.py (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
""" Distribution specific override class for Fedora 29+ """
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import configurator
|
||||
from certbot_apache import parser
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
"""Fedora 29+ specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf.d",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/httpd",
|
||||
ctl="httpd",
|
||||
version_cmd=['httpd', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf.d",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
# TODO: eventually newer versions of Fedora will need their own config
|
||||
"certbot_apache", "centos-options-ssl-apache.conf")
|
||||
)
|
||||
|
||||
def config_test(self):
|
||||
"""
|
||||
Override config_test to mitigate a configtest error in a vanilla installation
of mod_ssl on Fedora. The error is caused by non-existent self-signed
certificates referenced by the configuration, which would be autogenerated
during the first (re)start of httpd.
|
||||
"""
|
||||
try:
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
except errors.MisconfigurationError:
|
||||
self._try_restart_fedora()
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return FedoraParser(
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _try_restart_fedora(self):
|
||||
"""
|
||||
Tries to restart httpd using systemctl to generate the self-signed keypair.
|
||||
"""
|
||||
try:
|
||||
util.run_script(['systemctl', 'restart', 'httpd'])
|
||||
except errors.SubprocessError as err:
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization to keep using apachectl
instead of httpd, so that the newer apachectl wrapper script shipped with
recent Fedora releases is used to restart httpd.
|
||||
"""
|
||||
super(FedoraConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd"][0] = 'apachectl'
|
||||
self.options["restart_cmd_alt"][0] = 'apachectl'
|
||||
self.options["conftest_cmd"][0] = 'apachectl'
|
||||
|
||||
|
||||
class FedoraParser(parser.ApacheParser):
|
||||
"""Fedora 29+ specific ApacheParser override class"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Fedora 29+ specific configuration file for Apache
|
||||
self.sysconfig_filep = "/etc/sysconfig/httpd"
|
||||
super(FedoraParser, self).__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
# Opportunistic, works if SELinux not enforced
|
||||
super(FedoraParser, self).update_runtime_variables()
|
||||
self._parse_sysconfig_var()
|
||||
|
||||
def _parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from Fedora configuration file """
|
||||
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
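A hedged sketch of what the sysconfig parsing is expected to produce (file contents hypothetical):
from certbot_apache import apache_util

# For a line like OPTIONS="-DSSL -DMYDEF=value" in /etc/sysconfig/httpd,
# parse_define_file is expected to return {"SSL": "", "MYDEF": "value"},
# which this method copies into self.variables.
defines = apache_util.parse_define_file("/etc/sysconfig/httpd", "OPTIONS")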
|
||||
@@ -44,7 +44,7 @@ class GentooConfigurator(configurator.ApacheConfigurator):
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return GentooParser(
|
||||
self.aug, self.option("server_root"), self.option("vhost_root"),
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
|
||||
@@ -64,7 +64,7 @@ class GentooParser(parser.ApacheParser):
|
||||
""" Parses Apache CLI options from Gentoo configuration file """
|
||||
defines = apache_util.parse_define_file(self.apacheconfig_filep,
|
||||
"APACHE2_OPTS")
|
||||
for k in defines.keys():
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
|
||||
def update_modules(self):
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
@@ -10,7 +9,11 @@ import sys
|
||||
import six
|
||||
|
||||
from acme.magic_typing import Dict, List, Set # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import constants
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -31,7 +34,7 @@ class ApacheParser(object):
|
||||
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
|
||||
fnmatch_chars = set(["*", "?", "\\", "[", "]"])
|
||||
|
||||
def __init__(self, aug, root, vhostroot=None, version=(2, 4),
|
||||
def __init__(self, root, vhostroot=None, version=(2, 4),
|
||||
configurator=None):
|
||||
# Note: Order is important here.
|
||||
|
||||
@@ -40,11 +43,20 @@ class ApacheParser(object):
|
||||
# issues with aug.load() after adding new files / defines to parse tree
|
||||
self.configurator = configurator
|
||||
|
||||
# Initialize augeas
|
||||
self.aug = None
|
||||
self.init_augeas()
|
||||
|
||||
if not self.check_aug_version():
|
||||
raise errors.NotSupportedError(
|
||||
"Apache plugin support requires libaugeas0 and augeas-lenses "
|
||||
"version 1.2.0 or higher, please make sure you have you have "
|
||||
"those installed.")
|
||||
|
||||
self.modules = set() # type: Set[str]
|
||||
self.parser_paths = {} # type: Dict[str, List[str]]
|
||||
self.variables = {} # type: Dict[str, str]
|
||||
|
||||
self.aug = aug
|
||||
# Find configuration root and make sure augeas can parse it.
|
||||
self.root = os.path.abspath(root)
|
||||
self.loc = {"root": self._find_config_root()}
|
||||
@@ -76,6 +88,146 @@ class ApacheParser(object):
|
||||
if self.find_dir("Define", exclude=False):
|
||||
raise errors.PluginError("Error parsing runtime variables")
|
||||
|
||||
def init_augeas(self):
|
||||
""" Initialize the actual Augeas instance """
|
||||
|
||||
try:
|
||||
import augeas
|
||||
except ImportError: # pragma: no cover
|
||||
raise errors.NoInstallationError("Problem in Augeas installation")
|
||||
|
||||
self.aug = augeas.Augeas(
|
||||
# specify a directory to load our preferred lens from
|
||||
loadpath=constants.AUGEAS_LENS_DIR,
|
||||
# Do not save backup (we do it ourselves), do not load
|
||||
# anything by default
|
||||
flags=(augeas.Augeas.NONE |
|
||||
augeas.Augeas.NO_MODL_AUTOLOAD |
|
||||
augeas.Augeas.ENABLE_SPAN))
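ENABLE_SPAN is requested so that Augeas can report which file and byte range a node was parsed from; a hedged sketch of such a lookup (path illustrative):
span = self.aug.span("/files/etc/httpd/conf/httpd.conf/VirtualHost[1]")
filename, span_start, span_end = span[0], span[5], span[6]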
|
||||
|
||||
def check_parsing_errors(self, lens):
|
||||
"""Verify Augeas can parse all of the lens files.
|
||||
|
||||
:param str lens: lens to check for errors
|
||||
|
||||
:raises .errors.PluginError: If there has been an error in parsing with
|
||||
the specified lens.
|
||||
|
||||
"""
|
||||
error_files = self.aug.match("/augeas//error")
|
||||
|
||||
for path in error_files:
|
||||
# Check to see if it was an error resulting from the use of
|
||||
# the httpd lens
|
||||
lens_path = self.aug.get(path + "/lens")
|
||||
# As aug.get may return null
|
||||
if lens_path and lens in lens_path:
|
||||
msg = (
|
||||
"There has been an error in parsing the file {0} on line {1}: "
|
||||
"{2}".format(
|
||||
# Strip off /augeas/files and /error
|
||||
path[13:len(path) - 6],
|
||||
self.aug.get(path + "/line"),
|
||||
self.aug.get(path + "/message")))
|
||||
raise errors.PluginError(msg)
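The slicing of the error path can be illustrated with a hypothetical error node:
path = "/augeas/files/etc/httpd/conf/httpd.conf/error"
path[13:len(path) - 6]   # -> "/etc/httpd/conf/httpd.conf"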
|
||||
|
||||
def check_aug_version(self):
|
||||
""" Checks that we have recent enough version of libaugeas.
|
||||
If augeas version is recent enough, it will support case insensitive
|
||||
regexp matching"""
|
||||
|
||||
self.aug.set("/test/path/testing/arg", "aRgUMeNT")
|
||||
try:
|
||||
matches = self.aug.match(
|
||||
"/test//*[self::arg=~regexp('argument', 'i')]")
|
||||
except RuntimeError:
|
||||
self.aug.remove("/test/path")
|
||||
return False
|
||||
self.aug.remove("/test/path")
|
||||
return matches
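A hedged sketch of the capability being probed (path hypothetical; not necessarily the exact query the parser builds): libaugeas 1.2.0+ accepts the 'i' flag for case-insensitive regexp matching, e.g.
matches = self.aug.match(
    "/files/etc/httpd/conf/httpd.conf//*"
    "[self::directive=~regexp('loadmodule', 'i')]")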
|
||||
|
||||
def unsaved_files(self):
|
||||
Lists files whose Augeas DOM has been modified but whose changes have not yet
been written to the filesystem. Used by `self.save()` and by
ApacheConfigurator to check the file state.
|
||||
|
||||
:raises .errors.PluginError: If there was an error in Augeas, in
|
||||
an attempt to save the configuration, or an error creating a
|
||||
checkpoint
|
||||
|
||||
:returns: `set` of unsaved files
|
||||
"""
|
||||
save_state = self.aug.get("/augeas/save")
|
||||
self.aug.set("/augeas/save", "noop")
|
||||
# Existing Errors
|
||||
ex_errs = self.aug.match("/augeas//error")
|
||||
try:
|
||||
# This is a noop save
|
||||
self.aug.save()
|
||||
except (RuntimeError, IOError):
|
||||
self._log_save_errors(ex_errs)
|
||||
# Erase Save Notes
|
||||
self.configurator.save_notes = ""
|
||||
raise errors.PluginError(
|
||||
"Error saving files, check logs for more info.")
|
||||
|
||||
# Return the original save method
|
||||
self.aug.set("/augeas/save", save_state)
|
||||
|
||||
# Retrieve list of modified files
|
||||
# Note: Noop saves can cause the file to be listed twice, I used a
|
||||
# set to remove this possibility. This is a known augeas 0.10 error.
|
||||
save_paths = self.aug.match("/augeas/events/saved")
|
||||
|
||||
save_files = set()
|
||||
if save_paths:
|
||||
for path in save_paths:
|
||||
save_files.add(self.aug.get(path)[6:])
|
||||
return save_files
|
||||
|
||||
def ensure_augeas_state(self):
|
||||
Makes sure that all Augeas DOM changes are written out to the files, to avoid
losing configuration directives when additional Augeas parsing triggers an
augeas.load() that would otherwise reset the in-memory DOM.
|
||||
"""
|
||||
|
||||
if self.unsaved_files():
|
||||
self.configurator.save_notes += "(autosave)"
|
||||
self.configurator.save()
|
||||
|
||||
def save(self, save_files):
|
||||
"""Saves all changes to the configuration files.
|
||||
|
||||
save() is called from ApacheConfigurator to handle the parser specific
|
||||
tasks of saving.
|
||||
|
||||
:param list save_files: list of strings of file paths that we need to save.
|
||||
|
||||
"""
|
||||
self.configurator.save_notes = ""
|
||||
self.aug.save()
|
||||
|
||||
# Force reload if files were modified
|
||||
# This is needed to recalculate augeas directive span
|
||||
if save_files:
|
||||
for sf in save_files:
|
||||
self.aug.remove("/files/"+sf)
|
||||
self.aug.load()
|
||||
|
||||
def _log_save_errors(self, ex_errs):
|
||||
"""Log errors due to bad Augeas save.
|
||||
|
||||
:param list ex_errs: Existing errors before save
|
||||
|
||||
"""
|
||||
# Check for the root of save problems
|
||||
new_errs = self.aug.match("/augeas//error")
|
||||
# logger.error("During Save - %s", mod_conf)
|
||||
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
|
||||
", ".join(err[13:len(err) - 6] for err in new_errs
|
||||
# Only new errors caused by recent save
|
||||
if err not in ex_errs), self.configurator.save_notes)
|
||||
|
||||
def add_include(self, main_config, inc_path):
|
||||
"""Add Include for a new configuration file if one does not exist
|
||||
|
||||
@@ -83,7 +235,7 @@ class ApacheParser(object):
|
||||
:param str inc_path: path of file to include
|
||||
|
||||
"""
|
||||
if len(self.find_dir(case_i("Include"), inc_path)) == 0:
|
||||
if not self.find_dir(case_i("Include"), inc_path):
|
||||
logger.debug("Adding Include %s to %s",
|
||||
inc_path, get_aug_path(main_config))
|
||||
self.add_dir(
|
||||
@@ -93,12 +245,7 @@ class ApacheParser(object):
|
||||
# Add new path to parser paths
|
||||
new_dir = os.path.dirname(inc_path)
|
||||
new_file = os.path.basename(inc_path)
|
||||
if new_dir in self.existing_paths.keys():
|
||||
# Add to existing path
|
||||
self.existing_paths[new_dir].append(new_file)
|
||||
else:
|
||||
# Create a new path
|
||||
self.existing_paths[new_dir] = [new_file]
|
||||
self.existing_paths.setdefault(new_dir, []).append(new_file)
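The setdefault call collapses the previous if/else into one line; for clarity (values hypothetical):
paths = {}
paths.setdefault("/etc/httpd/conf.d", []).append("ssl.conf")
paths.setdefault("/etc/httpd/conf.d", []).append("le.conf")
# paths == {"/etc/httpd/conf.d": ["ssl.conf", "le.conf"]}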
|
||||
|
||||
def add_mod(self, mod_name):
|
||||
"""Shortcut for updating parser modules."""
|
||||
@@ -139,7 +286,7 @@ class ApacheParser(object):
|
||||
mods.add(os.path.basename(mod_filename)[:-2] + "c")
|
||||
else:
|
||||
logger.debug("Could not read LoadModule directive from " +
|
||||
"Augeas path: {0}".format(match_name[6:]))
|
||||
"Augeas path: %s", match_name[6:])
|
||||
self.modules.update(mods)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
@@ -229,8 +376,8 @@ class ApacheParser(object):
|
||||
"Error running command %s for runtime parameters!%s",
|
||||
command, os.linesep)
|
||||
raise errors.MisconfigurationError(
|
||||
"Error accessing loaded Apache parameters: %s",
|
||||
command)
|
||||
"Error accessing loaded Apache parameters: {0}".format(
|
||||
command))
|
||||
# Small errors that do not impede
|
||||
if proc.returncode != 0:
|
||||
logger.warning("Error in checking parameter list: %s", stderr)
|
||||
@@ -256,12 +403,12 @@ class ApacheParser(object):
|
||||
"""
|
||||
filtered = []
|
||||
if args == 1:
|
||||
for i in range(len(matches)):
|
||||
if matches[i].endswith("/arg"):
|
||||
for i, match in enumerate(matches):
|
||||
if match.endswith("/arg"):
|
||||
filtered.append(matches[i][:-4])
|
||||
else:
|
||||
for i in range(len(matches)):
|
||||
if matches[i].endswith("/arg[%d]" % args):
|
||||
for i, match in enumerate(matches):
|
||||
if match.endswith("/arg[%d]" % args):
|
||||
# Make sure we don't cause an IndexError (end of list)
|
||||
# Check to make sure arg + 1 doesn't exist
|
||||
if (i == (len(matches) - 1) or
|
||||
@@ -286,7 +433,7 @@ class ApacheParser(object):
|
||||
"""
|
||||
# TODO: Add error checking code... does the path given even exist?
|
||||
# Does it throw exceptions?
|
||||
if_mod_path = self._get_ifmod(aug_conf_path, "mod_ssl.c")
|
||||
if_mod_path = self.get_ifmod(aug_conf_path, "mod_ssl.c")
|
||||
# IfModule can have only one valid argument, so append after
|
||||
self.aug.insert(if_mod_path + "arg", "directive", False)
|
||||
nvh_path = if_mod_path + "directive[1]"
|
||||
@@ -297,22 +444,54 @@ class ApacheParser(object):
|
||||
for i, arg in enumerate(args):
|
||||
self.aug.set("%s/arg[%d]" % (nvh_path, i + 1), arg)
|
||||
|
||||
def _get_ifmod(self, aug_conf_path, mod):
|
||||
def get_ifmod(self, aug_conf_path, mod, beginning=False):
|
||||
"""Returns the path to <IfMod mod> and creates one if it doesn't exist.
|
||||
|
||||
:param str aug_conf_path: Augeas configuration path
|
||||
:param str mod: module ie. mod_ssl.c
|
||||
:param bool beginning: If the IfModule should be created to the beginning
|
||||
of augeas path DOM tree.
|
||||
|
||||
:returns: Augeas path of the requested IfModule directive that pre-existed
|
||||
or was created during the process. The path may be dynamic,
|
||||
i.e. .../IfModule[last()]
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
if_mods = self.aug.match(("%s/IfModule/*[self::arg='%s']" %
|
||||
(aug_conf_path, mod)))
|
||||
if len(if_mods) == 0:
|
||||
self.aug.set("%s/IfModule[last() + 1]" % aug_conf_path, "")
|
||||
self.aug.set("%s/IfModule[last()]/arg" % aug_conf_path, mod)
|
||||
if_mods = self.aug.match(("%s/IfModule/*[self::arg='%s']" %
|
||||
(aug_conf_path, mod)))
|
||||
if not if_mods:
|
||||
return self.create_ifmod(aug_conf_path, mod, beginning)
|
||||
|
||||
# Strip off "arg" at end of first ifmod path
|
||||
return if_mods[0][:len(if_mods[0]) - 3]
|
||||
return if_mods[0].rpartition("arg")[0]
|
||||
|
||||
def create_ifmod(self, aug_conf_path, mod, beginning=False):
|
||||
"""Creates a new <IfMod mod> and returns its path.
|
||||
|
||||
:param str aug_conf_path: Augeas configuration path
|
||||
:param str mod: module ie. mod_ssl.c
|
||||
:param bool beginning: If the IfModule should be created to the beginning
|
||||
of augeas path DOM tree.
|
||||
|
||||
:returns: Augeas path of the newly created IfModule directive.
|
||||
The path may be dynamic, i.e. .../IfModule[last()]
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
if beginning:
|
||||
c_path_arg = "{}/IfModule[1]/arg".format(aug_conf_path)
|
||||
# Insert IfModule before the first directive
|
||||
self.aug.insert("{}/directive[1]".format(aug_conf_path),
|
||||
"IfModule", True)
|
||||
retpath = "{}/IfModule[1]/".format(aug_conf_path)
|
||||
else:
|
||||
c_path = "{}/IfModule[last() + 1]".format(aug_conf_path)
|
||||
c_path_arg = "{}/IfModule[last()]/arg".format(aug_conf_path)
|
||||
self.aug.set(c_path, "")
|
||||
retpath = "{}/IfModule[last()]/".format(aug_conf_path)
|
||||
self.aug.set(c_path_arg, mod)
|
||||
return retpath
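A hedged usage sketch (paths illustrative), matching how the CentOS override and the tests below consume these helpers; note the trailing "/" being trimmed before add_dir:
ifmod = parser.get_ifmod("/files/etc/httpd/conf/httpd.conf",
                         "!mod_ssl.c", beginning=True)
parser.add_dir(ifmod[:-1], "LoadModule", ["ssl_module", "modules/mod_ssl.so"])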
|
||||
|
||||
def add_dir(self, aug_conf_path, directive, args):
|
||||
"""Appends directive to the end fo the file given by aug_conf_path.
|
||||
@@ -458,6 +637,20 @@ class ApacheParser(object):
|
||||
|
||||
return ordered_matches
|
||||
|
||||
def get_all_args(self, match):
|
||||
"""
|
||||
Tries to fetch all arguments for a directive. See get_arg.
|
||||
|
||||
Note that if match is an ancestor node, it returns all names of
|
||||
child directives as well as the list of arguments.
|
||||
|
||||
"""
|
||||
|
||||
if match[-1] != "/":
|
||||
match = match+"/"
|
||||
allargs = self.aug.match(match + '*')
|
||||
return [self.get_arg(arg) for arg in allargs]
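For example (grounded in the CentOS tests below), for a node parsed from "LoadModule ssl_module modules/mod_ssl.so", calling get_all_args on the directive's Augeas node should yield its argument list:
args = parser.get_all_args(directive_path)   # directive_path is illustrative
# args == ["ssl_module", "modules/mod_ssl.so"]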
|
||||
|
||||
def get_arg(self, match):
|
||||
"""Uses augeas.get to get argument value and interprets result.
|
||||
|
||||
@@ -601,9 +794,8 @@ class ApacheParser(object):
|
||||
if sys.version_info < (3, 6):
|
||||
# This strips off final /Z(?ms)
|
||||
return fnmatch.translate(clean_fn_match)[:-7]
|
||||
else: # pragma: no cover
|
||||
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
|
||||
return fnmatch.translate(clean_fn_match)[4:-3]
|
||||
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
|
||||
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
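Concretely, on Python 3.6+ the slicing keeps only the core of the translated pattern:
import fnmatch
pat = fnmatch.translate("*.load")   # '(?s:.*\\.load)\\Z'
core = pat[4:-3]                    # '.*\\.load'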
|
||||
|
||||
def parse_file(self, filepath):
|
||||
"""Parse file with Augeas
|
||||
@@ -617,8 +809,7 @@ class ApacheParser(object):
|
||||
use_new, remove_old = self._check_path_actions(filepath)
|
||||
# Ensure that we have the latest Augeas DOM state on disk before
|
||||
# calling aug.load() which reloads the state from disk
|
||||
if self.configurator:
|
||||
self.configurator.ensure_augeas_state()
|
||||
self.ensure_augeas_state()
|
||||
# Test if augeas included file for Httpd.lens
|
||||
# Note: This works for augeas globs, ie. *.conf
|
||||
if use_new:
|
||||
@@ -685,10 +876,7 @@ class ApacheParser(object):
|
||||
use_new = False
|
||||
else:
|
||||
use_new = True
|
||||
if new_file_match == "*":
|
||||
remove_old = True
|
||||
else:
|
||||
remove_old = False
|
||||
remove_old = new_file_match == "*"
|
||||
except KeyError:
|
||||
use_new = True
|
||||
remove_old = False
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
This executable script wraps the apache-conf-test bash script in order to set up
a Pebble instance before its execution. The ACME directory URL is passed to the
test script through the SERVER environment variable.
|
||||
"""
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from certbot_integration_tests.utils import acme_server
|
||||
|
||||
SCRIPT_DIRNAME = os.path.dirname(__file__)
|
||||
|
||||
|
||||
def main(args=None):
|
||||
if not args:
|
||||
args = sys.argv[1:]
|
||||
with acme_server.ACMEServer('pebble', [], False) as acme_xdist:
|
||||
environ = os.environ.copy()
|
||||
environ['SERVER'] = acme_xdist['directory_url']
|
||||
command = [os.path.join(SCRIPT_DIRNAME, 'apache-conf-test')]
|
||||
command.extend(args)
|
||||
return subprocess.call(command, env=environ)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@@ -1,7 +1,7 @@
|
||||
#LoadModule ssl_module modules/mod_ssl.so
|
||||
|
||||
Listen 443
|
||||
<VirtualHost *:443>
|
||||
Listen 4443
|
||||
<VirtualHost *:4443>
|
||||
# The ServerName directive sets the request scheme, hostname and port that
|
||||
# the server uses to identify itself. This is used when creating
|
||||
# redirection URLs. In the context of virtual hosts, the ServerName
|
||||
|
||||
@@ -35,8 +35,9 @@ class AutoHSTSTest(util.ApacheTest):
|
||||
pat = '(?:[ "]|^)(strict-transport-security)(?:[ "]|$)'
|
||||
for head in header_path:
|
||||
if re.search(pat, self.config.parser.aug.get(head).lower()):
|
||||
return self.config.parser.aug.get(head.replace("arg[3]",
|
||||
"arg[4]"))
|
||||
return self.config.parser.aug.get(
|
||||
head.replace("arg[3]", "arg[4]"))
|
||||
return None # pragma: no cover
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.enable_mod")
|
||||
|
||||
certbot-apache/certbot_apache/tests/centos6_test.py (new file, 222 lines)
@@ -0,0 +1,222 @@
|
||||
"""Test for certbot_apache.configurator for CentOS 6 overrides"""
|
||||
import unittest
|
||||
|
||||
from certbot.compat import os
|
||||
from certbot.errors import MisconfigurationError
|
||||
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import override_centos
|
||||
from certbot_apache import parser
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
def get_vh_truth(temp_dir, config_name):
|
||||
"""Return the ground truth for the specified directory."""
|
||||
prefix = os.path.join(
|
||||
temp_dir, config_name, "httpd/conf.d")
|
||||
|
||||
aug_pre = "/files" + prefix
|
||||
vh_truth = [
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "test.example.com.conf"),
|
||||
os.path.join(aug_pre, "test.example.com.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("*:80")]),
|
||||
False, True, "test.example.com"),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "ssl.conf"),
|
||||
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("_default_:443")]),
|
||||
True, True, None)
|
||||
]
|
||||
return vh_truth
|
||||
|
||||
class CentOS6Tests(util.ApacheTest):
|
||||
"""Tests for CentOS 6"""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
test_dir = "centos6_apache/apache"
|
||||
config_root = "centos6_apache/apache/httpd"
|
||||
vhost_root = "centos6_apache/apache/httpd/conf.d"
|
||||
super(CentOS6Tests, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
version=(2, 2, 15), os_info="centos")
|
||||
self.vh_truth = get_vh_truth(
|
||||
self.temp_dir, "centos6_apache/apache")
|
||||
|
||||
def test_get_parser(self):
|
||||
self.assertTrue(isinstance(self.config.parser,
|
||||
override_centos.CentOSParser))
|
||||
|
||||
def test_get_virtual_hosts(self):
|
||||
"""Make sure all vhosts are being properly found."""
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 2)
|
||||
found = 0
|
||||
|
||||
for vhost in vhs:
|
||||
for centos_truth in self.vh_truth:
|
||||
if vhost == centos_truth:
|
||||
found += 1
|
||||
break
|
||||
else:
|
||||
raise Exception("Missed: %s" % vhost) # pragma: no cover
|
||||
self.assertEqual(found, 2)
|
||||
|
||||
def test_loadmod_default(self):
|
||||
ssl_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
self.assertEqual(len(ssl_loadmods), 1)
|
||||
# Make sure the LoadModule ssl_module is in ssl.conf (default)
|
||||
self.assertTrue("ssl.conf" in ssl_loadmods[0])
|
||||
# ...and that it's not inside of <IfModule>
|
||||
self.assertFalse("IfModule" in ssl_loadmods[0])
|
||||
|
||||
# Get the example vhost
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.config.save()
|
||||
|
||||
post_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
# We should now have LoadModule ssl_module in root conf and ssl.conf
|
||||
self.assertEqual(len(post_loadmods), 2)
|
||||
for lm in post_loadmods:
|
||||
# lm[:-7] removes "/arg[#]" from the path
|
||||
arguments = self.config.parser.get_all_args(lm[:-7])
|
||||
self.assertEqual(arguments, ["ssl_module", "modules/mod_ssl.so"])
|
||||
# ...and both of them should be wrapped in <IfModule !mod_ssl.c>
|
||||
# lm[:-17] strips off /directive/arg[1] from the path.
|
||||
ifmod_args = self.config.parser.get_all_args(lm[:-17])
|
||||
self.assertTrue("!mod_ssl.c" in ifmod_args)
|
||||
|
||||
def test_loadmod_multiple(self):
|
||||
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
# Adds another LoadModule to main httpd.conf in addtition to ssl.conf
|
||||
self.config.parser.add_dir(self.config.parser.loc["default"], "LoadModule",
|
||||
sslmod_args)
|
||||
self.config.save()
|
||||
pre_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
# LoadModules are not within IfModule blocks
|
||||
self.assertFalse(any(["ifmodule" in m.lower() for m in pre_loadmods]))
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
post_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
for mod in post_loadmods:
|
||||
self.assertTrue(self.config.parser.not_modssl_ifmodule(mod)) #pylint: disable=no-member
|
||||
|
||||
def test_loadmod_rootconf_exists(self):
|
||||
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
rootconf_ifmod = self.config.parser.get_ifmod(
|
||||
parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
"!mod_ssl.c", beginning=True)
|
||||
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
|
||||
self.config.save()
|
||||
# Get the example vhost
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.config.save()
|
||||
|
||||
root_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module",
|
||||
start=parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
exclude=False)
|
||||
|
||||
mods = [lm for lm in root_loadmods if self.config.parser.loc["default"] in lm]
|
||||
|
||||
self.assertEqual(len(mods), 1)
|
||||
# [:-7] removes "/arg[#]" from the path
|
||||
self.assertEqual(
|
||||
self.config.parser.get_all_args(mods[0][:-7]),
|
||||
sslmod_args)
|
||||
|
||||
def test_neg_loadmod_already_on_path(self):
|
||||
loadmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
ifmod = self.config.parser.get_ifmod(
|
||||
self.vh_truth[1].path, "!mod_ssl.c", beginning=True)
|
||||
self.config.parser.add_dir(ifmod[:-1], "LoadModule", loadmod_args)
|
||||
self.config.parser.add_dir(self.vh_truth[1].path, "LoadModule", loadmod_args)
|
||||
self.config.save()
|
||||
pre_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
|
||||
self.assertEqual(len(pre_loadmods), 2)
|
||||
# The ssl.conf now has two LoadModule directives, one inside of
|
||||
# !mod_ssl.c IfModule
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.config.save()
|
||||
# Ensure that the additional LoadModule wasn't written into the IfModule
|
||||
post_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
|
||||
self.assertEqual(len(post_loadmods), 1)
|
||||
|
||||
def test_loadmod_non_duplicate(self):
|
||||
# the modules/mod_ssl.so exists in ssl.conf
|
||||
sslmod_args = ["ssl_module", "modules/mod_somethingelse.so"]
|
||||
rootconf_ifmod = self.config.parser.get_ifmod(
|
||||
parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
"!mod_ssl.c", beginning=True)
|
||||
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
|
||||
self.config.save()
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
pre_matches = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module", exclude=False)
|
||||
|
||||
self.assertRaises(MisconfigurationError, self.config.deploy_cert,
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
|
||||
post_matches = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module", exclude=False)
|
||||
# Make sure that none was changed
|
||||
self.assertEqual(pre_matches, post_matches)
|
||||
|
||||
def test_loadmod_not_found(self):
|
||||
# Remove all existing LoadModule ssl_module... directives
|
||||
orig_loadmods = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module",
|
||||
exclude=False)
|
||||
for mod in orig_loadmods:
|
||||
noarg_path = mod.rpartition("/")[0]
|
||||
self.config.parser.aug.remove(noarg_path)
|
||||
self.config.save()
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
|
||||
post_loadmods = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module",
|
||||
exclude=False)
|
||||
self.assertFalse(post_loadmods)
|
||||
|
||||
def test_no_ifmod_search_false(self):
|
||||
#pylint: disable=no-member
|
||||
|
||||
self.assertFalse(self.config.parser.not_modssl_ifmodule(
|
||||
"/path/does/not/include/ifmod"
|
||||
))
|
||||
self.assertFalse(self.config.parser.not_modssl_ifmodule(
|
||||
""
|
||||
))
|
||||
self.assertFalse(self.config.parser.not_modssl_ifmodule(
|
||||
"/path/includes/IfModule/but/no/arguments"
|
||||
))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,15 +1,17 @@
|
||||
"""Test for certbot_apache.configurator for Centos overrides"""
|
||||
import os
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import override_centos
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
def get_vh_truth(temp_dir, config_name):
|
||||
"""Return the ground truth for the specified directory."""
|
||||
prefix = os.path.join(
|
||||
@@ -30,6 +32,59 @@ def get_vh_truth(temp_dir, config_name):
|
||||
]
|
||||
return vh_truth
|
||||
|
||||
class FedoraRestartTest(util.ApacheTest):
|
||||
"""Tests for Fedora specific self-signed certificate override"""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="fedora_old")
|
||||
self.vh_truth = get_vh_truth(
|
||||
self.temp_dir, "centos7_apache/apache")
|
||||
|
||||
def _run_fedora_test(self):
|
||||
self.assertIsInstance(self.config, override_centos.CentOSConfigurator)
|
||||
with mock.patch("certbot.util.get_os_info") as mock_info:
|
||||
mock_info.return_value = ["fedora", "28"]
|
||||
self.config.config_test()
|
||||
|
||||
def test_non_fedora_error(self):
|
||||
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
|
||||
with mock.patch(c_test) as mock_test:
|
||||
mock_test.side_effect = errors.MisconfigurationError
|
||||
with mock.patch("certbot.util.get_os_info") as mock_info:
|
||||
mock_info.return_value = ["not_fedora"]
|
||||
self.assertRaises(errors.MisconfigurationError,
|
||||
self.config.config_test)
|
||||
|
||||
def test_fedora_restart_error(self):
|
||||
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
|
||||
with mock.patch(c_test) as mock_test:
|
||||
# First call raises error, second doesn't
|
||||
mock_test.side_effect = [errors.MisconfigurationError, '']
|
||||
with mock.patch("certbot.util.run_script") as mock_run:
|
||||
mock_run.side_effect = errors.SubprocessError
|
||||
self.assertRaises(errors.MisconfigurationError,
|
||||
self._run_fedora_test)
|
||||
|
||||
def test_fedora_restart(self):
|
||||
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
|
||||
with mock.patch(c_test) as mock_test:
|
||||
with mock.patch("certbot.util.run_script") as mock_run:
|
||||
# First call raises error, second doesn't
|
||||
mock_test.side_effect = [errors.MisconfigurationError, '']
|
||||
self._run_fedora_test()
|
||||
self.assertEqual(mock_test.call_count, 2)
|
||||
self.assertEqual(mock_run.call_args[0][0],
|
||||
['systemctl', 'restart', 'httpd'])
|
||||
|
||||
|
||||
class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
"""Multiple vhost tests for CentOS / RHEL family of distros"""
|
||||
|
||||
@@ -50,8 +105,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
self.temp_dir, "centos7_apache/apache")
|
||||
|
||||
def test_get_parser(self):
|
||||
self.assertTrue(isinstance(self.config.parser,
|
||||
override_centos.CentOSParser))
|
||||
self.assertIsInstance(self.config.parser, override_centos.CentOSParser)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
|
||||
def test_opportunistic_httpd_runtime_parsing(self, mock_get):
|
||||
@@ -107,7 +161,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
"""Make sure we read the sysconfig OPTIONS variable correctly"""
|
||||
# Return nothing for the process calls
|
||||
mock_cfg.return_value = ""
|
||||
self.config.parser.sysconfig_filep = os.path.realpath(
|
||||
self.config.parser.sysconfig_filep = filesystem.realpath(
|
||||
os.path.join(self.config.parser.root, "../sysconfig/httpd"))
|
||||
self.config.parser.variables = {}
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
"""Tests for certbot_apache.parser."""
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
"""Test for certbot_apache.augeas_configurator."""
|
||||
import os
|
||||
"""Test for certbot_apache.configurator implementations of reverter"""
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
@@ -10,12 +9,12 @@ from certbot import errors
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
class AugeasConfiguratorTest(util.ApacheTest):
|
||||
"""Test for Augeas Configurator base class."""
|
||||
class ConfiguratorReverterTest(util.ApacheTest):
|
||||
"""Test for ApacheConfigurator reverter methods"""
|
||||
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(AugeasConfiguratorTest, self).setUp()
|
||||
super(ConfiguratorReverterTest, self).setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
@@ -28,20 +27,6 @@ class AugeasConfiguratorTest(util.ApacheTest):
|
||||
shutil.rmtree(self.work_dir)
|
||||
shutil.rmtree(self.temp_dir)
|
||||
|
||||
def test_bad_parse(self):
|
||||
# pylint: disable=protected-access
|
||||
self.config.parser.parse_file(os.path.join(
|
||||
self.config.parser.root, "conf-available", "bad_conf_file.conf"))
|
||||
self.assertRaises(
|
||||
errors.PluginError, self.config.check_parsing_errors, "httpd.aug")
|
||||
|
||||
def test_bad_save(self):
|
||||
mock_save = mock.Mock()
|
||||
mock_save.side_effect = IOError
|
||||
self.config.aug.save = mock_save
|
||||
|
||||
self.assertRaises(errors.PluginError, self.config.save)
|
||||
|
||||
def test_bad_save_checkpoint(self):
|
||||
self.config.reverter.add_to_checkpoint = mock.Mock(
|
||||
side_effect=errors.ReverterError)
|
||||
@@ -63,23 +48,9 @@ class AugeasConfiguratorTest(util.ApacheTest):
|
||||
|
||||
self.assertTrue(mock_finalize.is_called)
|
||||
|
||||
def test_recovery_routine(self):
|
||||
mock_load = mock.Mock()
|
||||
self.config.aug.load = mock_load
|
||||
|
||||
self.config.recovery_routine()
|
||||
self.assertEqual(mock_load.call_count, 1)
|
||||
|
||||
def test_recovery_routine_error(self):
|
||||
self.config.reverter.recovery_routine = mock.Mock(
|
||||
side_effect=errors.ReverterError)
|
||||
|
||||
self.assertRaises(
|
||||
errors.PluginError, self.config.recovery_routine)
|
||||
|
||||
def test_revert_challenge_config(self):
|
||||
mock_load = mock.Mock()
|
||||
self.config.aug.load = mock_load
|
||||
self.config.parser.aug.load = mock_load
|
||||
|
||||
self.config.revert_challenge_config()
|
||||
self.assertEqual(mock_load.call_count, 1)
|
||||
@@ -93,7 +64,7 @@ class AugeasConfiguratorTest(util.ApacheTest):
|
||||
|
||||
def test_rollback_checkpoints(self):
|
||||
mock_load = mock.Mock()
|
||||
self.config.aug.load = mock_load
|
||||
self.config.parser.aug.load = mock_load
|
||||
|
||||
self.config.rollback_checkpoints()
|
||||
self.assertEqual(mock_load.call_count, 1)
|
||||
@@ -103,13 +74,11 @@ class AugeasConfiguratorTest(util.ApacheTest):
|
||||
side_effect=errors.ReverterError)
|
||||
self.assertRaises(errors.PluginError, self.config.rollback_checkpoints)
|
||||
|
||||
def test_view_config_changes(self):
|
||||
self.config.view_config_changes()
|
||||
|
||||
def test_view_config_changes_error(self):
|
||||
self.config.reverter.view_config_changes = mock.Mock(
|
||||
side_effect=errors.ReverterError)
|
||||
self.assertRaises(errors.PluginError, self.config.view_config_changes)
|
||||
def test_recovery_routine_reload(self):
|
||||
mock_load = mock.Mock()
|
||||
self.config.parser.aug.load = mock_load
|
||||
self.config.recovery_routine()
|
||||
self.assertEqual(mock_load.call_count, 1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
@@ -1,6 +1,6 @@
|
||||
# pylint: disable=too-many-public-methods,too-many-lines
|
||||
"""Test for certbot_apache.configurator."""
|
||||
import os
|
||||
import copy
|
||||
import shutil
|
||||
import socket
|
||||
import tempfile
|
||||
@@ -15,22 +15,21 @@ from acme import challenges
|
||||
from certbot import achallenges
|
||||
from certbot import crypto_util
|
||||
from certbot import errors
|
||||
|
||||
from certbot.compat import os
|
||||
from certbot.compat import filesystem
|
||||
from certbot.tests import acme_util
|
||||
from certbot.tests import util as certbot_util
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import constants
|
||||
from certbot_apache import parser
|
||||
from certbot_apache import obj
|
||||
|
||||
from certbot_apache import parser
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
class MultipleVhostsTest(util.ApacheTest):
|
||||
"""Test two standard well-configured HTTP vhosts."""
|
||||
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(MultipleVhostsTest, self).setUp()
|
||||
|
||||
@@ -52,25 +51,14 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.deploy_cert = mocked_deploy_cert
|
||||
return self.config
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.init_augeas")
|
||||
@mock.patch("certbot_apache.configurator.path_surgery")
|
||||
def test_prepare_no_install(self, mock_surgery, _init_augeas):
|
||||
def test_prepare_no_install(self, mock_surgery):
|
||||
silly_path = {"PATH": "/tmp/nothingness2342"}
|
||||
mock_surgery.return_value = False
|
||||
with mock.patch.dict('os.environ', silly_path):
|
||||
self.assertRaises(errors.NoInstallationError, self.config.prepare)
|
||||
self.assertEqual(mock_surgery.call_count, 1)
|
||||
|
||||
@mock.patch("certbot_apache.augeas_configurator.AugeasConfigurator.init_augeas")
|
||||
def test_prepare_no_augeas(self, mock_init_augeas):
|
||||
""" Test augeas initialization ImportError """
|
||||
def side_effect_error():
|
||||
""" Side effect error for the test """
|
||||
raise ImportError
|
||||
mock_init_augeas.side_effect = side_effect_error
|
||||
self.assertRaises(
|
||||
errors.NoInstallationError, self.config.prepare)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser")
|
||||
@mock.patch("certbot_apache.configurator.util.exe_exists")
|
||||
def test_prepare_version(self, mock_exe_exists, _):
|
||||
@@ -82,16 +70,6 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.assertRaises(
|
||||
errors.NotSupportedError, self.config.prepare)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser")
|
||||
@mock.patch("certbot_apache.configurator.util.exe_exists")
|
||||
def test_prepare_old_aug(self, mock_exe_exists, _):
|
||||
mock_exe_exists.return_value = True
|
||||
self.config.config_test = mock.Mock()
|
||||
# pylint: disable=protected-access
|
||||
self.config._check_aug_version = mock.Mock(return_value=False)
|
||||
self.assertRaises(
|
||||
errors.NotSupportedError, self.config.prepare)
|
||||
|
||||
def test_prepare_locked(self):
|
||||
server_root = self.config.conf("server-root")
|
||||
self.config.config_test = mock.Mock()
|
||||
@@ -115,6 +93,37 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# Weak test..
|
||||
ApacheConfigurator.add_parser_arguments(mock.MagicMock())
|
||||
|
||||
def test_docs_parser_arguments(self):
|
||||
os.environ["CERTBOT_DOCS"] = "1"
|
||||
from certbot_apache.configurator import ApacheConfigurator
|
||||
mock_add = mock.MagicMock()
|
||||
ApacheConfigurator.add_parser_arguments(mock_add)
|
||||
parserargs = ["server_root", "enmod", "dismod", "le_vhost_ext",
|
||||
"vhost_root", "logs_root", "challenge_location",
|
||||
"handle_modules", "handle_sites", "ctl"]
|
||||
exp = dict()
|
||||
|
||||
for k in ApacheConfigurator.OS_DEFAULTS:
|
||||
if k in parserargs:
|
||||
exp[k.replace("_", "-")] = ApacheConfigurator.OS_DEFAULTS[k]
|
||||
# Special cases
|
||||
exp["vhost-root"] = None
|
||||
exp["init-script"] = None
|
||||
|
||||
found = set()
|
||||
for call in mock_add.call_args_list:
|
||||
# init-script is a special case: deprecated argument
|
||||
if call[0][0] != "init-script":
|
||||
self.assertEqual(exp[call[0][0]], call[1]['default'])
|
||||
found.add(call[0][0])
|
||||
|
||||
# Make sure that all (and only) the expected values exist
|
||||
self.assertEqual(len(mock_add.call_args_list), len(found))
|
||||
for e in exp:
|
||||
self.assertTrue(e in found)
|
||||
|
||||
del os.environ["CERTBOT_DOCS"]
|
||||
|
||||
def test_add_parser_arguments_all_configurators(self): # pylint: disable=no-self-use
|
||||
from certbot_apache.entrypoint import OVERRIDE_CLASSES
|
||||
for cls in OVERRIDE_CLASSES.values():
|
||||
@@ -139,7 +148,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
names = self.config.get_all_names()
|
||||
self.assertEqual(names, set(
|
||||
["certbot.demo", "ocspvhost.com", "encryption-example.demo",
|
||||
"nonsym.link", "vhost.in.rootconf", "www.certbot.demo"]
|
||||
"nonsym.link", "vhost.in.rootconf", "www.certbot.demo",
|
||||
"duplicate.example.com"]
|
||||
))
|
||||
|
||||
@certbot_util.patch_get_utility()
|
||||
@@ -158,8 +168,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.vhosts.append(vhost)
|
||||
|
||||
names = self.config.get_all_names()
|
||||
# Names get filtered, only 5 are returned
|
||||
self.assertEqual(len(names), 8)
|
||||
self.assertEqual(len(names), 9)
|
||||
self.assertTrue("zombo.com" in names)
|
||||
self.assertTrue("google.com" in names)
|
||||
self.assertTrue("certbot.demo" in names)
|
||||
@@ -200,7 +209,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
def test_get_virtual_hosts(self):
|
||||
"""Make sure all vhosts are being properly found."""
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 10)
|
||||
self.assertEqual(len(vhs), 12)
|
||||
found = 0
|
||||
|
||||
for vhost in vhs:
|
||||
@@ -211,7 +220,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
else:
|
||||
raise Exception("Missed: %s" % vhost) # pragma: no cover
|
||||
|
||||
self.assertEqual(found, 10)
|
||||
self.assertEqual(found, 12)
|
||||
|
||||
# Handle case of non-debian layout get_virtual_hosts
|
||||
with mock.patch(
|
||||
@@ -219,7 +228,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
) as mock_conf:
|
||||
mock_conf.return_value = False
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 10)
|
||||
self.assertEqual(len(vhs), 12)
|
||||
|
||||
@mock.patch("certbot_apache.display_ops.select_vhost")
|
||||
def test_choose_vhost_none_avail(self, mock_select):
|
||||
@@ -322,7 +331,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.vhosts = [
|
||||
vh for vh in self.config.vhosts
|
||||
if vh.name not in ["certbot.demo", "nonsym.link",
|
||||
"encryption-example.demo",
|
||||
"encryption-example.demo", "duplicate.example.com",
|
||||
"ocspvhost.com", "vhost.in.rootconf"]
|
||||
and "*.blue.purple.com" not in vh.aliases
|
||||
]
|
||||
@@ -333,7 +342,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
def test_non_default_vhosts(self):
|
||||
# pylint: disable=protected-access
|
||||
vhosts = self.config._non_default_vhosts(self.config.vhosts)
|
||||
self.assertEqual(len(vhosts), 8)
|
||||
self.assertEqual(len(vhosts), 10)
|
||||
|
||||
def test_deploy_cert_enable_new_vhost(self):
|
||||
# Create
|
||||
@@ -353,6 +362,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"""Mock method for parser.find_dir"""
|
||||
if directive == "Include" and argument.endswith("options-ssl-apache.conf"):
|
||||
return ["/path/to/whatever"]
|
||||
return None # pragma: no cover
|
||||
|
||||
mock_add = mock.MagicMock()
|
||||
self.config.parser.add_dir = mock_add
|
||||
@@ -464,8 +474,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
but an SSLCertificateKeyFile directive is missing."""
|
||||
if "SSLCertificateFile" in args:
|
||||
return ["example/cert.pem"]
|
||||
else:
|
||||
return []
|
||||
return []
|
||||
|
||||
mock_find_dir = mock.MagicMock(return_value=[])
|
||||
mock_find_dir.side_effect = side_effect
|
||||
@@ -645,7 +654,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# span excludes the closing </VirtualHost> tag in older versions
|
||||
# of Augeas
|
||||
return_value = [self.vh_truth[0].filep, 1, 12, 0, 0, 0, 1142]
|
||||
with mock.patch.object(self.config.aug, 'span') as mock_span:
|
||||
with mock.patch.object(self.config.parser.aug, 'span') as mock_span:
|
||||
mock_span.return_value = return_value
|
||||
self.test_make_vhost_ssl()
|
||||
|
||||
@@ -653,7 +662,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# span includes the closing </VirtualHost> tag in newer versions
|
||||
# of Augeas
|
||||
return_value = [self.vh_truth[0].filep, 1, 12, 0, 0, 0, 1157]
|
||||
with mock.patch.object(self.config.aug, 'span') as mock_span:
|
||||
with mock.patch.object(self.config.parser.aug, 'span') as mock_span:
|
||||
mock_span.return_value = return_value
|
||||
self.test_make_vhost_ssl()
|
||||
|
||||
@@ -666,8 +675,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
def test_make_vhost_ssl_nonexistent_vhost_path(self):
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[1])
|
||||
self.assertEqual(os.path.dirname(ssl_vhost.filep),
|
||||
os.path.dirname(os.path.realpath(
|
||||
self.vh_truth[1].filep)))
|
||||
os.path.dirname(filesystem.realpath(self.vh_truth[1].filep)))
|
||||
|
||||
def test_make_vhost_ssl(self):
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
|
||||
@@ -688,7 +696,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.assertEqual(self.config.is_name_vhost(self.vh_truth[0]),
|
||||
self.config.is_name_vhost(ssl_vhost))
|
||||
|
||||
self.assertEqual(len(self.config.vhosts), 11)
|
||||
self.assertEqual(len(self.config.vhosts), 13)
|
||||
|
||||
def test_clean_vhost_ssl(self):
|
||||
# pylint: disable=protected-access
|
||||
@@ -781,32 +789,19 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.assertEqual(self.config.add_name_vhost.call_count, 2)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.http_01.ApacheHttp01.perform")
|
||||
@mock.patch("certbot_apache.configurator.tls_sni_01.ApacheTlsSni01.perform")
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
|
||||
def test_perform(self, mock_restart, mock_tls_perform, mock_http_perform):
|
||||
def test_perform(self, mock_restart, mock_http_perform):
|
||||
# Only tests functionality specific to configurator.perform
|
||||
# Note: As more challenges are offered this will have to be expanded
|
||||
account_key, achalls = self.get_key_and_achalls()
|
||||
|
||||
all_expected = []
|
||||
http_expected = []
|
||||
tls_expected = []
|
||||
for achall in achalls:
|
||||
response = achall.response(account_key)
|
||||
if isinstance(achall.chall, challenges.HTTP01):
|
||||
http_expected.append(response)
|
||||
else:
|
||||
tls_expected.append(response)
|
||||
all_expected.append(response)
|
||||
|
||||
mock_http_perform.return_value = http_expected
|
||||
mock_tls_perform.return_value = tls_expected
|
||||
expected = [achall.response(account_key) for achall in achalls]
|
||||
mock_http_perform.return_value = expected
|
||||
|
||||
responses = self.config.perform(achalls)
|
||||
|
||||
self.assertEqual(mock_http_perform.call_count, 1)
|
||||
self.assertEqual(mock_tls_perform.call_count, 1)
|
||||
self.assertEqual(responses, all_expected)
|
||||
self.assertEqual(responses, expected)
|
||||
|
||||
self.assertEqual(mock_restart.call_count, 1)
|
||||
|
||||
@@ -1026,7 +1021,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# pylint: disable=protected-access
|
||||
http_vh = self.config._get_http_vhost(ssl_vh)
|
||||
self.assertTrue(http_vh.ssl == False)
|
||||
self.assertFalse(http_vh.ssl)
|
||||
|
||||
@mock.patch("certbot.util.run_script")
|
||||
@mock.patch("certbot.util.exe_exists")
|
||||
@@ -1215,7 +1210,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
except errors.PluginEnhancementAlreadyPresent:
|
||||
args_paths = self.config.parser.find_dir(
|
||||
"RewriteRule", None, http_vhost.path, False)
|
||||
arg_vals = [self.config.aug.get(x) for x in args_paths]
|
||||
arg_vals = [self.config.parser.aug.get(x) for x in args_paths]
|
||||
self.assertEqual(arg_vals, constants.REWRITE_HTTPS_ARGS)
|
||||
|
||||
|
||||
@@ -1269,7 +1264,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.config._enable_redirect(self.vh_truth[1], "")
|
||||
self.assertEqual(len(self.config.vhosts), 11)
|
||||
self.assertEqual(len(self.config.vhosts), 13)
|
||||
|
||||
def test_create_own_redirect_for_old_apache_version(self):
|
||||
self.config.parser.modules.add("rewrite_module")
|
||||
@@ -1280,7 +1275,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.config._enable_redirect(self.vh_truth[1], "")
|
||||
self.assertEqual(len(self.config.vhosts), 11)
|
||||
self.assertEqual(len(self.config.vhosts), 13)
|
||||
|
||||
def test_sift_rewrite_rule(self):
|
||||
# pylint: disable=protected-access
|
||||
@@ -1318,24 +1313,6 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
return account_key, (achall1, achall2, achall3)
|
||||
|
||||
def test_make_addrs_sni_ready(self):
|
||||
self.config.version = (2, 2)
|
||||
self.config.make_addrs_sni_ready(
|
||||
set([obj.Addr.fromstring("*:443"), obj.Addr.fromstring("*:80")]))
|
||||
self.assertTrue(self.config.parser.find_dir(
|
||||
"NameVirtualHost", "*:80", exclude=False))
|
||||
self.assertTrue(self.config.parser.find_dir(
|
||||
"NameVirtualHost", "*:443", exclude=False))
|
||||
|
||||
def test_aug_version(self):
|
||||
mock_match = mock.Mock(return_value=["something"])
|
||||
self.config.aug.match = mock_match
|
||||
# pylint: disable=protected-access
|
||||
self.assertEqual(self.config._check_aug_version(),
|
||||
["something"])
|
||||
self.config.aug.match.side_effect = RuntimeError
|
||||
self.assertFalse(self.config._check_aug_version())
|
||||
|
||||
def test_enable_site_nondebian(self):
|
||||
inc_path = "/path/to/wherever"
|
||||
vhost = self.vh_truth[0]
|
||||
@@ -1358,8 +1335,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.parser.modules.add("ssl_module")
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
self.config.parser.modules.add("socache_shmcb_module")
|
||||
tmp_path = os.path.realpath(tempfile.mkdtemp("vhostroot"))
|
||||
os.chmod(tmp_path, 0o755)
|
||||
tmp_path = filesystem.realpath(tempfile.mkdtemp("vhostroot"))
|
||||
filesystem.chmod(tmp_path, 0o755)
|
||||
mock_p = "certbot_apache.configurator.ApacheConfigurator._get_ssl_vhost_path"
|
||||
mock_a = "certbot_apache.parser.ApacheParser.add_include"
|
||||
|
||||
@@ -1391,7 +1368,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# pylint: disable=protected-access
|
||||
cases = {u"*.example.org": True, b"*.x.example.org": True,
|
||||
u"a.example.org": False, b"a.x.example.org": False}
|
||||
for key in cases.keys():
|
||||
for key in cases:
|
||||
self.assertEqual(self.config._wildcard_domain(key), cases[key])
|
||||
|
||||
def test_choose_vhosts_wildcard(self):
|
||||
@@ -1503,6 +1480,29 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
second_id = self.config.add_vhost_id(self.vh_truth[0])
|
||||
self.assertEqual(first_id, second_id)
|
||||
|
||||
def test_realpath_replaces_symlink(self):
|
||||
orig_match = self.config.parser.aug.match
|
||||
mock_vhost = copy.deepcopy(self.vh_truth[0])
|
||||
mock_vhost.filep = mock_vhost.filep.replace('sites-enabled', u'sites-available')
|
||||
mock_vhost.path = mock_vhost.path.replace('sites-enabled', 'sites-available')
|
||||
mock_vhost.enabled = False
|
||||
self.config.parser.parse_file(mock_vhost.filep)
|
||||
|
||||
def mock_match(aug_expr):
|
||||
"""Return a mocked match list of VirtualHosts"""
|
||||
if "/mocked/path" in aug_expr:
|
||||
return [self.vh_truth[1].path, self.vh_truth[0].path, mock_vhost.path]
|
||||
return orig_match(aug_expr)
|
||||
|
||||
self.config.parser.parser_paths = ["/mocked/path"]
|
||||
self.config.parser.aug.match = mock_match
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 2)
|
||||
self.assertTrue(vhs[0] == self.vh_truth[1])
|
||||
# mock_vhost should have replaced the vh_truth[0], because its filepath
|
||||
# isn't a symlink
|
||||
self.assertTrue(vhs[1] == mock_vhost)
|
||||
|
||||
|
||||
class AugeasVhostsTest(util.ApacheTest):
|
||||
"""Test vhosts with illegal names dependent on augeas version."""
|
||||
@@ -1521,8 +1521,8 @@ class AugeasVhostsTest(util.ApacheTest):
|
||||
self.work_dir)
|
||||
|
||||
def test_choosevhost_with_illegal_name(self):
|
||||
self.config.aug = mock.MagicMock()
|
||||
self.config.aug.match.side_effect = RuntimeError
|
||||
self.config.parser.aug = mock.MagicMock()
|
||||
self.config.parser.aug.match.side_effect = RuntimeError
|
||||
path = "debian_apache_2_4/augeas_vhosts/apache2/sites-available/old-and-default.conf"
|
||||
chosen_vhost = self.config._create_vhost(path)
|
||||
self.assertEqual(None, chosen_vhost)
|
||||
@@ -1530,7 +1530,7 @@ class AugeasVhostsTest(util.ApacheTest):
|
||||
def test_choosevhost_works(self):
|
||||
path = "debian_apache_2_4/augeas_vhosts/apache2/sites-available/old-and-default.conf"
|
||||
chosen_vhost = self.config._create_vhost(path)
|
||||
self.assertTrue(chosen_vhost == None or chosen_vhost.path == path)
|
||||
self.assertTrue(chosen_vhost is None or chosen_vhost.path == path)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator._create_vhost")
|
||||
def test_get_vhost_continue(self, mock_vhost):
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
"""Test for certbot_apache.configurator for Debian overrides"""
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import obj
|
||||
@@ -79,9 +79,9 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
|
||||
def test_enable_site_failure(self):
|
||||
self.config.parser.root = "/tmp/nonexistent"
|
||||
with mock.patch("os.path.isdir") as mock_dir:
|
||||
with mock.patch("certbot.compat.os.path.isdir") as mock_dir:
|
||||
mock_dir.return_value = True
|
||||
with mock.patch("os.path.islink") as mock_link:
|
||||
with mock.patch("certbot.compat.os.path.islink") as mock_link:
|
||||
mock_link.return_value = False
|
||||
self.assertRaises(
|
||||
errors.NotSupportedError,
|
||||
|
||||
@@ -6,6 +6,7 @@ import mock
|
||||
from certbot_apache import configurator
|
||||
from certbot_apache import entrypoint
|
||||
|
||||
|
||||
class EntryPointTest(unittest.TestCase):
|
||||
"""Entrypoint tests"""
|
||||
|
||||
@@ -14,8 +15,13 @@ class EntryPointTest(unittest.TestCase):
|
||||
def test_get_configurator(self):
|
||||
|
||||
with mock.patch("certbot.util.get_os_info") as mock_info:
|
||||
for distro in entrypoint.OVERRIDE_CLASSES.keys():
|
||||
mock_info.return_value = (distro, "whatever")
|
||||
for distro in entrypoint.OVERRIDE_CLASSES:
|
||||
return_value = (distro, "whatever")
|
||||
if distro == 'fedora_old':
|
||||
return_value = ('fedora', '28')
|
||||
elif distro == 'fedora':
|
||||
return_value = ('fedora', '29')
|
||||
mock_info.return_value = return_value
|
||||
self.assertEqual(entrypoint.get_configurator(),
|
||||
entrypoint.OVERRIDE_CLASSES[distro])
|
||||
|
||||
@@ -23,7 +29,7 @@ class EntryPointTest(unittest.TestCase):
|
||||
with mock.patch("certbot.util.get_os_info") as mock_info:
|
||||
mock_info.return_value = ("nonexistent", "irrelevant")
|
||||
with mock.patch("certbot.util.get_systemd_os_like") as mock_like:
|
||||
for like in entrypoint.OVERRIDE_CLASSES.keys():
|
||||
for like in entrypoint.OVERRIDE_CLASSES:
|
||||
mock_like.return_value = [like]
|
||||
self.assertEqual(entrypoint.get_configurator(),
|
||||
entrypoint.OVERRIDE_CLASSES[like])
|
||||
|
||||
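For context, the override selection exercised by these entrypoint tests boils down to looking the detected distro up in OVERRIDE_CLASSES and, when that fails, falling back to the systemd "like" list. A minimal sketch of that dispatch, assuming the certbot.util helpers behave as the mocks above suggest (the real entrypoint.get_configurator takes no arguments and also branches on the Fedora version, 28 vs. 29, which this sketch omits):

# Hypothetical, simplified dispatch; parameter names are illustrative.
from certbot import util as certbot_util

def pick_override(override_classes, default_cls):
    os_name, _ = certbot_util.get_os_info()
    if os_name in override_classes:
        return override_classes[os_name]
    # Unknown distro: try the systemd ID_LIKE entries instead.
    for like in certbot_util.get_systemd_os_like():
        if like in override_classes:
            return override_classes[like]
    return default_cls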
certbot-apache/certbot_apache/tests/fedora_test.py (new file, 195 lines)
@@ -0,0 +1,195 @@
|
||||
"""Test for certbot_apache.configurator for Fedora 29+ overrides"""
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import override_fedora
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
def get_vh_truth(temp_dir, config_name):
|
||||
"""Return the ground truth for the specified directory."""
|
||||
prefix = os.path.join(
|
||||
temp_dir, config_name, "httpd/conf.d")
|
||||
|
||||
aug_pre = "/files" + prefix
|
||||
# TODO: eventually, these tests should have a dedicated configuration instead
|
||||
# of reusing the ones from centos_test
|
||||
vh_truth = [
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "centos.example.com.conf"),
|
||||
os.path.join(aug_pre, "centos.example.com.conf/VirtualHost"),
|
||||
{obj.Addr.fromstring("*:80")},
|
||||
False, True, "centos.example.com"),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "ssl.conf"),
|
||||
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
|
||||
{obj.Addr.fromstring("_default_:443")},
|
||||
True, True, None)
|
||||
]
|
||||
return vh_truth
|
||||
|
||||
|
||||
class FedoraRestartTest(util.ApacheTest):
|
||||
"""Tests for Fedora specific self-signed certificate override"""
|
||||
|
||||
# TODO: eventually, these tests should have a dedicated configuration instead
|
||||
# of reusing the ones from centos_test
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="fedora")
|
||||
self.vh_truth = get_vh_truth(
|
||||
self.temp_dir, "centos7_apache/apache")
|
||||
|
||||
def _run_fedora_test(self):
|
||||
self.assertIsInstance(self.config, override_fedora.FedoraConfigurator)
|
||||
self.config.config_test()
|
||||
|
||||
def test_fedora_restart_error(self):
|
||||
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
|
||||
with mock.patch(c_test) as mock_test:
|
||||
# First call raises error, second doesn't
|
||||
mock_test.side_effect = [errors.MisconfigurationError, '']
|
||||
with mock.patch("certbot.util.run_script") as mock_run:
|
||||
mock_run.side_effect = errors.SubprocessError
|
||||
self.assertRaises(errors.MisconfigurationError,
|
||||
self._run_fedora_test)
|
||||
|
||||
def test_fedora_restart(self):
|
||||
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
|
||||
with mock.patch(c_test) as mock_test:
|
||||
with mock.patch("certbot.util.run_script") as mock_run:
|
||||
# First call raises error, second doesn't
|
||||
mock_test.side_effect = [errors.MisconfigurationError, '']
|
||||
self._run_fedora_test()
|
||||
self.assertEqual(mock_test.call_count, 2)
|
||||
self.assertEqual(mock_run.call_args[0][0],
|
||||
['systemctl', 'restart', 'httpd'])
|
||||
|
||||
|
||||
class MultipleVhostsTestFedora(util.ApacheTest):
|
||||
"""Multiple vhost tests for CentOS / RHEL family of distros"""
|
||||
|
||||
_multiprocess_can_split_ = True
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super(MultipleVhostsTestFedora, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="fedora")
|
||||
self.vh_truth = get_vh_truth(
|
||||
self.temp_dir, "centos7_apache/apache")
|
||||
|
||||
def test_get_parser(self):
|
||||
self.assertIsInstance(self.config.parser, override_fedora.FedoraParser)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
|
||||
def test_opportunistic_httpd_runtime_parsing(self, mock_get):
|
||||
define_val = (
|
||||
'Define: TEST1\n'
|
||||
'Define: TEST2\n'
|
||||
'Define: DUMP_RUN_CFG\n'
|
||||
)
|
||||
mod_val = (
|
||||
'Loaded Modules:\n'
|
||||
' mock_module (static)\n'
|
||||
' another_module (static)\n'
|
||||
)
|
||||
def mock_get_cfg(command):
|
||||
"""Mock httpd process stdout"""
|
||||
if command == ['httpd', '-t', '-D', 'DUMP_RUN_CFG']:
|
||||
return define_val
|
||||
elif command == ['httpd', '-t', '-D', 'DUMP_MODULES']:
|
||||
return mod_val
|
||||
return ""
|
||||
mock_get.side_effect = mock_get_cfg
|
||||
self.config.parser.modules = set()
|
||||
self.config.parser.variables = {}
|
||||
|
||||
with mock.patch("certbot.util.get_os_info") as mock_osi:
|
||||
# Make sure we have the CentOS httpd constants
|
||||
mock_osi.return_value = ("fedora", "29")
|
||||
self.config.parser.update_runtime_variables()
|
||||
|
||||
self.assertEqual(mock_get.call_count, 3)
|
||||
self.assertEqual(len(self.config.parser.modules), 4)
|
||||
self.assertEqual(len(self.config.parser.variables), 2)
|
||||
self.assertTrue("TEST2" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mod_another.c" in self.config.parser.modules)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.util.run_script")
|
||||
def test_get_version(self, mock_run_script):
|
||||
mock_run_script.return_value = ('', None)
|
||||
self.assertRaises(errors.PluginError, self.config.get_version)
|
||||
self.assertEqual(mock_run_script.call_args[0][0][0], 'httpd')
|
||||
|
||||
def test_get_virtual_hosts(self):
|
||||
"""Make sure all vhosts are being properly found."""
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 2)
|
||||
found = 0
|
||||
|
||||
for vhost in vhs:
|
||||
for centos_truth in self.vh_truth:
|
||||
if vhost == centos_truth:
|
||||
found += 1
|
||||
break
|
||||
else:
|
||||
raise Exception("Missed: %s" % vhost) # pragma: no cover
|
||||
self.assertEqual(found, 2)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
|
||||
def test_get_sysconfig_vars(self, mock_cfg):
|
||||
"""Make sure we read the sysconfig OPTIONS variable correctly"""
|
||||
# Return nothing for the process calls
|
||||
mock_cfg.return_value = ""
|
||||
self.config.parser.sysconfig_filep = filesystem.realpath(
|
||||
os.path.join(self.config.parser.root, "../sysconfig/httpd"))
|
||||
self.config.parser.variables = {}
|
||||
|
||||
with mock.patch("certbot.util.get_os_info") as mock_osi:
|
||||
# Make sure we have the CentOS httpd constants
|
||||
mock_osi.return_value = ("fedora", "29")
|
||||
self.config.parser.update_runtime_variables()
|
||||
|
||||
self.assertTrue("mock_define" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
|
||||
self.assertTrue("mock_value" in self.config.parser.variables.keys())
|
||||
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
|
||||
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
|
||||
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
|
||||
|
||||
@mock.patch("certbot_apache.configurator.util.run_script")
|
||||
def test_alt_restart_works(self, mock_run_script):
|
||||
mock_run_script.side_effect = [None, errors.SubprocessError, None]
|
||||
self.config.restart()
|
||||
self.assertEqual(mock_run_script.call_count, 3)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.util.run_script")
|
||||
def test_alt_restart_errors(self, mock_run_script):
|
||||
mock_run_script.side_effect = [None,
|
||||
errors.SubprocessError,
|
||||
errors.SubprocessError]
|
||||
self.assertRaises(errors.MisconfigurationError, self.config.restart)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
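The FedoraRestartTest cases above imply that the Fedora override responds to a failing config_test by restarting httpd through systemctl and re-running the check, and only propagates MisconfigurationError when that restart itself fails (the class docstring ties this to the self-signed certificate handling of the stock mod_ssl package). A minimal sketch of that fallback, with the structure assumed from the tests rather than copied from the real override:

# Hypothetical sketch; the real FedoraConfigurator may organize this differently.
from certbot import errors
from certbot import util

def config_test_with_restart(base_config_test):
    """Run base_config_test, restarting httpd once if the first attempt fails."""
    try:
        base_config_test()
    except errors.MisconfigurationError:
        try:
            # Restarting gives mod_ssl a chance to (re)generate its
            # self-signed certificate, after which the check should pass.
            util.run_script(['systemctl', 'restart', 'httpd'])
        except errors.SubprocessError as err:
            raise errors.MisconfigurationError(str(err))
        base_config_test()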
@@ -1,15 +1,17 @@
|
||||
"""Test for certbot_apache.configurator for Gentoo overrides"""
|
||||
import os
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import override_gentoo
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import override_gentoo
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
def get_vh_truth(temp_dir, config_name):
|
||||
"""Return the ground truth for the specified directory."""
|
||||
prefix = os.path.join(
|
||||
@@ -80,7 +82,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
|
||||
"""Make sure we read the Gentoo APACHE2_OPTS variable correctly"""
|
||||
defines = ['DEFAULT_VHOST', 'INFO',
|
||||
'SSL', 'SSL_DEFAULT_VHOST', 'LANGUAGE']
|
||||
self.config.parser.apacheconfig_filep = os.path.realpath(
|
||||
self.config.parser.apacheconfig_filep = filesystem.realpath(
|
||||
os.path.join(self.config.parser.root, "../conf.d/apache2"))
|
||||
self.config.parser.variables = {}
|
||||
with mock.patch("certbot_apache.override_gentoo.GentooParser.update_modules"):
|
||||
@@ -113,6 +115,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
|
||||
"""Mock httpd process stdout"""
|
||||
if command == ['apache2ctl', 'modules']:
|
||||
return mod_val
|
||||
return None # pragma: no cover
|
||||
mock_get.side_effect = mock_get_cfg
|
||||
self.config.parser.modules = set()
|
||||
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
"""Test for certbot_apache.http_01."""
|
||||
import mock
|
||||
import os
|
||||
import unittest
|
||||
import mock
|
||||
|
||||
from acme import challenges
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
from certbot import achallenges
|
||||
from certbot import errors
|
||||
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
from certbot.tests import acme_util
|
||||
|
||||
from certbot_apache.parser import get_aug_path
|
||||
from certbot_apache.tests import util
|
||||
|
||||
@@ -20,15 +21,15 @@ NUM_ACHALLS = 3
|
||||
class ApacheHttp01Test(util.ApacheTest):
|
||||
"""Test for certbot_apache.http_01.ApacheHttp01."""
|
||||
|
||||
def setUp(self, *args, **kwargs):
|
||||
def setUp(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
super(ApacheHttp01Test, self).setUp(*args, **kwargs)
|
||||
|
||||
self.account_key = self.rsa512jwk
|
||||
self.achalls = [] # type: List[achallenges.KeyAuthorizationAnnotatedChallenge]
|
||||
vh_truth = util.get_vh_truth(
|
||||
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
|
||||
# Takes the vhosts for encryption-example.demo, certbot.demo, and
|
||||
# vhost.in.rootconf
|
||||
# Takes the vhosts for encryption-example.demo, certbot.demo
|
||||
# and vhost.in.rootconf
|
||||
self.vhosts = [vh_truth[0], vh_truth[3], vh_truth[10]]
|
||||
|
||||
for i in range(NUM_ACHALLS):
|
||||
@@ -39,7 +40,7 @@ class ApacheHttp01Test(util.ApacheTest):
|
||||
"pending"),
|
||||
domain=self.vhosts[i].name, account_key=self.account_key))
|
||||
|
||||
modules = ["rewrite", "authz_core", "authz_host"]
|
||||
modules = ["ssl", "rewrite", "authz_core", "authz_host"]
|
||||
for mod in modules:
|
||||
self.config.parser.modules.add("mod_{0}.c".format(mod))
|
||||
self.config.parser.modules.add(mod + "_module")
|
||||
@@ -78,7 +79,7 @@ class ApacheHttp01Test(util.ApacheTest):
|
||||
calls = mock_enmod.call_args_list
|
||||
other_calls = []
|
||||
for call in calls:
|
||||
if "rewrite" != call[0][0]:
|
||||
if call[0][0] != "rewrite":
|
||||
other_calls.append(call)
|
||||
|
||||
# If these lists are equal, we never enabled mod_rewrite
|
||||
@@ -111,6 +112,17 @@ class ApacheHttp01Test(util.ApacheTest):
|
||||
domain="something.nonexistent", account_key=self.account_key)]
|
||||
self.common_perform_test(achalls, vhosts)
|
||||
|
||||
def test_configure_multiple_vhosts(self):
|
||||
vhosts = [v for v in self.config.vhosts if "duplicate.example.com" in v.get_names()]
|
||||
self.assertEqual(len(vhosts), 2)
|
||||
achalls = [
|
||||
achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.chall_to_challb(
|
||||
challenges.HTTP01(token=((b'a' * 16))),
|
||||
"pending"),
|
||||
domain="duplicate.example.com", account_key=self.account_key)]
|
||||
self.common_perform_test(achalls, vhosts)
|
||||
|
||||
def test_no_vhost(self):
|
||||
for achall in self.achalls:
|
||||
self.http.add_chall(achall)
|
||||
@@ -169,22 +181,21 @@ class ApacheHttp01Test(util.ApacheTest):
|
||||
self.assertEqual(self.http.perform(), expected_response)
|
||||
|
||||
self.assertTrue(os.path.isdir(self.http.challenge_dir))
|
||||
self._has_min_permissions(self.http.challenge_dir, 0o755)
|
||||
self.assertTrue(filesystem.has_min_permissions(self.http.challenge_dir, 0o755))
|
||||
self._test_challenge_conf()
|
||||
|
||||
for achall in achalls:
|
||||
self._test_challenge_file(achall)
|
||||
|
||||
for vhost in vhosts:
|
||||
if not vhost.ssl:
|
||||
matches = self.config.parser.find_dir("Include",
|
||||
self.http.challenge_conf_pre,
|
||||
vhost.path)
|
||||
self.assertEqual(len(matches), 1)
|
||||
matches = self.config.parser.find_dir("Include",
|
||||
self.http.challenge_conf_post,
|
||||
vhost.path)
|
||||
self.assertEqual(len(matches), 1)
|
||||
matches = self.config.parser.find_dir("Include",
|
||||
self.http.challenge_conf_pre,
|
||||
vhost.path)
|
||||
self.assertEqual(len(matches), 1)
|
||||
matches = self.config.parser.find_dir("Include",
|
||||
self.http.challenge_conf_post,
|
||||
vhost.path)
|
||||
self.assertEqual(len(matches), 1)
|
||||
|
||||
self.assertTrue(os.path.exists(challenge_dir))
|
||||
|
||||
@@ -208,15 +219,10 @@ class ApacheHttp01Test(util.ApacheTest):
|
||||
name = os.path.join(self.http.challenge_dir, achall.chall.encode("token"))
|
||||
validation = achall.validation(self.account_key)
|
||||
|
||||
self._has_min_permissions(name, 0o644)
|
||||
self.assertTrue(filesystem.has_min_permissions(name, 0o644))
|
||||
with open(name, 'rb') as f:
|
||||
self.assertEqual(f.read(), validation.encode())
|
||||
|
||||
def _has_min_permissions(self, path, min_mode):
|
||||
"""Tests the given file has at least the permissions in mode."""
|
||||
st_mode = os.stat(path).st_mode
|
||||
self.assertEqual(st_mode, st_mode | min_mode)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
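The _has_min_permissions helper removed above is superseded by certbot.compat.filesystem.has_min_permissions in these assertions. The underlying check is just a bitwise comparison of the file mode against the required bits, roughly equivalent to this sketch (the compat helper's exact behaviour is assumed, not quoted):

# Illustrative equivalent of the permission check used by the tests.
import os
import stat

def has_at_least_mode(path, min_mode):
    """Return True if every permission bit in min_mode is set on path."""
    current = stat.S_IMODE(os.stat(path).st_mode)
    return (current | min_mode) == current

# Example: the challenge directory must be world-readable and traversable.
# has_at_least_mode(http.challenge_dir, 0o755)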
@@ -1,12 +1,12 @@
|
||||
# pylint: disable=too-many-public-methods
|
||||
"""Tests for certbot_apache.parser."""
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
import augeas
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache.tests import util
|
||||
|
||||
@@ -22,6 +22,27 @@ class BasicParserTest(util.ParserTest):
|
||||
shutil.rmtree(self.config_dir)
|
||||
shutil.rmtree(self.work_dir)
|
||||
|
||||
def test_bad_parse(self):
|
||||
self.parser.parse_file(os.path.join(self.parser.root,
|
||||
"conf-available", "bad_conf_file.conf"))
|
||||
self.assertRaises(
|
||||
errors.PluginError, self.parser.check_parsing_errors, "httpd.aug")
|
||||
|
||||
def test_bad_save(self):
|
||||
mock_save = mock.Mock()
|
||||
mock_save.side_effect = IOError
|
||||
self.parser.aug.save = mock_save
|
||||
self.assertRaises(errors.PluginError, self.parser.unsaved_files)
|
||||
|
||||
def test_aug_version(self):
|
||||
mock_match = mock.Mock(return_value=["something"])
|
||||
self.parser.aug.match = mock_match
|
||||
# pylint: disable=protected-access
|
||||
self.assertEqual(self.parser.check_aug_version(),
|
||||
["something"])
|
||||
self.parser.aug.match.side_effect = RuntimeError
|
||||
self.assertFalse(self.parser.check_aug_version())
|
||||
|
||||
def test_find_config_root_no_root(self):
|
||||
# pylint: disable=protected-access
|
||||
os.remove(self.parser.loc["root"])
|
||||
@@ -52,7 +73,7 @@ class BasicParserTest(util.ParserTest):
|
||||
test2 = self.parser.find_dir("documentroot")
|
||||
|
||||
self.assertEqual(len(test), 1)
|
||||
self.assertEqual(len(test2), 7)
|
||||
self.assertEqual(len(test2), 8)
|
||||
|
||||
def test_add_dir(self):
|
||||
aug_default = "/files" + self.parser.loc["default"]
|
||||
@@ -234,6 +255,7 @@ class BasicParserTest(util.ParserTest):
|
||||
return inc_val
|
||||
elif cmd[-1] == "DUMP_MODULES":
|
||||
return mod_val
|
||||
return None # pragma: no cover
|
||||
|
||||
mock_cfg.side_effect = mock_get_vars
|
||||
|
||||
@@ -310,21 +332,38 @@ class BasicParserTest(util.ParserTest):
|
||||
class ParserInitTest(util.ApacheTest):
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(ParserInitTest, self).setUp()
|
||||
self.aug = augeas.Augeas(
|
||||
flags=augeas.Augeas.NONE | augeas.Augeas.NO_MODL_AUTOLOAD)
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.temp_dir)
|
||||
shutil.rmtree(self.config_dir)
|
||||
shutil.rmtree(self.work_dir)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser.init_augeas")
|
||||
def test_prepare_no_augeas(self, mock_init_augeas):
|
||||
from certbot_apache.parser import ApacheParser
|
||||
mock_init_augeas.side_effect = errors.NoInstallationError
|
||||
self.config.config_test = mock.Mock()
|
||||
self.assertRaises(
|
||||
errors.NoInstallationError, ApacheParser,
|
||||
os.path.relpath(self.config_path), "/dummy/vhostpath",
|
||||
version=(2, 4, 22), configurator=self.config)
|
||||
|
||||
def test_init_old_aug(self):
|
||||
from certbot_apache.parser import ApacheParser
|
||||
with mock.patch("certbot_apache.parser.ApacheParser.check_aug_version") as mock_c:
|
||||
mock_c.return_value = False
|
||||
self.assertRaises(
|
||||
errors.NotSupportedError,
|
||||
ApacheParser, os.path.relpath(self.config_path),
|
||||
"/dummy/vhostpath", version=(2, 4, 22), configurator=self.config)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
|
||||
def test_unparseable(self, mock_cfg):
|
||||
from certbot_apache.parser import ApacheParser
|
||||
mock_cfg.return_value = ('Define: TEST')
|
||||
self.assertRaises(
|
||||
errors.PluginError,
|
||||
ApacheParser, self.aug, os.path.relpath(self.config_path),
|
||||
ApacheParser, os.path.relpath(self.config_path),
|
||||
"/dummy/vhostpath", version=(2, 2, 22), configurator=self.config)
|
||||
|
||||
def test_root_normalized(self):
|
||||
@@ -336,8 +375,7 @@ class ParserInitTest(util.ApacheTest):
|
||||
self.temp_dir,
|
||||
"debian_apache_2_4/////multiple_vhosts/../multiple_vhosts/apache2")
|
||||
|
||||
parser = ApacheParser(self.aug, path,
|
||||
"/dummy/vhostpath", configurator=self.config)
|
||||
parser = ApacheParser(path, "/dummy/vhostpath", configurator=self.config)
|
||||
|
||||
self.assertEqual(parser.root, self.config_path)
|
||||
|
||||
@@ -346,7 +384,7 @@ class ParserInitTest(util.ApacheTest):
|
||||
with mock.patch("certbot_apache.parser.ApacheParser."
|
||||
"update_runtime_variables"):
|
||||
parser = ApacheParser(
|
||||
self.aug, os.path.relpath(self.config_path),
|
||||
os.path.relpath(self.config_path),
|
||||
"/dummy/vhostpath", configurator=self.config)
|
||||
|
||||
self.assertEqual(parser.root, self.config_path)
|
||||
@@ -356,7 +394,7 @@ class ParserInitTest(util.ApacheTest):
|
||||
with mock.patch("certbot_apache.parser.ApacheParser."
|
||||
"update_runtime_variables"):
|
||||
parser = ApacheParser(
|
||||
self.aug, self.config_path + os.path.sep,
|
||||
self.config_path + os.path.sep,
|
||||
"/dummy/vhostpath", configurator=self.config)
|
||||
self.assertEqual(parser.root, self.config_path)
|
||||
|
||||
|
||||
certbot-apache/certbot_apache/tests/testdata/centos6_apache/apache/httpd/conf.d/README (new vendored file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
|
||||
This directory holds Apache 2.0 module-specific configuration files;
|
||||
any files in this directory which have the ".conf" extension will be
|
||||
processed as Apache configuration files.
|
||||
|
||||
Files are processed in alphabetical order, so if using configuration
|
||||
directives which depend on, say, mod_perl being loaded, ensure that
|
||||
these are placed in a filename later in the sort order than "perl.conf".
|
||||
|
||||
certbot-apache/certbot_apache/tests/testdata/centos6_apache/apache/httpd/conf.d/ssl.conf (new vendored file, 222 lines)
@@ -0,0 +1,222 @@
|
||||
#
|
||||
# This is the Apache server configuration file providing SSL support.
|
||||
# It contains the configuration directives to instruct the server how to
|
||||
# serve pages over an https connection. For detailing information about these
|
||||
# directives see <URL:http://httpd.apache.org/docs/2.2/mod/mod_ssl.html>
|
||||
#
|
||||
# Do NOT simply read the instructions in here without understanding
|
||||
# what they do. They're here only as hints or reminders. If you are unsure
|
||||
# consult the online docs. You have been warned.
|
||||
#
|
||||
|
||||
LoadModule ssl_module modules/mod_ssl.so
|
||||
|
||||
#
|
||||
# When we also provide SSL we have to listen to the
|
||||
# the HTTPS port in addition.
|
||||
#
|
||||
Listen 443
|
||||
|
||||
##
|
||||
## SSL Global Context
|
||||
##
|
||||
## All SSL configuration in this context applies both to
|
||||
## the main server and all SSL-enabled virtual hosts.
|
||||
##
|
||||
|
||||
# Pass Phrase Dialog:
|
||||
# Configure the pass phrase gathering process.
|
||||
# The filtering dialog program (`builtin' is a internal
|
||||
# terminal dialog) has to provide the pass phrase on stdout.
|
||||
SSLPassPhraseDialog builtin
|
||||
|
||||
# Inter-Process Session Cache:
|
||||
# Configure the SSL Session Cache: First the mechanism
|
||||
# to use and second the expiring timeout (in seconds).
|
||||
SSLSessionCache shmcb:/var/cache/mod_ssl/scache(512000)
|
||||
SSLSessionCacheTimeout 300
|
||||
|
||||
# Semaphore:
|
||||
# Configure the path to the mutual exclusion semaphore the
|
||||
# SSL engine uses internally for inter-process synchronization.
|
||||
SSLMutex default
|
||||
|
||||
# Pseudo Random Number Generator (PRNG):
|
||||
# Configure one or more sources to seed the PRNG of the
|
||||
# SSL library. The seed data should be of good random quality.
|
||||
# WARNING! On some platforms /dev/random blocks if not enough entropy
|
||||
# is available. This means you then cannot use the /dev/random device
|
||||
# because it would lead to very long connection times (as long as
|
||||
# it requires to make more entropy available). But usually those
|
||||
# platforms additionally provide a /dev/urandom device which doesn't
|
||||
# block. So, if available, use this one instead. Read the mod_ssl User
|
||||
# Manual for more details.
|
||||
SSLRandomSeed startup file:/dev/urandom 256
|
||||
SSLRandomSeed connect builtin
|
||||
#SSLRandomSeed startup file:/dev/random 512
|
||||
#SSLRandomSeed connect file:/dev/random 512
|
||||
#SSLRandomSeed connect file:/dev/urandom 512
|
||||
|
||||
#
|
||||
# Use "SSLCryptoDevice" to enable any supported hardware
|
||||
# accelerators. Use "openssl engine -v" to list supported
|
||||
# engine names. NOTE: If you enable an accelerator and the
|
||||
# server does not start, consult the error logs and ensure
|
||||
# your accelerator is functioning properly.
|
||||
#
|
||||
SSLCryptoDevice builtin
|
||||
#SSLCryptoDevice ubsec
|
||||
|
||||
##
|
||||
## SSL Virtual Host Context
|
||||
##
|
||||
|
||||
<VirtualHost _default_:443>
|
||||
|
||||
# General setup for the virtual host, inherited from global configuration
|
||||
#DocumentRoot "/var/www/html"
|
||||
#ServerName www.example.com:443
|
||||
|
||||
# Use separate log files for the SSL virtual host; note that LogLevel
|
||||
# is not inherited from httpd.conf.
|
||||
ErrorLog logs/ssl_error_log
|
||||
TransferLog logs/ssl_access_log
|
||||
LogLevel warn
|
||||
|
||||
# SSL Engine Switch:
|
||||
# Enable/Disable SSL for this virtual host.
|
||||
SSLEngine on
|
||||
|
||||
# SSL Protocol support:
|
||||
# List the enable protocol levels with which clients will be able to
|
||||
# connect. Disable SSLv2 access by default:
|
||||
SSLProtocol all -SSLv2
|
||||
|
||||
# SSL Cipher Suite:
|
||||
# List the ciphers that the client is permitted to negotiate.
|
||||
# See the mod_ssl documentation for a complete list.
|
||||
SSLCipherSuite DEFAULT:!EXP:!SSLv2:!DES:!IDEA:!SEED:+3DES
|
||||
|
||||
# Server Certificate:
|
||||
# Point SSLCertificateFile at a PEM encoded certificate. If
|
||||
# the certificate is encrypted, then you will be prompted for a
|
||||
# pass phrase. Note that a kill -HUP will prompt again. A new
|
||||
# certificate can be generated using the genkey(1) command.
|
||||
SSLCertificateFile /etc/pki/tls/certs/localhost.crt
|
||||
|
||||
# Server Private Key:
|
||||
# If the key is not combined with the certificate, use this
|
||||
# directive to point at the key file. Keep in mind that if
|
||||
# you've both a RSA and a DSA private key you can configure
|
||||
# both in parallel (to also allow the use of DSA ciphers, etc.)
|
||||
SSLCertificateKeyFile /etc/pki/tls/private/localhost.key
|
||||
|
||||
# Server Certificate Chain:
|
||||
# Point SSLCertificateChainFile at a file containing the
|
||||
# concatenation of PEM encoded CA certificates which form the
|
||||
# certificate chain for the server certificate. Alternatively
|
||||
# the referenced file can be the same as SSLCertificateFile
|
||||
# when the CA certificates are directly appended to the server
|
||||
# certificate for convinience.
|
||||
#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt
|
||||
|
||||
# Certificate Authority (CA):
|
||||
# Set the CA certificate verification path where to find CA
|
||||
# certificates for client authentication or alternatively one
|
||||
# huge file containing all of them (file must be PEM encoded)
|
||||
#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt
|
||||
|
||||
# Client Authentication (Type):
|
||||
# Client certificate verification type and depth. Types are
|
||||
# none, optional, require and optional_no_ca. Depth is a
|
||||
# number which specifies how deeply to verify the certificate
|
||||
# issuer chain before deciding the certificate is not valid.
|
||||
#SSLVerifyClient require
|
||||
#SSLVerifyDepth 10
|
||||
|
||||
# Access Control:
|
||||
# With SSLRequire you can do per-directory access control based
|
||||
# on arbitrary complex boolean expressions containing server
|
||||
# variable checks and other lookup directives. The syntax is a
|
||||
# mixture between C and Perl. See the mod_ssl documentation
|
||||
# for more details.
|
||||
#<Location />
|
||||
#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \
|
||||
# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \
|
||||
# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \
|
||||
# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \
|
||||
# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \
|
||||
# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/
|
||||
#</Location>
|
||||
|
||||
# SSL Engine Options:
|
||||
# Set various options for the SSL engine.
|
||||
# o FakeBasicAuth:
|
||||
# Translate the client X.509 into a Basic Authorisation. This means that
|
||||
# the standard Auth/DBMAuth methods can be used for access control. The
|
||||
# user name is the `one line' version of the client's X.509 certificate.
|
||||
# Note that no password is obtained from the user. Every entry in the user
|
||||
# file needs this password: `xxj31ZMTZzkVA'.
|
||||
# o ExportCertData:
|
||||
# This exports two additional environment variables: SSL_CLIENT_CERT and
|
||||
# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the
|
||||
# server (always existing) and the client (only existing when client
|
||||
# authentication is used). This can be used to import the certificates
|
||||
# into CGI scripts.
|
||||
# o StdEnvVars:
|
||||
# This exports the standard SSL/TLS related `SSL_*' environment variables.
|
||||
# Per default this exportation is switched off for performance reasons,
|
||||
# because the extraction step is an expensive operation and is usually
|
||||
# useless for serving static content. So one usually enables the
|
||||
# exportation for CGI and SSI requests only.
|
||||
# o StrictRequire:
|
||||
# This denies access when "SSLRequireSSL" or "SSLRequire" applied even
|
||||
# under a "Satisfy any" situation, i.e. when it applies access is denied
|
||||
# and no other module can change it.
|
||||
# o OptRenegotiate:
|
||||
# This enables optimized SSL connection renegotiation handling when SSL
|
||||
# directives are used in per-directory context.
|
||||
#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
|
||||
<Files ~ "\.(cgi|shtml|phtml|php3?)$">
|
||||
SSLOptions +StdEnvVars
|
||||
</Files>
|
||||
<Directory "/var/www/cgi-bin">
|
||||
SSLOptions +StdEnvVars
|
||||
</Directory>
|
||||
|
||||
# SSL Protocol Adjustments:
|
||||
# The safe and default but still SSL/TLS standard compliant shutdown
|
||||
# approach is that mod_ssl sends the close notify alert but doesn't wait for
|
||||
# the close notify alert from client. When you need a different shutdown
|
||||
# approach you can use one of the following variables:
|
||||
# o ssl-unclean-shutdown:
|
||||
# This forces an unclean shutdown when the connection is closed, i.e. no
|
||||
# SSL close notify alert is send or allowed to received. This violates
|
||||
# the SSL/TLS standard but is needed for some brain-dead browsers. Use
|
||||
# this when you receive I/O errors because of the standard approach where
|
||||
# mod_ssl sends the close notify alert.
|
||||
# o ssl-accurate-shutdown:
|
||||
# This forces an accurate shutdown when the connection is closed, i.e. a
|
||||
# SSL close notify alert is send and mod_ssl waits for the close notify
|
||||
# alert of the client. This is 100% SSL/TLS standard compliant, but in
|
||||
# practice often causes hanging connections with brain-dead browsers. Use
|
||||
# this only for browsers where you know that their SSL implementation
|
||||
# works correctly.
|
||||
# Notice: Most problems of broken clients are also related to the HTTP
|
||||
# keep-alive facility, so you usually additionally want to disable
|
||||
# keep-alive for those clients, too. Use variable "nokeepalive" for this.
|
||||
# Similarly, one has to force some clients to use HTTP/1.0 to workaround
|
||||
# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and
|
||||
# "force-response-1.0" for this.
|
||||
SetEnvIf User-Agent ".*MSIE.*" \
|
||||
nokeepalive ssl-unclean-shutdown \
|
||||
downgrade-1.0 force-response-1.0
|
||||
|
||||
# Per-Server Logging:
|
||||
# The home of a custom SSL log file. Use this when you want a
|
||||
# compact non-error SSL logfile on a virtual host basis.
|
||||
CustomLog logs/ssl_request_log \
|
||||
"%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
|
||||
|
||||
</VirtualHost>
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
<VirtualHost *:80>
|
||||
ServerName test.example.com
|
||||
ServerAdmin webmaster@dummy-host.example.com
|
||||
DocumentRoot /var/www/htdocs
|
||||
ErrorLog logs/dummy-host.example.com-error_log
|
||||
CustomLog logs/dummy-host.example.com-access_log common
|
||||
</VirtualHost>
|
||||
certbot-apache/certbot_apache/tests/testdata/centos6_apache/apache/httpd/conf.d/welcome.conf (new vendored file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
#
|
||||
# This configuration file enables the default "Welcome"
|
||||
# page if there is no default index page present for
|
||||
# the root URL. To disable the Welcome page, comment
|
||||
# out all the lines below.
|
||||
#
|
||||
<LocationMatch "^/+$">
|
||||
Options -Indexes
|
||||
ErrorDocument 403 /error/noindex.html
|
||||
</LocationMatch>
|
||||
|
||||
certbot-apache/certbot_apache/tests/testdata/centos6_apache/apache/httpd/conf/httpd.conf (new vendored file, 1009 lines; diff too large to show)
@@ -0,0 +1,9 @@
|
||||
<VirtualHost 10.2.3.4:80>
|
||||
ServerName duplicate.example.com
|
||||
|
||||
ServerAdmin webmaster@certbot.demo
|
||||
DocumentRoot /var/www/html
|
||||
|
||||
ErrorLog ${APACHE_LOG_DIR}/error.log
|
||||
CustomLog ${APACHE_LOG_DIR}/access.log combined
|
||||
</VirtualHost>
|
||||
@@ -0,0 +1,14 @@
|
||||
<IfModule mod_ssl.c>
|
||||
<VirtualHost 10.2.3.4:443>
|
||||
ServerName duplicate.example.com
|
||||
|
||||
ServerAdmin webmaster@certbot.demo
|
||||
DocumentRoot /var/www/html
|
||||
|
||||
ErrorLog ${APACHE_LOG_DIR}/error.log
|
||||
CustomLog ${APACHE_LOG_DIR}/access.log combined
|
||||
|
||||
SSLCertificateFile /etc/apache2/certs/certbot-cert_5.pem
|
||||
SSLCertificateKeyFile /etc/apache2/ssl/key-certbot_15.pem
|
||||
</VirtualHost>
|
||||
</IfModule>
|
||||
@@ -0,0 +1 @@
|
||||
../sites-available/duplicatehttp.conf
|
||||
@@ -0,0 +1 @@
|
||||
../sites-available/duplicatehttps.conf
|
||||
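These duplicatehttp/duplicatehttps fixtures back the new test_configure_multiple_vhosts case above: the same ServerName appears in one plain and one SSL vhost, and the test expects the HTTP-01 challenge include to be added to every vhost that answers for the domain. A small sketch of the selection the test performs, assuming VirtualHost objects expose get_names() as elsewhere in these tests:

# Illustrative helper mirroring the selection in test_configure_multiple_vhosts.
def vhosts_serving(configurator, domain):
    """Return every parsed vhost, HTTP or HTTPS, that serves the given domain."""
    return [vh for vh in configurator.vhosts if domain in vh.get_names()]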
@@ -1,151 +0,0 @@
|
||||
"""Test for certbot_apache.tls_sni_01."""
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
from certbot.plugins import common_test
|
||||
|
||||
from certbot_apache import obj
|
||||
from certbot_apache.tests import util
|
||||
|
||||
from six.moves import xrange # pylint: disable=redefined-builtin, import-error
|
||||
|
||||
|
||||
class TlsSniPerformTest(util.ApacheTest):
|
||||
"""Test the ApacheTlsSni01 challenge."""
|
||||
|
||||
auth_key = common_test.AUTH_KEY
|
||||
achalls = common_test.ACHALLS
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(TlsSniPerformTest, self).setUp()
|
||||
|
||||
config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir,
|
||||
self.work_dir)
|
||||
config.config.tls_sni_01_port = 443
|
||||
|
||||
from certbot_apache import tls_sni_01
|
||||
self.sni = tls_sni_01.ApacheTlsSni01(config)
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.temp_dir)
|
||||
shutil.rmtree(self.config_dir)
|
||||
shutil.rmtree(self.work_dir)
|
||||
|
||||
def test_perform0(self):
|
||||
resp = self.sni.perform()
|
||||
self.assertEqual(len(resp), 0)
|
||||
|
||||
@mock.patch("certbot.util.exe_exists")
|
||||
@mock.patch("certbot.util.run_script")
|
||||
def test_perform1(self, _, mock_exists):
|
||||
self.sni.configurator.parser.modules.add("socache_shmcb_module")
|
||||
self.sni.configurator.parser.modules.add("ssl_module")
|
||||
|
||||
mock_exists.return_value = True
|
||||
self.sni.configurator.parser.update_runtime_variables = mock.Mock()
|
||||
|
||||
achall = self.achalls[0]
|
||||
self.sni.add_chall(achall)
|
||||
response = self.achalls[0].response(self.auth_key)
|
||||
mock_setup_cert = mock.MagicMock(return_value=response)
|
||||
# pylint: disable=protected-access
|
||||
self.sni._setup_challenge_cert = mock_setup_cert
|
||||
|
||||
responses = self.sni.perform()
|
||||
mock_setup_cert.assert_called_once_with(achall)
|
||||
|
||||
# Check to make sure challenge config path is included in apache config
|
||||
self.assertEqual(
|
||||
len(self.sni.configurator.parser.find_dir(
|
||||
"Include", self.sni.challenge_conf)), 1)
|
||||
self.assertEqual(len(responses), 1)
|
||||
self.assertEqual(responses[0], response)
|
||||
|
||||
def test_perform2(self):
|
||||
# Avoid load module
|
||||
self.sni.configurator.parser.modules.add("ssl_module")
|
||||
self.sni.configurator.parser.modules.add("socache_shmcb_module")
|
||||
acme_responses = []
|
||||
for achall in self.achalls:
|
||||
self.sni.add_chall(achall)
|
||||
acme_responses.append(achall.response(self.auth_key))
|
||||
|
||||
mock_setup_cert = mock.MagicMock(side_effect=acme_responses)
|
||||
# pylint: disable=protected-access
|
||||
self.sni._setup_challenge_cert = mock_setup_cert
|
||||
|
||||
with mock.patch(
|
||||
"certbot_apache.override_debian.DebianConfigurator.enable_mod"):
|
||||
sni_responses = self.sni.perform()
|
||||
|
||||
self.assertEqual(mock_setup_cert.call_count, 2)
|
||||
|
||||
# Make sure calls made to mocked function were correct
|
||||
self.assertEqual(
|
||||
mock_setup_cert.call_args_list[0], mock.call(self.achalls[0]))
|
||||
self.assertEqual(
|
||||
mock_setup_cert.call_args_list[1], mock.call(self.achalls[1]))
|
||||
|
||||
self.assertEqual(
|
||||
len(self.sni.configurator.parser.find_dir(
|
||||
"Include", self.sni.challenge_conf)),
|
||||
1)
|
||||
self.assertEqual(len(sni_responses), 2)
|
||||
for i in xrange(2):
|
||||
self.assertEqual(sni_responses[i], acme_responses[i])
|
||||
|
||||
def test_mod_config(self):
|
||||
z_domains = []
|
||||
for achall in self.achalls:
|
||||
self.sni.add_chall(achall)
|
||||
z_domain = achall.response(self.auth_key).z_domain
|
||||
z_domains.append(set([z_domain.decode('ascii')]))
|
||||
|
||||
self.sni._mod_config() # pylint: disable=protected-access
|
||||
self.sni.configurator.save()
|
||||
|
||||
self.sni.configurator.parser.find_dir(
|
||||
"Include", self.sni.challenge_conf)
|
||||
vh_match = self.sni.configurator.aug.match(
|
||||
"/files" + self.sni.challenge_conf + "//VirtualHost")
|
||||
|
||||
vhs = []
|
||||
for match in vh_match:
|
||||
# pylint: disable=protected-access
|
||||
vhs.append(self.sni.configurator._create_vhost(match))
|
||||
self.assertEqual(len(vhs), 2)
|
||||
for vhost in vhs:
|
||||
self.assertEqual(vhost.addrs, set([obj.Addr.fromstring("*:443")]))
|
||||
names = vhost.get_names()
|
||||
self.assertTrue(names in z_domains)
|
||||
|
||||
def test_get_addrs_default(self):
|
||||
self.sni.configurator.choose_vhost = mock.Mock(
|
||||
return_value=obj.VirtualHost(
|
||||
"path", "aug_path",
|
||||
set([obj.Addr.fromstring("_default_:443")]),
|
||||
False, False)
|
||||
)
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.assertEqual(
|
||||
set([obj.Addr.fromstring("*:443")]),
|
||||
self.sni._get_addrs(self.achalls[0]))
|
||||
|
||||
def test_get_addrs_no_vhost_found(self):
|
||||
self.sni.configurator.choose_vhost = mock.Mock(
|
||||
side_effect=errors.MissingCommandlineFlag(
|
||||
"Failed to run Apache plugin non-interactively"))
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.assertEqual(
|
||||
set([obj.Addr.fromstring("*:443")]),
|
||||
self.sni._get_addrs(self.achalls[0]))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,5 +1,4 @@
|
||||
"""Common utilities for certbot_apache."""
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import unittest
|
||||
@@ -9,10 +8,9 @@ import josepy as jose
|
||||
import mock
|
||||
import zope.component
|
||||
|
||||
from certbot.compat import os
|
||||
from certbot.display import util as display_util
|
||||
|
||||
from certbot.plugins import common
|
||||
|
||||
from certbot.tests import util as test_util
|
||||
|
||||
from certbot_apache import configurator
|
||||
@@ -80,8 +78,7 @@ class ParserTest(ApacheTest):
|
||||
with mock.patch("certbot_apache.parser.ApacheParser."
|
||||
"update_runtime_variables"):
|
||||
self.parser = ApacheParser(
|
||||
self.aug, self.config_path, self.vhost_path,
|
||||
configurator=self.config)
|
||||
self.config_path, self.vhost_path, configurator=self.config)
|
||||
|
||||
|
||||
def get_apache_configurator( # pylint: disable=too-many-arguments, too-many-locals
|
||||
@@ -196,7 +193,17 @@ def get_vh_truth(temp_dir, config_name):
|
||||
"/files" + os.path.join(temp_dir, config_name,
|
||||
"apache2/apache2.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("*:80")]), False, True,
|
||||
"vhost.in.rootconf")]
|
||||
"vhost.in.rootconf"),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "duplicatehttp.conf"),
|
||||
os.path.join(aug_pre, "duplicatehttp.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("10.2.3.4:80")]), False, True,
|
||||
"duplicate.example.com"),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "duplicatehttps.conf"),
|
||||
os.path.join(aug_pre, "duplicatehttps.conf/IfModule/VirtualHost"),
|
||||
set([obj.Addr.fromstring("10.2.3.4:443")]), True, True,
|
||||
"duplicate.example.com")]
|
||||
return vh_truth
|
||||
if config_name == "debian_apache_2_4/multi_vhosts":
|
||||
prefix = os.path.join(
|
||||
|
||||
@@ -1,174 +0,0 @@
|
||||
"""A class that performs TLS-SNI-01 challenges for Apache"""
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
||||
from certbot.plugins import common
|
||||
from certbot.errors import PluginError, MissingCommandlineFlag
|
||||
|
||||
from certbot_apache import obj
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApacheTlsSni01(common.TLSSNI01):
|
||||
"""Class that performs TLS-SNI-01 challenges within the Apache configurator
|
||||
|
||||
:ivar configurator: ApacheConfigurator object
|
||||
:type configurator: :class:`~apache.configurator.ApacheConfigurator`
|
||||
|
||||
:ivar list achalls: Annotated TLS-SNI-01
|
||||
(`.KeyAuthorizationAnnotatedChallenge`) challenges.
|
||||
|
||||
:param list indices: Meant to hold indices of challenges in a
|
||||
larger array. ApacheTlsSni01 is capable of solving many challenges
|
||||
at once which causes an indexing issue within ApacheConfigurator
|
||||
who must return all responses in order. Imagine ApacheConfigurator
|
||||
maintaining state about where all of the http-01 Challenges,
|
||||
TLS-SNI-01 Challenges belong in the response array. This is an
|
||||
optional utility.
|
||||
|
||||
:param str challenge_conf: location of the challenge config file
|
||||
|
||||
"""
|
||||
|
||||
VHOST_TEMPLATE = """\
|
||||
<VirtualHost {vhost}>
|
||||
ServerName {server_name}
|
||||
UseCanonicalName on
|
||||
SSLStrictSNIVHostCheck on
|
||||
|
||||
LimitRequestBody 1048576
|
||||
|
||||
Include {ssl_options_conf_path}
|
||||
SSLCertificateFile {cert_path}
|
||||
SSLCertificateKeyFile {key_path}
|
||||
|
||||
DocumentRoot {document_root}
|
||||
</VirtualHost>
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ApacheTlsSni01, self).__init__(*args, **kwargs)
|
||||
|
||||
self.challenge_conf = os.path.join(
|
||||
self.configurator.conf("challenge-location"),
|
||||
"le_tls_sni_01_cert_challenge.conf")
|
||||
|
||||
def perform(self):
|
||||
"""Perform a TLS-SNI-01 challenge."""
|
||||
if not self.achalls:
|
||||
return []
|
||||
# Save any changes to the configuration as a precaution
|
||||
# About to make temporary changes to the config
|
||||
self.configurator.save("Changes before challenge setup", True)
|
||||
|
||||
# Prepare the server for HTTPS
|
||||
self.configurator.prepare_server_https(
|
||||
str(self.configurator.config.tls_sni_01_port), True)
|
||||
|
||||
responses = []
|
||||
|
||||
# Create all of the challenge certs
|
||||
for achall in self.achalls:
|
||||
responses.append(self._setup_challenge_cert(achall))
|
||||
|
||||
# Setup the configuration
|
||||
addrs = self._mod_config()
|
||||
self.configurator.save("Don't lose mod_config changes", True)
|
||||
self.configurator.make_addrs_sni_ready(addrs)
|
||||
|
||||
# Save reversible changes
|
||||
self.configurator.save("SNI Challenge", True)
|
||||
|
||||
return responses
|
||||
|
||||
def _mod_config(self):
|
||||
"""Modifies Apache config files to include challenge vhosts.
|
||||
|
||||
Result: Apache config includes virtual servers for issued challs
|
||||
|
||||
:returns: All TLS-SNI-01 addresses used
|
||||
:rtype: set
|
||||
|
||||
"""
|
||||
addrs = set() # type: Set[obj.Addr]
|
||||
config_text = "<IfModule mod_ssl.c>\n"
|
||||
|
||||
for achall in self.achalls:
|
||||
achall_addrs = self._get_addrs(achall)
|
||||
addrs.update(achall_addrs)
|
||||
|
||||
config_text += self._get_config_text(achall, achall_addrs)
|
||||
|
||||
config_text += "</IfModule>\n"
|
||||
|
||||
self.configurator.parser.add_include(
|
||||
self.configurator.parser.loc["default"], self.challenge_conf)
|
||||
self.configurator.reverter.register_file_creation(
|
||||
True, self.challenge_conf)
|
||||
|
||||
logger.debug("writing a config file with text:\n %s", config_text)
|
||||
with open(self.challenge_conf, "w") as new_conf:
|
||||
new_conf.write(config_text)
|
||||
|
||||
return addrs
|
||||
|
||||
def _get_addrs(self, achall):
|
||||
"""Return the Apache addresses needed for TLS-SNI-01."""
|
||||
# TODO: Checkout _default_ rules.
|
||||
addrs = set()
|
||||
default_addr = obj.Addr(("*", str(
|
||||
self.configurator.config.tls_sni_01_port)))
|
||||
|
||||
try:
|
||||
vhost = self.configurator.choose_vhost(achall.domain,
|
||||
create_if_no_ssl=False)
|
||||
except (PluginError, MissingCommandlineFlag):
|
||||
# We couldn't find the virtualhost for this domain, possibly
|
||||
# because it's a new vhost that's not configured yet
|
||||
# (GH #677). See also GH #2600.
|
||||
logger.warning("Falling back to default vhost %s...", default_addr)
|
||||
addrs.add(default_addr)
|
||||
return addrs
|
||||
|
||||
for addr in vhost.addrs:
|
||||
if "_default_" == addr.get_addr():
|
||||
addrs.add(default_addr)
|
||||
else:
|
||||
addrs.add(
|
||||
addr.get_sni_addr(
|
||||
self.configurator.config.tls_sni_01_port))
|
||||
|
||||
return addrs
|
||||
|
||||
def _get_config_text(self, achall, ip_addrs):
|
||||
"""Chocolate virtual server configuration text
|
||||
|
||||
:param .KeyAuthorizationAnnotatedChallenge achall: Annotated
|
||||
TLS-SNI-01 challenge.
|
||||
|
||||
:param list ip_addrs: addresses of challenged domain
|
||||
:class:`list` of type `~.obj.Addr`
|
||||
|
||||
:returns: virtual host configuration text
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
ips = " ".join(str(i) for i in ip_addrs)
|
||||
document_root = os.path.join(
|
||||
self.configurator.config.work_dir, "tls_sni_01_page/")
|
||||
# TODO: Python docs is not clear how multiline string literal
|
||||
# newlines are parsed on different platforms. At least on
|
||||
# Linux (Debian sid), when source file uses CRLF, Python still
|
||||
# parses it as "\n"... c.f.:
|
||||
# https://docs.python.org/2.7/reference/lexical_analysis.html
|
||||
return self.VHOST_TEMPLATE.format(
|
||||
vhost=ips,
|
||||
server_name=achall.response(achall.account_key).z_domain.decode('ascii'),
|
||||
ssl_options_conf_path=self.configurator.mod_ssl_conf,
|
||||
cert_path=self.get_cert_path(achall),
|
||||
key_path=self.get_key_path(achall),
|
||||
document_root=document_root).replace("\n", os.linesep)
|
||||
@@ -13,7 +13,7 @@
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
from certbot.compat import os
|
||||
import shlex
|
||||
|
||||
import mock
|
||||
|
||||
@@ -1,2 +1,3 @@
|
||||
acme[dev]==0.25.0
|
||||
certbot[dev]==0.26.0
|
||||
# Remember to update setup.py to match the package versions below.
|
||||
acme[dev]==0.29.0
|
||||
certbot[dev]==0.39.0
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
from setuptools import setup
|
||||
from setuptools import find_packages
|
||||
from setuptools.command.test import test as TestCommand
|
||||
import sys
|
||||
|
||||
|
||||
version = '0.31.0.dev0'
|
||||
version = '0.40.0.dev0'
|
||||
|
||||
# Remember to update local-oldest-requirements.txt when changing the minimum
|
||||
# acme/certbot version.
|
||||
install_requires = [
|
||||
'acme>=0.25.0',
|
||||
'certbot>=0.26.0',
|
||||
'acme>=0.29.0',
|
||||
'certbot>=0.39.0',
|
||||
'mock',
|
||||
'python-augeas',
|
||||
'setuptools',
|
||||
@@ -21,6 +23,22 @@ docs_extras = [
|
||||
'sphinx_rtd_theme',
|
||||
]
|
||||
|
||||
|
||||
class PyTest(TestCommand):
|
||||
user_options = []
|
||||
|
||||
def initialize_options(self):
|
||||
TestCommand.initialize_options(self)
|
||||
self.pytest_args = ''
|
||||
|
||||
def run_tests(self):
|
||||
import shlex
|
||||
# import here, cause outside the eggs aren't loaded
|
||||
import pytest
|
||||
errno = pytest.main(shlex.split(self.pytest_args))
|
||||
sys.exit(errno)
|
||||
|
||||
|
||||
setup(
|
||||
name='certbot-apache',
|
||||
version=version,
|
||||
@@ -44,6 +62,7 @@ setup(
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
@@ -64,4 +83,6 @@ setup(
|
||||
],
|
||||
},
|
||||
test_suite='certbot_apache',
|
||||
tests_require=["pytest"],
|
||||
cmdclass={"test": PyTest},
|
||||
)
|
||||
|
||||
certbot-auto (693 lines changed)
@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
|
||||
fi
|
||||
VENV_BIN="$VENV_PATH/bin"
|
||||
BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
|
||||
LE_AUTO_VERSION="0.30.2"
|
||||
LE_AUTO_VERSION="0.39.0"
|
||||
BASENAME=$(basename $0)
|
||||
USAGE="Usage: $BASENAME [OPTIONS]
|
||||
A self-updating wrapper script for the Certbot ACME client. When run, updates
|
||||
@@ -45,6 +45,7 @@ Help for certbot itself cannot be provided until it is installed.
|
||||
-h, --help print this help
|
||||
-n, --non-interactive, --noninteractive run without asking for user input
|
||||
--no-bootstrap do not install OS dependencies
|
||||
--no-permissions-check do not warn about file system permissions
|
||||
--no-self-upgrade do not download updates
|
||||
--os-packages-only install OS dependencies and exit
|
||||
--install-only install certbot, upgrade if needed, and exit
|
||||
@@ -67,6 +68,8 @@ for arg in "$@" ; do
|
||||
# Do not upgrade this script (also prevents client upgrades, because each
|
||||
# copy of the script pins a hash of the python client)
|
||||
NO_SELF_UPGRADE=1;;
|
||||
--no-permissions-check)
|
||||
NO_PERMISSIONS_CHECK=1;;
|
||||
--no-bootstrap)
|
||||
NO_BOOTSTRAP=1;;
|
||||
--help)
|
||||
@@ -172,7 +175,11 @@ SetRootAuthMechanism() {
|
||||
sudo)
|
||||
SUDO="sudo -E"
|
||||
;;
|
||||
'') ;; # Nothing to do for plain root method.
|
||||
'')
|
||||
# If we're not running with root, don't check that this script can only
|
||||
# be modified by system users and groups.
|
||||
NO_PERMISSIONS_CHECK=1
|
||||
;;
|
||||
*)
|
||||
error "Error: unknown root authorization mechanism '$LE_AUTO_SUDO'."
|
||||
exit 1
|
||||
@@ -333,63 +340,11 @@ BootstrapDebCommon() {
|
||||
fi
|
||||
|
||||
augeas_pkg="libaugeas0 augeas-lenses"
|
||||
AUGVERSION=`LC_ALL=C apt-cache show --no-all-versions libaugeas0 | grep ^Version: | cut -d" " -f2`
|
||||
|
||||
if [ "$ASSUME_YES" = 1 ]; then
|
||||
YES_FLAG="-y"
|
||||
fi
|
||||
|
||||
AddBackportRepo() {
|
||||
# ARGS:
|
||||
BACKPORT_NAME="$1"
|
||||
BACKPORT_SOURCELINE="$2"
|
||||
say "To use the Apache Certbot plugin, augeas needs to be installed from $BACKPORT_NAME."
|
||||
if ! grep -v -e ' *#' /etc/apt/sources.list | grep -q "$BACKPORT_NAME" ; then
|
||||
# This can theoretically error if sources.list.d is empty, but in that case we don't care.
|
||||
if ! grep -v -e ' *#' /etc/apt/sources.list.d/* 2>/dev/null | grep -q "$BACKPORT_NAME"; then
|
||||
if [ "$ASSUME_YES" = 1 ]; then
|
||||
/bin/echo -n "Installing augeas from $BACKPORT_NAME in 3 seconds..."
|
||||
sleep 1s
|
||||
/bin/echo -ne "\e[0K\rInstalling augeas from $BACKPORT_NAME in 2 seconds..."
|
||||
sleep 1s
|
||||
/bin/echo -e "\e[0K\rInstalling augeas from $BACKPORT_NAME in 1 second ..."
|
||||
sleep 1s
|
||||
add_backports=1
|
||||
else
|
||||
read -p "Would you like to enable the $BACKPORT_NAME repository [Y/n]? " response
|
||||
case $response in
|
||||
[yY][eE][sS]|[yY]|"")
|
||||
add_backports=1;;
|
||||
*)
|
||||
add_backports=0;;
|
||||
esac
|
||||
fi
|
||||
if [ "$add_backports" = 1 ]; then
|
||||
sh -c "echo $BACKPORT_SOURCELINE >> /etc/apt/sources.list.d/$BACKPORT_NAME.list"
|
||||
apt-get $QUIET_FLAG update
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
if [ "$add_backports" != 0 ]; then
|
||||
apt-get install $QUIET_FLAG $YES_FLAG --no-install-recommends -t "$BACKPORT_NAME" $augeas_pkg
|
||||
augeas_pkg=
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
if dpkg --compare-versions 1.0 gt "$AUGVERSION" ; then
|
||||
if lsb_release -a | grep -q wheezy ; then
|
||||
AddBackportRepo wheezy-backports "deb http://http.debian.net/debian wheezy-backports main"
|
||||
elif lsb_release -a | grep -q precise ; then
|
||||
# XXX add ARM case
|
||||
AddBackportRepo precise-backports "deb http://archive.ubuntu.com/ubuntu precise-backports main restricted universe multiverse"
|
||||
else
|
||||
echo "No libaugeas0 version is available that's new enough to run the"
|
||||
echo "Certbot apache plugin..."
|
||||
fi
|
||||
# XXX add a case for ubuntu PPAs
|
||||
fi
|
||||
|
||||
apt-get install $QUIET_FLAG $YES_FLAG --no-install-recommends \
|
||||
python \
|
||||
python-dev \
|
||||
@@ -540,11 +495,18 @@ BOOTSTRAP_RPM_PYTHON3_VERSION=1
|
||||
BootstrapRpmPython3() {
|
||||
# Tested with:
|
||||
# - CentOS 6
|
||||
# - Fedora 29
|
||||
|
||||
InitializeRPMCommonBase
|
||||
|
||||
# Fedora 29 must use python3-virtualenv
|
||||
if $TOOL list python3-virtualenv >/dev/null 2>&1; then
|
||||
python_pkgs="python3
|
||||
python3-virtualenv
|
||||
python3-devel
|
||||
"
|
||||
# EPEL uses python34
|
||||
if $TOOL list python34 >/dev/null 2>&1; then
|
||||
elif $TOOL list python34 >/dev/null 2>&1; then
|
||||
python_pkgs="python34
|
||||
python34-devel
|
||||
python34-tools
|
||||
@@ -573,10 +535,20 @@ BootstrapSuseCommon() {
|
||||
QUIET_FLAG='-qq'
|
||||
fi
|
||||
|
||||
if zypper search -x python-virtualenv >/dev/null 2>&1; then
|
||||
OPENSUSE_VIRTUALENV_PACKAGES="python-virtualenv"
|
||||
else
|
||||
# Since Leap 15.0 (and associated Tumbleweed version), python-virtualenv
|
||||
# is a source package, and python2-virtualenv must be used instead.
|
||||
# Also currently python2-setuptools is not a dependency of python2-virtualenv,
|
||||
# while it should be. Installing it explicitly until upstream fix.
|
||||
OPENSUSE_VIRTUALENV_PACKAGES="python2-virtualenv python2-setuptools"
|
||||
fi
|
||||
|
||||
zypper $QUIET_FLAG $zypper_flags in $install_flags \
|
||||
python \
|
||||
python-devel \
|
||||
python-virtualenv \
|
||||
$OPENSUSE_VIRTUALENV_PACKAGES \
|
||||
gcc \
|
||||
augeas-lenses \
|
||||
libopenssl-devel \
|
||||
@@ -783,7 +755,33 @@ elif [ -f /etc/redhat-release ]; then
|
||||
prev_le_python="$LE_PYTHON"
|
||||
unset LE_PYTHON
|
||||
DeterminePythonVersion "NOCRASH"
|
||||
if [ "$PYVER" -eq 26 ]; then
|
||||
|
||||
RPM_DIST_NAME=`(. /etc/os-release 2> /dev/null && echo $ID) || echo "unknown"`
|
||||
|
||||
# Set RPM_DIST_VERSION to VERSION_ID from /etc/os-release after splitting on
|
||||
# '.' characters (e.g. "8.0" becomes "8"). If the command exits with an
|
||||
# error, RPM_DIST_VERSION is set to "unknown".
|
||||
RPM_DIST_VERSION=$( (. /etc/os-release 2> /dev/null && echo "$VERSION_ID") | cut -d '.' -f1 || echo "unknown")
|
||||
|
||||
# If RPM_DIST_VERSION is an empty string or it contains any nonnumeric
|
||||
# characters, the value is unexpected so we set RPM_DIST_VERSION to 0.
|
||||
if [ -z "$RPM_DIST_VERSION" ] || [ -n "$(echo "$RPM_DIST_VERSION" | tr -d '[0-9]')" ]; then
|
||||
RPM_DIST_VERSION=0
|
||||
fi
|
||||
|
||||
# Starting to Fedora 29, python2 is on a deprecation path. Let's move to python3 then.
|
||||
# RHEL 8 also uses python3 by default.
|
||||
if [ "$RPM_DIST_NAME" = "fedora" -a "$RPM_DIST_VERSION" -ge 29 -o "$PYVER" -eq 26 ]; then
|
||||
RPM_USE_PYTHON_3=1
|
||||
elif [ "$RPM_DIST_NAME" = "rhel" -a "$RPM_DIST_VERSION" -ge 8 ]; then
|
||||
RPM_USE_PYTHON_3=1
|
||||
elif [ "$RPM_DIST_NAME" = "centos" -a "$RPM_DIST_VERSION" -ge 8 ]; then
|
||||
RPM_USE_PYTHON_3=1
|
||||
else
|
||||
RPM_USE_PYTHON_3=0
|
||||
fi
|
||||
|
||||
if [ "$RPM_USE_PYTHON_3" = 1 ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes that will use Python3"
|
||||
BootstrapRpmPython3
|
||||
@@ -797,6 +795,7 @@ elif [ -f /etc/redhat-release ]; then
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
fi
|
||||
|
||||
LE_PYTHON="$prev_le_python"
|
||||
elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
|
||||
Bootstrap() {
|
||||
@@ -940,6 +939,130 @@ else:
|
||||
UNLIKELY_EOF
|
||||
}
|
||||
|
||||
# Create a new virtual environment for Certbot. It will overwrite any existing one.
|
||||
# Parameters: LE_PYTHON, VENV_PATH, PYVER, VERBOSE
|
||||
CreateVenv() {
|
||||
"$1" - "$2" "$3" "$4" << "UNLIKELY_EOF"
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def create_venv(venv_path, pyver, verbose):
|
||||
if os.path.exists(venv_path):
|
||||
shutil.rmtree(venv_path)
|
||||
|
||||
stdout = sys.stdout if verbose == '1' else open(os.devnull, 'w')
|
||||
|
||||
if int(pyver) <= 27:
|
||||
# Use virtualenv binary
|
||||
environ = os.environ.copy()
|
||||
environ['VIRTUALENV_NO_DOWNLOAD'] = '1'
|
||||
command = ['virtualenv', '--no-site-packages', '--python', sys.executable, venv_path]
|
||||
subprocess.check_call(command, stdout=stdout, env=environ)
|
||||
else:
|
||||
# Use embedded venv module in Python 3
|
||||
command = [sys.executable, '-m', 'venv', venv_path]
|
||||
subprocess.check_call(command, stdout=stdout)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
create_venv(*sys.argv[1:])
|
||||
|
||||
UNLIKELY_EOF
|
||||
}
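The CreateVenv heredoc above mirrors what the inlined shell previously did. Roughly, per Python version (a sketch only, using the same variables the script already defines):

# Python 2.7 and older: the virtualenv tool, without letting it download a newer pip
VIRTUALENV_NO_DOWNLOAD=1 virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH"
# Python 3: the standard-library venv module
"$LE_PYTHON" -m venv "$VENV_PATH"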
|
||||
|
||||
# Check that the given PATH_TO_CHECK has secured permissions.
|
||||
# Parameters: LE_PYTHON, PATH_TO_CHECK
|
||||
CheckPathPermissions() {
|
||||
"$1" - "$2" << "UNLIKELY_EOF"
|
||||
"""Verifies certbot-auto cannot be modified by unprivileged users.
|
||||
|
||||
This script takes the path to certbot-auto as its only command line
|
||||
argument. It then checks that the file can only be modified by uid/gid
|
||||
< 1000 and if other users can modify the file, it prints a warning with
|
||||
a suggestion on how to solve the problem.
|
||||
|
||||
Permissions on symlinks in the absolute path of certbot-auto are ignored
|
||||
and only the canonical path to certbot-auto is checked. There could be
|
||||
permissions problems due to the symlinks that are unreported by this
|
||||
script, however, issues like this were not caused by our documentation
|
||||
and are ignored for the sake of simplicity.
|
||||
|
||||
All warnings are printed to stdout rather than stderr so all stderr
|
||||
output from this script can be suppressed to avoid printing messages if
|
||||
this script fails for some reason.
|
||||
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import stat
|
||||
import sys
|
||||
|
||||
|
||||
FORUM_POST_URL = 'https://community.letsencrypt.org/t/certbot-auto-deployment-best-practices/91979/'
|
||||
|
||||
|
||||
def has_safe_permissions(path):
|
||||
"""Returns True if the given path has secure permissions.
|
||||
|
||||
The permissions are considered safe if the file is only writable by
|
||||
uid/gid < 1000.
|
||||
|
||||
The reason we allow more IDs than 0 is because on some systems such
|
||||
as Debian, system users/groups other than uid/gid 0 are used for the
|
||||
path we recommend in our instructions which is /usr/local/bin. 1000
|
||||
was chosen because on Debian 0-999 is reserved for system IDs[1] and
|
||||
on RHEL either 0-499 or 0-999 is reserved depending on the
|
||||
version[2][3]. Due to these differences across different OSes, this
|
||||
detection isn't perfect so we only determine permissions are
|
||||
insecure when we can be reasonably confident there is a problem
|
||||
regardless of the underlying OS.
|
||||
|
||||
[1] https://www.debian.org/doc/debian-policy/ch-opersys.html#uid-and-gid-classes
|
||||
[2] https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/deployment_guide/ch-managing_users_and_groups
|
||||
[3] https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/7/html/system_administrators_guide/ch-managing_users_and_groups
|
||||
|
||||
:param str path: filesystem path to check
|
||||
:returns: True if the path has secure permissions, otherwise, False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
# os.stat follows symlinks before obtaining information about a file.
|
||||
stat_result = os.stat(path)
|
||||
if stat_result.st_mode & stat.S_IWOTH:
|
||||
return False
|
||||
if stat_result.st_mode & stat.S_IWGRP and stat_result.st_gid >= 1000:
|
||||
return False
|
||||
if stat_result.st_mode & stat.S_IWUSR and stat_result.st_uid >= 1000:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def main(certbot_auto_path):
|
||||
current_path = os.path.realpath(certbot_auto_path)
|
||||
last_path = None
|
||||
permissions_ok = True
|
||||
# This loop makes use of the fact that os.path.dirname('/') == '/'.
|
||||
while current_path != last_path and permissions_ok:
|
||||
permissions_ok = has_safe_permissions(current_path)
|
||||
last_path = current_path
|
||||
current_path = os.path.dirname(current_path)
|
||||
|
||||
if not permissions_ok:
|
||||
print('{0} has insecure permissions!'.format(certbot_auto_path))
|
||||
print('To learn how to fix them, visit {0}'.format(FORUM_POST_URL))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv[1])
|
||||
|
||||
UNLIKELY_EOF
|
||||
}
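To see what the new permissions check reacts to, a world-writable certbot-auto is the clearest case (illustrative paths and modes, not recommendations from this diff):

chmod o+w /usr/local/bin/certbot-auto   # sets S_IWOTH, so has_safe_permissions() returns False
/usr/local/bin/certbot-auto --version   # the wrapper should now print the insecure-permissions warning
chmod 755 /usr/local/bin/certbot-auto   # restore a safe mode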
|
||||
|
||||
if [ "$1" = "--le-auto-phase2" ]; then
|
||||
# Phase 2: Create venv, install LE, and run.
|
||||
|
||||
@@ -995,22 +1118,7 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
if [ "$LE_AUTO_VERSION" != "$INSTALLED_VERSION" ]; then
|
||||
say "Creating virtual environment..."
|
||||
DeterminePythonVersion
|
||||
rm -rf "$VENV_PATH"
|
||||
if [ "$PYVER" -le 27 ]; then
|
||||
# Use an environment variable instead of a flag for compatibility with old versions
|
||||
if [ "$VERBOSE" = 1 ]; then
|
||||
VIRTUALENV_NO_DOWNLOAD=1 virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH"
|
||||
else
|
||||
VIRTUALENV_NO_DOWNLOAD=1 virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH" \
|
||||
> /dev/null
|
||||
fi
|
||||
else
|
||||
if [ "$VERBOSE" = 1 ]; then
|
||||
"$LE_PYTHON" -m venv "$VENV_PATH"
|
||||
else
|
||||
"$LE_PYTHON" -m venv "$VENV_PATH" > /dev/null
|
||||
fi
|
||||
fi
|
||||
CreateVenv "$LE_PYTHON" "$VENV_PATH" "$PYVER" "$VERBOSE"
|
||||
|
||||
if [ -n "$BOOTSTRAP_VERSION" ]; then
|
||||
echo "$BOOTSTRAP_VERSION" > "$BOOTSTRAP_VERSION_PATH"
|
||||
@@ -1024,202 +1132,200 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
# There is no $ interpolation due to quotes on starting heredoc delimiter.
|
||||
# -------------------------------------------------------------------------
|
||||
cat << "UNLIKELY_EOF" > "$TEMP_DIR/letsencrypt-auto-requirements.txt"
|
||||
# This is the flattened list of packages certbot-auto installs. To generate
|
||||
# this, do
|
||||
# `pip install --no-cache-dir -e acme -e . -e certbot-apache -e certbot-nginx`,
|
||||
# and then use `hashin` or a more secure method to gather the hashes.
|
||||
|
||||
# Hashin example:
|
||||
# This is the flattened list of packages certbot-auto installs.
|
||||
# To generate this, do (with docker and package hashin installed):
|
||||
# ```
|
||||
# letsencrypt-auto-source/rebuild_dependencies.py \
|
||||
# letsencrypt-auto-source/pieces/dependency-requirements.txt
|
||||
# ```
|
||||
# If you want to update a single dependency, run commands similar to these:
|
||||
# ```
|
||||
# pip install hashin
|
||||
# hashin -r dependency-requirements.txt cryptography==1.5.2
|
||||
# sets the new certbot-auto pinned version of cryptography to 1.5.2
|
||||
|
||||
argparse==1.4.0 \
|
||||
--hash=sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314 \
|
||||
--hash=sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4
|
||||
|
||||
# This comes before cffi because cffi will otherwise install an unchecked
|
||||
# version via setup_requires.
|
||||
pycparser==2.14 \
|
||||
--hash=sha256:7959b4a74abdc27b312fed1c21e6caf9309ce0b29ea86b591fd2e99ecdf27f73 \
|
||||
--no-binary pycparser
|
||||
|
||||
asn1crypto==0.22.0 \
|
||||
--hash=sha256:d232509fefcfcdb9a331f37e9c9dc20441019ad927c7d2176cf18ed5da0ba097 \
|
||||
--hash=sha256:cbbadd640d3165ab24b06ef25d1dca09a3441611ac15f6a6b452474fdf0aed1a
|
||||
cffi==1.11.5 \
|
||||
--hash=sha256:1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50 \
|
||||
--hash=sha256:87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596 \
|
||||
--hash=sha256:1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef \
|
||||
--hash=sha256:151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743 \
|
||||
--hash=sha256:edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f \
|
||||
--hash=sha256:ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31 \
|
||||
--hash=sha256:79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04 \
|
||||
--hash=sha256:b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6 \
|
||||
--hash=sha256:4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3 \
|
||||
--hash=sha256:7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6 \
|
||||
--hash=sha256:95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b \
|
||||
--hash=sha256:b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca \
|
||||
--hash=sha256:ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e \
|
||||
--hash=sha256:fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb \
|
||||
--hash=sha256:9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd \
|
||||
--hash=sha256:be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1 \
|
||||
--hash=sha256:ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917 \
|
||||
--hash=sha256:857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359 \
|
||||
--hash=sha256:2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f \
|
||||
--hash=sha256:a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95 \
|
||||
--hash=sha256:e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801 \
|
||||
--hash=sha256:3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257 \
|
||||
--hash=sha256:ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184 \
|
||||
--hash=sha256:770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc \
|
||||
--hash=sha256:d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085 \
|
||||
--hash=sha256:3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93 \
|
||||
--hash=sha256:ca1bd81f40adc59011f58159e4aa6445fc585a32bb8ac9badf7a2c1aa23822f2 \
|
||||
--hash=sha256:3bb6bd7266598f318063e584378b8e27c67de998a43362e8fce664c54ee52d30 \
|
||||
--hash=sha256:a6a5cb8809091ec9ac03edde9304b3ad82ad4466333432b16d78ef40e0cce0d5 \
|
||||
--hash=sha256:57b2533356cb2d8fac1555815929f7f5f14d68ac77b085d2326b571310f34f6e \
|
||||
--hash=sha256:495c5c2d43bf6cebe0178eb3e88f9c4aa48d8934aa6e3cddb865c058da76756b \
|
||||
--hash=sha256:e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4
|
||||
ConfigArgParse==0.12.0 \
|
||||
--hash=sha256:28cd7d67669651f2a4518367838c49539457504584a139709b2b8f6c208ef339 \
|
||||
--no-binary ConfigArgParse
|
||||
# ```
|
||||
ConfigArgParse==0.14.0 \
|
||||
--hash=sha256:2e2efe2be3f90577aca9415e32cb629aa2ecd92078adbe27b53a03e53ff12e91
|
||||
asn1crypto==0.24.0 \
|
||||
--hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \
|
||||
--hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49
|
||||
certifi==2019.6.16 \
|
||||
--hash=sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939 \
|
||||
--hash=sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695
|
||||
cffi==1.12.3 \
|
||||
--hash=sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774 \
|
||||
--hash=sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d \
|
||||
--hash=sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90 \
|
||||
--hash=sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b \
|
||||
--hash=sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63 \
|
||||
--hash=sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45 \
|
||||
--hash=sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25 \
|
||||
--hash=sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3 \
|
||||
--hash=sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b \
|
||||
--hash=sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647 \
|
||||
--hash=sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016 \
|
||||
--hash=sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4 \
|
||||
--hash=sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb \
|
||||
--hash=sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753 \
|
||||
--hash=sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7 \
|
||||
--hash=sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9 \
|
||||
--hash=sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f \
|
||||
--hash=sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8 \
|
||||
--hash=sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f \
|
||||
--hash=sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc \
|
||||
--hash=sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42 \
|
||||
--hash=sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3 \
|
||||
--hash=sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909 \
|
||||
--hash=sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45 \
|
||||
--hash=sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d \
|
||||
--hash=sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512 \
|
||||
--hash=sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff \
|
||||
--hash=sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201
|
||||
chardet==3.0.4 \
|
||||
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
|
||||
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
|
||||
configobj==5.0.6 \
|
||||
--hash=sha256:a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902 \
|
||||
--no-binary configobj
|
||||
cryptography==2.2.2 \
|
||||
--hash=sha256:3f3b65d5a16e6b52fba63dc860b62ca9832f51f1a2ae5083c78b6840275f12dd \
|
||||
--hash=sha256:5251e7de0de66810833606439ca65c9b9e45da62196b0c88bfadf27740aac09f \
|
||||
--hash=sha256:551a3abfe0c8c6833df4192a63371aa2ff43afd8f570ed345d31f251d78e7e04 \
|
||||
--hash=sha256:5cb990056b7cadcca26813311187ad751ea644712022a3976443691168781b6f \
|
||||
--hash=sha256:60bda7f12ecb828358be53095fc9c6edda7de8f1ef571f96c00b2363643fa3cd \
|
||||
--hash=sha256:64b5c67acc9a7c83fbb4b69166f3105a0ab722d27934fac2cb26456718eec2ba \
|
||||
--hash=sha256:6fef51ec447fe9f8351894024e94736862900d3a9aa2961528e602eb65c92bdb \
|
||||
--hash=sha256:77d0ad229d47a6e0272d00f6bf8ac06ce14715a9fd02c9a97f5a2869aab3ccb2 \
|
||||
--hash=sha256:808fe471b1a6b777f026f7dc7bd9a4959da4bfab64972f2bbe91e22527c1c037 \
|
||||
--hash=sha256:9b62fb4d18529c84b961efd9187fecbb48e89aa1a0f9f4161c61b7fc42a101bd \
|
||||
--hash=sha256:9e5bed45ec6b4f828866ac6a6bedf08388ffcfa68abe9e94b34bb40977aba531 \
|
||||
--hash=sha256:9fc295bf69130a342e7a19a39d7bbeb15c0bcaabc7382ec33ef3b2b7d18d2f63 \
|
||||
--hash=sha256:abd070b5849ed64e6d349199bef955ee0ad99aefbad792f0c587f8effa681a5e \
|
||||
--hash=sha256:ba6a774749b6e510cffc2fb98535f717e0e5fd91c7c99a61d223293df79ab351 \
|
||||
--hash=sha256:c332118647f084c983c6a3e1dba0f3bcb051f69d12baccac68db8d62d177eb8a \
|
||||
--hash=sha256:d6f46e862ee36df81e6342c2177ba84e70f722d9dc9c6c394f9f1f434c4a5563 \
|
||||
--hash=sha256:db6013746f73bf8edd9c3d1d3f94db635b9422f503db3fc5ef105233d4c011ab \
|
||||
--hash=sha256:f57008eaff597c69cf692c3518f6d4800f0309253bb138b526a37fe9ef0c7471 \
|
||||
--hash=sha256:f6c821ac253c19f2ad4c8691633ae1d1a17f120d5b01ea1d256d7b602bc59887
|
||||
enum34==1.1.2 ; python_version < '3.4' \
|
||||
--hash=sha256:2475d7fcddf5951e92ff546972758802de5260bf409319a9f1934e6bbc8b1dc7 \
|
||||
--hash=sha256:35907defb0f992b75ab7788f65fedc1cf20ffa22688e0e6f6f12afc06b3ea501
|
||||
--hash=sha256:a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902
|
||||
cryptography==2.7 \
|
||||
--hash=sha256:24b61e5fcb506424d3ec4e18bca995833839bf13c59fc43e530e488f28d46b8c \
|
||||
--hash=sha256:25dd1581a183e9e7a806fe0543f485103232f940fcfc301db65e630512cce643 \
|
||||
--hash=sha256:3452bba7c21c69f2df772762be0066c7ed5dc65df494a1d53a58b683a83e1216 \
|
||||
--hash=sha256:41a0be220dd1ed9e998f5891948306eb8c812b512dc398e5a01846d855050799 \
|
||||
--hash=sha256:5751d8a11b956fbfa314f6553d186b94aa70fdb03d8a4d4f1c82dcacf0cbe28a \
|
||||
--hash=sha256:5f61c7d749048fa6e3322258b4263463bfccefecb0dd731b6561cb617a1d9bb9 \
|
||||
--hash=sha256:72e24c521fa2106f19623a3851e9f89ddfdeb9ac63871c7643790f872a305dfc \
|
||||
--hash=sha256:7b97ae6ef5cba2e3bb14256625423413d5ce8d1abb91d4f29b6d1a081da765f8 \
|
||||
--hash=sha256:961e886d8a3590fd2c723cf07be14e2a91cf53c25f02435c04d39e90780e3b53 \
|
||||
--hash=sha256:96d8473848e984184b6728e2c9d391482008646276c3ff084a1bd89e15ff53a1 \
|
||||
--hash=sha256:ae536da50c7ad1e002c3eee101871d93abdc90d9c5f651818450a0d3af718609 \
|
||||
--hash=sha256:b0db0cecf396033abb4a93c95d1602f268b3a68bb0a9cc06a7cff587bb9a7292 \
|
||||
--hash=sha256:cfee9164954c186b191b91d4193989ca994703b2fff406f71cf454a2d3c7327e \
|
||||
--hash=sha256:e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6 \
|
||||
--hash=sha256:f27d93f0139a3c056172ebb5d4f9056e770fdf0206c2f422ff2ebbad142e09ed \
|
||||
--hash=sha256:f57b76e46a58b63d1c6375017f4564a28f19a5ca912691fd2e4261b3414b618d
|
||||
distro==1.4.0 \
|
||||
--hash=sha256:362dde65d846d23baee4b5c058c8586f219b5a54be1cf5fc6ff55c4578392f57 \
|
||||
--hash=sha256:eedf82a470ebe7d010f1872c17237c79ab04097948800029994fa458e52fb4b4
|
||||
enum34==1.1.6 \
|
||||
--hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
|
||||
--hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
|
||||
--hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
|
||||
--hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1
|
||||
funcsigs==1.0.2 \
|
||||
--hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
|
||||
--hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50
|
||||
idna==2.5 \
|
||||
--hash=sha256:cc19709fd6d0cbfed39ea875d29ba6d4e22c0cebc510a76d6302a28385e8bb70 \
|
||||
--hash=sha256:3cb5ce08046c4e3a560fc02f138d0ac63e00f8ce5901a56b32ec8b7994082aab
|
||||
ipaddress==1.0.16 \
|
||||
--hash=sha256:935712800ce4760701d89ad677666cd52691fd2f6f0b340c8b4239a3c17988a5 \
|
||||
--hash=sha256:5a3182b322a706525c46282ca6f064d27a02cffbd449f9f47416f1dc96aa71b0
|
||||
josepy==1.1.0 \
|
||||
--hash=sha256:1309a25aac3caeff5239729c58ff9b583f7d022ffdb1553406ddfc8e5b52b76e \
|
||||
--hash=sha256:fb5c62c77d26e04df29cb5ecd01b9ce69b6fcc9e521eb1ca193b7faa2afa7086
|
||||
linecache2==1.0.0 \
|
||||
--hash=sha256:e78be9c0a0dfcbac712fe04fbf92b96cddae80b1b842f24248214c8496f006ef \
|
||||
--hash=sha256:4b26ff4e7110db76eeb6f5a7b64a82623839d595c2038eeda662f2a2db78e97c
|
||||
# Using an older version of mock here prevents regressions of #5276.
|
||||
future==0.17.1 \
|
||||
--hash=sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8
|
||||
idna==2.8 \
|
||||
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
|
||||
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
|
||||
ipaddress==1.0.22 \
|
||||
--hash=sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794 \
|
||||
--hash=sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c
|
||||
josepy==1.2.0 \
|
||||
--hash=sha256:8ea15573203f28653c00f4ac0142520777b1c59d9eddd8da3f256c6ba3cac916 \
|
||||
--hash=sha256:9cec9a839fe9520f0420e4f38e7219525daccce4813296627436fe444cd002d3
|
||||
mock==1.3.0 \
|
||||
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb \
|
||||
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6
|
||||
ordereddict==1.1 \
|
||||
--hash=sha256:1c35b4ac206cef2d24816c89f89cf289dd3d38cf7c449bb3fab7bf6d43f01b1f \
|
||||
--no-binary ordereddict
|
||||
packaging==16.8 \
|
||||
--hash=sha256:99276dc6e3a7851f32027a68f1095cd3f77c148091b092ea867a351811cfe388 \
|
||||
--hash=sha256:5d50835fdf0a7edf0b55e311b7c887786504efea1177abd7e69329a8e5ea619e
|
||||
parsedatetime==2.1 \
|
||||
--hash=sha256:ce9d422165cf6e963905cd5f74f274ebf7cc98c941916169178ef93f0e557838 \
|
||||
--hash=sha256:17c578775520c99131634e09cfca5a05ea9e1bd2a05cd06967ebece10df7af2d
|
||||
pbr==1.8.1 \
|
||||
--hash=sha256:46c8db75ae75a056bd1cc07fa21734fe2e603d11a07833ecc1eeb74c35c72e0c \
|
||||
--hash=sha256:e2127626a91e6c885db89668976db31020f0af2da728924b56480fc7ccf09649
|
||||
pyOpenSSL==16.2.0 \
|
||||
--hash=sha256:26ca380ddf272f7556e48064bbcd5bd71f83dfc144f3583501c7ddbd9434ee17 \
|
||||
--hash=sha256:7779a3bbb74e79db234af6a08775568c6769b5821faecf6e2f4143edb227516e
|
||||
pyparsing==2.1.8 \
|
||||
--hash=sha256:2f0f5ceb14eccd5aef809d6382e87df22ca1da583c79f6db01675ce7d7f49c18 \
|
||||
--hash=sha256:03a4869b9f3493807ee1f1cb405e6d576a1a2ca4d81a982677c0c1ad6177c56b \
|
||||
--hash=sha256:ab09aee814c0241ff0c503cff30018219fe1fc14501d89f406f4664a0ec9fbcd \
|
||||
--hash=sha256:6e9a7f052f8e26bcf749e4033e3115b6dc7e3c85aafcb794b9a88c9d9ef13c97 \
|
||||
--hash=sha256:9f463a6bcc4eeb6c08f1ed84439b17818e2085937c0dee0d7674ac127c67c12b \
|
||||
--hash=sha256:3626b4d81cfb300dad57f52f2f791caaf7b06c09b368c0aa7b868e53a5775424 \
|
||||
--hash=sha256:367b90cc877b46af56d4580cd0ae278062903f02b8204ab631f5a2c0f50adfd0 \
|
||||
--hash=sha256:9f1ea360086cd68681e7f4ca8f1f38df47bf81942a0d76a9673c2d23eff35b13
|
||||
pyRFC3339==1.0 \
|
||||
--hash=sha256:eea31835c56e2096af4363a5745a784878a61d043e247d3a6d6a0a32a9741f56 \
|
||||
--hash=sha256:8dfbc6c458b8daba1c0f3620a8c78008b323a268b27b7359e92a4ae41325f535
|
||||
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6 \
|
||||
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb
|
||||
parsedatetime==2.4 \
|
||||
--hash=sha256:3d817c58fb9570d1eec1dd46fa9448cd644eeed4fb612684b02dfda3a79cb84b \
|
||||
--hash=sha256:9ee3529454bf35c40a77115f5a596771e59e1aee8c53306f346c461b8e913094
|
||||
pbr==5.4.2 \
|
||||
--hash=sha256:56e52299170b9492513c64be44736d27a512fa7e606f21942160b68ce510b4bc \
|
||||
--hash=sha256:9b321c204a88d8ab5082699469f52cc94c5da45c51f114113d01b3d993c24cdf
|
||||
pyOpenSSL==19.0.0 \
|
||||
--hash=sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200 \
|
||||
--hash=sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6
|
||||
pyRFC3339==1.1 \
|
||||
--hash=sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4 \
|
||||
--hash=sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a
|
||||
pycparser==2.19 \
|
||||
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3
|
||||
pyparsing==2.4.2 \
|
||||
--hash=sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80 \
|
||||
--hash=sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4
|
||||
python-augeas==0.5.0 \
|
||||
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2 \
|
||||
--no-binary python-augeas
|
||||
pytz==2015.7 \
|
||||
--hash=sha256:3abe6a6d3fc2fbbe4c60144211f45da2edbe3182a6f6511af6bbba0598b1f992 \
|
||||
--hash=sha256:939ef9c1e1224d980405689a97ffcf7828c56d1517b31d73464356c1f2b7769e \
|
||||
--hash=sha256:ead4aefa7007249e05e51b01095719d5a8dd95760089f5730aac5698b1932918 \
|
||||
--hash=sha256:3cca0df08bd0ed98432390494ce3ded003f5e661aa460be7a734bffe35983605 \
|
||||
--hash=sha256:3ede470d3d17ba3c07638dfa0d10452bc1b6e5ad326127a65ba77e6aaeb11bec \
|
||||
--hash=sha256:68c47964f7186eec306b13629627722b9079cd4447ed9e5ecaecd4eac84ca734 \
|
||||
--hash=sha256:dd5d3991950aae40a6c81de1578942e73d629808cefc51d12cd157980e6cfc18 \
|
||||
--hash=sha256:a77c52062c07eb7c7b30545dbc73e32995b7e117eea750317b5cb5c7a4618f14 \
|
||||
--hash=sha256:81af9aec4bc960a9a0127c488f18772dae4634689233f06f65443e7b11ebeb51 \
|
||||
--hash=sha256:e079b1dadc5c06246cc1bb6fe1b23a50b1d1173f2edd5104efd40bb73a28f406 \
|
||||
--hash=sha256:fbd26746772c24cb93c8b97cbdad5cb9e46c86bbdb1b9d8a743ee00e2fb1fc5d \
|
||||
--hash=sha256:99266ef30a37e43932deec2b7ca73e83c8dbc3b9ff703ec73eca6b1dae6befea \
|
||||
--hash=sha256:8b6ce1c993909783bc96e0b4f34ea223bff7a4df2c90bdb9c4e0f1ac928689e3
|
||||
requests==2.20.0 \
|
||||
--hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \
|
||||
--hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279
|
||||
six==1.10.0 \
|
||||
--hash=sha256:0ff78c403d9bccf5a425a6d31a12aa6b47f1c21ca4dc2573a7e2f32a97335eb1 \
|
||||
--hash=sha256:105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a
|
||||
traceback2==1.4.0 \
|
||||
--hash=sha256:8253cebec4b19094d67cc5ed5af99bf1dba1285292226e98a31929f87a5d6b23 \
|
||||
--hash=sha256:05acc67a09980c2ecfedd3423f7ae0104839eccb55fc645773e1caa0951c3030
|
||||
unittest2==1.1.0 \
|
||||
--hash=sha256:13f77d0875db6d9b435e1d4f41e74ad4cc2eb6e1d5c824996092b3430f088bb8 \
|
||||
--hash=sha256:22882a0e418c284e1f718a822b3b022944d53d2d908e1690b319a9d3eb2c0579
|
||||
zope.component==4.2.2 \
|
||||
--hash=sha256:282c112b55dd8e3c869a3571f86767c150ab1284a9ace2bdec226c592acaf81a \
|
||||
--no-binary zope.component
|
||||
zope.event==4.1.0 \
|
||||
--hash=sha256:dc7a59a2fd91730d3793131a5d261b29e93ec4e2a97f1bc487ce8defee2fe786 \
|
||||
--no-binary zope.event
|
||||
zope.interface==4.1.3 \
|
||||
--hash=sha256:f07b631f7a601cd8cbd3332d54f43142c7088a83299f859356f08d1d4d4259b3 \
|
||||
--hash=sha256:de5cca083b9439d8002fb76bbe6b4998c5a5a721fab25b84298967f002df4c94 \
|
||||
--hash=sha256:6788416f7ea7f5b8a97be94825377aa25e8bdc73463e07baaf9858b29e737077 \
|
||||
--hash=sha256:6f3230f7254518201e5a3708cbb2de98c848304f06e3ded8bfb39e5825cba2e1 \
|
||||
--hash=sha256:5fa575a5240f04200c3088427d0d4b7b737f6e9018818a51d8d0f927a6a2517a \
|
||||
--hash=sha256:522194ad6a545735edd75c8a83f48d65d1af064e432a7d320d64f56bafc12e99 \
|
||||
--hash=sha256:e8c7b2d40943f71c99148c97f66caa7f5134147f57423f8db5b4825099ce9a09 \
|
||||
--hash=sha256:279024f0208601c3caa907c53876e37ad88625f7eaf1cb3842dbe360b2287017 \
|
||||
--hash=sha256:2e221a9eec7ccc58889a278ea13dcfed5ef939d80b07819a9a8b3cb1c681484f \
|
||||
--hash=sha256:69118965410ec86d44dc6b9017ee3ddbd582e0c0abeef62b3a19dbf6c8ad132b \
|
||||
--hash=sha256:d04df8686ec864d0cade8cf199f7f83aecd416109a20834d568f8310ded12dea \
|
||||
--hash=sha256:e75a947e15ee97e7e71e02ea302feb2fc62d3a2bb4668bf9dfbed43a506ac7e7 \
|
||||
--hash=sha256:4e45d22fb883222a5ab9f282a116fec5ee2e8d1a568ccff6a2d75bbd0eb6bcfc \
|
||||
--hash=sha256:bce9339bb3c7a55e0803b63d21c5839e8e479bc85c4adf42ae415b72f94facb2 \
|
||||
--hash=sha256:928138365245a0e8869a5999fbcc2a45475a0a6ed52a494d60dbdc540335fedd \
|
||||
--hash=sha256:0d841ba1bb840eea0e6489dc5ecafa6125554971f53b5acb87764441e61bceba \
|
||||
--hash=sha256:b09c8c1d47b3531c400e0195697f1414a63221de6ef478598a4f1460f7d9a392
|
||||
requests-toolbelt==0.8.0 \
|
||||
--hash=sha256:42c9c170abc2cacb78b8ab23ac957945c7716249206f90874651971a4acff237 \
|
||||
--hash=sha256:f6a531936c6fa4c6cfce1b9c10d5c4f498d16528d2a54a22ca00011205a187b5
|
||||
chardet==3.0.2 \
|
||||
--hash=sha256:4f7832e7c583348a9eddd927ee8514b3bf717c061f57b21dbe7697211454d9bb \
|
||||
--hash=sha256:6ebf56457934fdce01fb5ada5582762a84eed94cad43ed877964aebbdd8174c0
|
||||
urllib3==1.24.1 \
|
||||
--hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \
|
||||
--hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22
|
||||
certifi==2017.4.17 \
|
||||
--hash=sha256:f4318671072f030a33c7ca6acaef720ddd50ff124d1388e50c1bda4cbd6d7010 \
|
||||
--hash=sha256:f7527ebf7461582ce95f7a9e03dd141ce810d40590834f4ec20cddd54234c10a
|
||||
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2
|
||||
pytz==2019.2 \
|
||||
--hash=sha256:26c0b32e437e54a18161324a2fca3c4b9846b74a8dccddd843113109e1116b32 \
|
||||
--hash=sha256:c894d57500a4cd2d5c71114aaab77dbab5eabd9022308ce5ac9bb93a60a6f0c7
|
||||
requests==2.21.0 \
|
||||
--hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
|
||||
--hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b
|
||||
requests-toolbelt==0.9.1 \
|
||||
--hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
|
||||
--hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0
|
||||
six==1.12.0 \
|
||||
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
|
||||
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73
|
||||
urllib3==1.24.3 \
|
||||
--hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \
|
||||
--hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb
|
||||
zope.component==4.5 \
|
||||
--hash=sha256:6edfd626c3b593b72895a8cfcf79bff41f4619194ce996a85bce31ac02b94e55 \
|
||||
--hash=sha256:984a06ba3def0b02b1117fa4c45b56e772e8c29c0340820fbf367e440a93a3a4
|
||||
zope.deferredimport==4.3.1 \
|
||||
--hash=sha256:57b2345e7b5eef47efcd4f634ff16c93e4265de3dcf325afc7315ade48d909e1 \
|
||||
--hash=sha256:9a0c211df44aa95f1c4e6d2626f90b400f56989180d3ef96032d708da3d23e0a
|
||||
zope.deprecation==4.4.0 \
|
||||
--hash=sha256:0d453338f04bacf91bbfba545d8bcdf529aa829e67b705eac8c1a7fdce66e2df \
|
||||
--hash=sha256:f1480b74995958b24ce37b0ef04d3663d2683e5d6debc96726eff18acf4ea113
|
||||
zope.event==4.4 \
|
||||
--hash=sha256:69c27debad9bdacd9ce9b735dad382142281ac770c4a432b533d6d65c4614bcf \
|
||||
--hash=sha256:d8e97d165fd5a0997b45f5303ae11ea3338becfe68c401dd88ffd2113fe5cae7
|
||||
zope.hookable==4.2.0 \
|
||||
--hash=sha256:22886e421234e7e8cedc21202e1d0ab59960e40a47dd7240e9659a2d82c51370 \
|
||||
--hash=sha256:39912f446e45b4e1f1951b5ffa2d5c8b074d25727ec51855ae9eab5408f105ab \
|
||||
--hash=sha256:3adb7ea0871dbc56b78f62c4f5c024851fc74299f4f2a95f913025b076cde220 \
|
||||
--hash=sha256:3d7c4b96341c02553d8b8d71065a9366ef67e6c6feca714f269894646bb8268b \
|
||||
--hash=sha256:4e826a11a529ed0464ffcecf34b0b7bd1b4928dd5848c5c61bedd7833e8f4801 \
|
||||
--hash=sha256:700d68cc30728de1c4c62088a981c6daeaefdf20a0d81995d2c0b7f442c5f88c \
|
||||
--hash=sha256:77c82a430cedfbf508d1aa406b2f437363c24fa90c73f577ead0fb5295749b83 \
|
||||
--hash=sha256:c1df3929a3666fc5a0c80d60a0c1e6f6ef97c7f6ed2f1b7cf49f3e6f3d4dde15 \
|
||||
--hash=sha256:dba8b2dd2cd41cb5f37bfa3f3d82721b8ae10e492944e48ddd90a439227f2893 \
|
||||
--hash=sha256:f492540305b15b5591bd7195d61f28946bb071de071cee5d68b6b8414da90fd2
|
||||
zope.interface==4.6.0 \
|
||||
--hash=sha256:086707e0f413ff8800d9c4bc26e174f7ee4c9c8b0302fbad68d083071822316c \
|
||||
--hash=sha256:1157b1ec2a1f5bf45668421e3955c60c610e31913cc695b407a574efdbae1f7b \
|
||||
--hash=sha256:11ebddf765bff3bbe8dbce10c86884d87f90ed66ee410a7e6c392086e2c63d02 \
|
||||
--hash=sha256:14b242d53f6f35c2d07aa2c0e13ccb710392bcd203e1b82a1828d216f6f6b11f \
|
||||
--hash=sha256:1b3d0dcabc7c90b470e59e38a9acaa361be43b3a6ea644c0063951964717f0e5 \
|
||||
--hash=sha256:20a12ab46a7e72b89ce0671e7d7a6c3c1ca2c2766ac98112f78c5bddaa6e4375 \
|
||||
--hash=sha256:298f82c0ab1b182bd1f34f347ea97dde0fffb9ecf850ecf7f8904b8442a07487 \
|
||||
--hash=sha256:2f6175722da6f23dbfc76c26c241b67b020e1e83ec7fe93c9e5d3dd18667ada2 \
|
||||
--hash=sha256:3b877de633a0f6d81b600624ff9137312d8b1d0f517064dfc39999352ab659f0 \
|
||||
--hash=sha256:4265681e77f5ac5bac0905812b828c9fe1ce80c6f3e3f8574acfb5643aeabc5b \
|
||||
--hash=sha256:550695c4e7313555549aa1cdb978dc9413d61307531f123558e438871a883d63 \
|
||||
--hash=sha256:5f4d42baed3a14c290a078e2696c5f565501abde1b2f3f1a1c0a94fbf6fbcc39 \
|
||||
--hash=sha256:62dd71dbed8cc6a18379700701d959307823b3b2451bdc018594c48956ace745 \
|
||||
--hash=sha256:7040547e5b882349c0a2cc9b50674b1745db551f330746af434aad4f09fba2cc \
|
||||
--hash=sha256:7e099fde2cce8b29434684f82977db4e24f0efa8b0508179fce1602d103296a2 \
|
||||
--hash=sha256:7e5c9a5012b2b33e87980cee7d1c82412b2ebabcb5862d53413ba1a2cfde23aa \
|
||||
--hash=sha256:81295629128f929e73be4ccfdd943a0906e5fe3cdb0d43ff1e5144d16fbb52b1 \
|
||||
--hash=sha256:95cc574b0b83b85be9917d37cd2fad0ce5a0d21b024e1a5804d044aabea636fc \
|
||||
--hash=sha256:968d5c5702da15c5bf8e4a6e4b67a4d92164e334e9c0b6acf080106678230b98 \
|
||||
--hash=sha256:9e998ba87df77a85c7bed53240a7257afe51a07ee6bc3445a0bf841886da0b97 \
|
||||
--hash=sha256:a0c39e2535a7e9c195af956610dba5a1073071d2d85e9d2e5d789463f63e52ab \
|
||||
--hash=sha256:a15e75d284178afe529a536b0e8b28b7e107ef39626a7809b4ee64ff3abc9127 \
|
||||
--hash=sha256:a6a6ff82f5f9b9702478035d8f6fb6903885653bff7ec3a1e011edc9b1a7168d \
|
||||
--hash=sha256:b639f72b95389620c1f881d94739c614d385406ab1d6926a9ffe1c8abbea23fe \
|
||||
--hash=sha256:bad44274b151d46619a7567010f7cde23a908c6faa84b97598fd2f474a0c6891 \
|
||||
--hash=sha256:bbcef00d09a30948756c5968863316c949d9cedbc7aabac5e8f0ffbdb632e5f1 \
|
||||
--hash=sha256:d788a3999014ddf416f2dc454efa4a5dbeda657c6aba031cf363741273804c6b \
|
||||
--hash=sha256:eed88ae03e1ef3a75a0e96a55a99d7937ed03e53d0cffc2451c208db445a2966 \
|
||||
--hash=sha256:f99451f3a579e73b5dd58b1b08d1179791d49084371d9a47baad3b22417f0317
|
||||
zope.proxy==4.3.2 \
|
||||
--hash=sha256:320a7619992e42142549ebf61e14ce27683b4d14b0cbc45f7c037ba64edb560c \
|
||||
--hash=sha256:824d4dbabbb7deb84f25fdb96ea1eeca436a1802c3c8d323b3eb4ac9d527d41c \
|
||||
--hash=sha256:8a32eb9c94908f3544da2dae3f4a9e6961d78819b88ac6b6f4a51cee2d65f4a0 \
|
||||
--hash=sha256:96265fd3bc3ea646f98482e16307a69de21402eeaaaaf4b841c1161ac2f71bb0 \
|
||||
--hash=sha256:ab6d6975d9c51c13cac828ff03168de21fb562b0664c59bcdc4a4b10f39a5b17 \
|
||||
--hash=sha256:af10cb772391772463f65a58348e2de5ecc06693c16d2078be276dc068bcbb54 \
|
||||
--hash=sha256:b8fd3a3de3f7b6452775e92af22af5977b17b69ac86a38a3ddfe870e40a0d05f \
|
||||
--hash=sha256:bb7088f1bed3b8214284a5e425dc23da56f2f28e8815b7580bfed9e245b6c0b6 \
|
||||
--hash=sha256:bc29b3665eac34f14c4aef5224bef045efcfb1a7d12d78c8685858de5fbf21c0 \
|
||||
--hash=sha256:c39fa6a159affeae5fe31b49d9f5b12bd674fe77271a9a324408b271440c50a7 \
|
||||
--hash=sha256:e946a036ac5b9f897e986ac9dc950a34cffc857d88eae6727b8434fbc4752366
|
||||
|
||||
# Contains the requirements for the letsencrypt package.
|
||||
#
|
||||
@@ -1232,18 +1338,18 @@ letsencrypt==0.7.0 \
|
||||
--hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
|
||||
--hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9
|
||||
|
||||
certbot==0.30.2 \
|
||||
--hash=sha256:e411b72fa86eec1018e6de28e649e8c9c71191a7431dcc77f207b57ca9484c11 \
|
||||
--hash=sha256:534487cb552ced8e47948ba3d2e7ca12c3a439133fc609485012b1a02fc7776e
|
||||
acme==0.30.2 \
|
||||
--hash=sha256:68982576492dfa99c7e2be0fce4371adc9344740b05420ce0ab53238d2bb9b3b \
|
||||
--hash=sha256:295a5b7fce9f908e6e5cff8c40be1a3daf3e1ebabd2e139a4c87274e68eeb8f2
|
||||
certbot-apache==0.30.2 \
|
||||
--hash=sha256:3b7fa4e59772da7c9975ef2a49ceff157c9d7cb31eb9475928b5986d89701a3a \
|
||||
--hash=sha256:32fa915a8a51810fdfe828ac1361da4425c231d7384891e49e6338e4741464b2
|
||||
certbot-nginx==0.30.2 \
|
||||
--hash=sha256:7dc785f6f0c0c57b19cea8d98f9ea8feef53945613967b52c9348c81327010e2 \
|
||||
--hash=sha256:6ba4dd772d0c7cdfb3383ca325b35639e01ac9e142e4baa6445cd85c7fb59552
|
||||
certbot==0.39.0 \
|
||||
--hash=sha256:f1a70651a6c5137a448f4a8db17b09af619f80a077326caae6b74278bf1db488 \
|
||||
--hash=sha256:885cee1c4d05888af86b626cbbfc29d3c6c842ef4fe8f4a486994cef9daddfe0
|
||||
acme==0.39.0 \
|
||||
--hash=sha256:4f8be913df289b981852042719469cc367a7e436256f232c799d0bd1521db710 \
|
||||
--hash=sha256:a2fcb75d16de6804f4b4d773a457ee2f6434ebaf8fd1aa60862a91d4e8f73608
|
||||
certbot-apache==0.39.0 \
|
||||
--hash=sha256:c7a8630a85b753a52ca0b8c19e24b8f85ac4ba028292a95745e250c2e72faab9 \
|
||||
--hash=sha256:4651a0212c9ebc3087281dad92ad3cb355bb2730f432d0180a8d23325d11825a
|
||||
certbot-nginx==0.39.0 \
|
||||
--hash=sha256:76e5862ad5cc0fbc099df3502987c101c60dee1c188a579eac990edee7a910df \
|
||||
--hash=sha256:ceac88df52d3b27d14c3052b9e90ada327d7e14ecd6e4af7519918182d6138b4
|
||||
|
||||
UNLIKELY_EOF
|
||||
# -------------------------------------------------------------------------
|
||||
@@ -1273,7 +1379,6 @@ from distutils.version import StrictVersion
|
||||
from hashlib import sha256
|
||||
from os import environ
|
||||
from os.path import join
|
||||
from pipes import quote
|
||||
from shutil import rmtree
|
||||
try:
|
||||
from subprocess import check_output
|
||||
@@ -1293,7 +1398,7 @@ except ImportError:
|
||||
cmd = popenargs[0]
|
||||
raise CalledProcessError(retcode, cmd)
|
||||
return output
|
||||
from sys import exit, version_info
|
||||
import sys
|
||||
from tempfile import mkdtemp
|
||||
try:
|
||||
from urllib2 import build_opener, HTTPHandler, HTTPSHandler
|
||||
@@ -1315,7 +1420,7 @@ maybe_argparse = (
|
||||
[('18/dd/e617cfc3f6210ae183374cd9f6a26b20514bbb5a792af97949c5aacddf0f/'
|
||||
'argparse-1.4.0.tar.gz',
|
||||
'62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4')]
|
||||
if version_info < (2, 7, 0) else [])
|
||||
if sys.version_info < (2, 7, 0) else [])
|
||||
|
||||
|
||||
PACKAGES = maybe_argparse + [
|
||||
@@ -1396,7 +1501,8 @@ def get_index_base():
|
||||
|
||||
|
||||
def main():
|
||||
pip_version = StrictVersion(check_output(['pip', '--version'])
|
||||
python = sys.executable or 'python'
|
||||
pip_version = StrictVersion(check_output([python, '-m', 'pip', '--version'])
|
||||
.decode('utf-8').split()[1])
|
||||
has_pip_cache = pip_version >= StrictVersion('6.0')
|
||||
index_base = get_index_base()
|
||||
@@ -1406,12 +1512,12 @@ def main():
|
||||
temp,
|
||||
digest)
|
||||
for path, digest in PACKAGES]
|
||||
check_output('pip install --no-index --no-deps -U ' +
|
||||
# Disable cache since we're not using it and it otherwise
|
||||
# sometimes throws permission warnings:
|
||||
('--no-cache-dir ' if has_pip_cache else '') +
|
||||
' '.join(quote(d) for d in downloads),
|
||||
shell=True)
|
||||
# Calling pip as a module is the preferred way to avoid problems about pip self-upgrade.
|
||||
command = [python, '-m', 'pip', 'install', '--no-index', '--no-deps', '-U']
|
||||
# Disable cache since it is not used and it otherwise sometimes throws permission warnings:
|
||||
command.extend(['--no-cache-dir'] if has_pip_cache else [])
|
||||
command.extend(downloads)
|
||||
check_output(command)
|
||||
except HashError as exc:
|
||||
print(exc)
|
||||
except Exception:
|
||||
@@ -1424,7 +1530,7 @@ def main():
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
exit(main())
|
||||
sys.exit(main())
|
||||
|
||||
UNLIKELY_EOF
|
||||
# -------------------------------------------------------------------------
|
||||
@@ -1508,6 +1614,24 @@ else
|
||||
exit 0
|
||||
fi
|
||||
|
||||
DeterminePythonVersion "NOCRASH"
|
||||
# Don't warn about file permissions if the user disabled the check or we
|
||||
# can't find an up-to-date Python.
|
||||
if [ "$PYVER" -ge "$MIN_PYVER" -a "$NO_PERMISSIONS_CHECK" != 1 ]; then
|
||||
# If the script fails for some reason, don't break certbot-auto.
|
||||
set +e
|
||||
# Suppress unexpected error output.
|
||||
CHECK_PERM_OUT=$(CheckPathPermissions "$LE_PYTHON" "$0" 2>/dev/null)
|
||||
CHECK_PERM_STATUS="$?"
|
||||
set -e
|
||||
# Only print output if the script ran successfully and it actually produced
|
||||
# output. The latter check resolves
|
||||
# https://github.com/certbot/certbot/issues/7012.
|
||||
if [ "$CHECK_PERM_STATUS" = 0 -a -n "$CHECK_PERM_OUT" ]; then
|
||||
error "$CHECK_PERM_OUT"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$NO_SELF_UPGRADE" != 1 ]; then
|
||||
TEMP_DIR=$(TempDir)
|
||||
trap 'rm -rf "$TEMP_DIR"' EXIT
|
||||
@@ -1664,7 +1788,6 @@ if __name__ == '__main__':
|
||||
|
||||
UNLIKELY_EOF
|
||||
# ---------------------------------------------------------------------------
|
||||
DeterminePythonVersion "NOCRASH"
|
||||
if [ "$PYVER" -lt "$MIN_PYVER" ]; then
|
||||
error "WARNING: couldn't find Python $MIN_PYTHON_VERSION+ to check for updates."
|
||||
elif ! REMOTE_VERSION=`"$LE_PYTHON" "$TEMP_DIR/fetch.py" --latest-version` ; then
|
||||
|
||||
1
certbot-ci/MANIFEST.in
Normal file
@@ -0,0 +1 @@
recursive-include certbot_integration_tests/assets *
9
certbot-ci/certbot_integration_tests/.coveragerc
Normal file
@@ -0,0 +1,9 @@
[run]
# Avoid false warnings because certbot packages are not installed in the thread that executes
# the coverage: indeed, certbot is launched as a CLI from a subprocess.
disable_warnings = module-not-imported,no-data-collected
omit = **/*_test.py,**/tests/*,**/dns_common*,**/certbot_nginx/parser_obj.py

[report]
# Exclude unit tests in coverage during integration tests.
omit = **/*_test.py,**/tests/*,**/dns_common*,**/certbot_nginx/parser_obj.py
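A hypothetical way to run the integration tests under this configuration (the pytest target and working directory are assumptions, not something this diff specifies):

cd certbot-ci
coverage run --rcfile=certbot_integration_tests/.coveragerc -m pytest certbot_integration_tests
coverage report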
1
certbot-ci/certbot_integration_tests/__init__.py
Normal file
@@ -0,0 +1 @@
"""Package certbot_integration_test is for tests that require a live acme ca server instance"""
32
certbot-ci/certbot_integration_tests/assets/cert.pem
Normal file
@@ -0,0 +1,32 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFlTCCA32gAwIBAgIUR3wbM8qFE68f8NxfciHhUjR1GeUwDQYJKoZIhvcNAQEL
|
||||
BQAwWTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
|
||||
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDESMBAGA1UEAwwJbmdpbngud3RmMCAX
|
||||
DTE5MDQxODIwMDUwM1oYDzIyOTMwMTMwMjAwNTAzWjBZMQswCQYDVQQGEwJBVTET
|
||||
MBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQ
|
||||
dHkgTHRkMRIwEAYDVQQDDAluZ2lueC53dGYwggIiMA0GCSqGSIb3DQEBAQUAA4IC
|
||||
DwAwggIKAoICAQC/W+yxYE0PWJOS4df71Yx596fDjW03I9JZuu9kfP7mneMgy+OC
|
||||
HyRm0TEhl6FPUp9tD9YeEHloUZNjHEOg/qrnbEOspv3Ha3RFinzrzkMwbzEPR3Xf
|
||||
0go+aVsWelDhapFl8fccw4tWwijVZQquhBsWOUnPenS3Txe96kEv2NNJlJ0qFUa+
|
||||
rOTruzRzOzlbgKv5WRb4+BxxWonHLkAQ5IT87GBlsCerVIyPD+BnZveZGl6e9oMH
|
||||
ZlZvUT6aWRnzFWjAnQGiJpVIw7l9r4EW0jq1z7wqb37FrqrFbtWrOfUZVE7AlqXH
|
||||
aKIR82/xwkcZfFk3sCAM0IcZc8B2SDLi4zNZtDivW6qQgTC/3z5yf1hnJ+j00dtE
|
||||
X5qYlgXRaM2raOn31lxcerk5pjgagQ7Zj+v3YZS0QnenrgyXJcdnXLDj+cIARzx4
|
||||
QHtoO0nyP0RJqxvwX/H98513JTkeqFBc/Bx11UWYsUv20Qoo9IAuz0VDARu6rquu
|
||||
k9anv56yvxo77qZ8r80l3z8eMyDA+UjuSD2p1Za09RAHfva7o8rMUqULHNQ4pfFH
|
||||
JIUozHoinAg/9lBC/W80fcbILks+Sdi6E9WQ0n8PLl7oFLx9prEDCycKuC0z76J/
|
||||
Shb6R6sWr1YtzUFUc5EH2g9pMriaqT8uGO4CMOeRemXahrdT/H+Xg5m4TQIDAQAB
|
||||
o1MwUTAdBgNVHQ4EFgQU46gJeu9ZOfTQ6c4vfbWbSLUpEMowHwYDVR0jBBgwFoAU
|
||||
46gJeu9ZOfTQ6c4vfbWbSLUpEMowDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0B
|
||||
AQsFAAOCAgEAcnfkXDUTsEGs0MleegkGbRCVy72a3U7tv1KVTLB8qLPc3tpPJJoT
|
||||
D4PbOuw9+yIE+HetZTZooOpaZoorLQdiwAEjlQ44RVuXSHSARQ8KW9ZZeiWN/Qvl
|
||||
Ip4xJ/cHxcKTFKSc/99o8M+kmPKEXF9SUMfKPc5jXarNxCsnA3VriYqJ+CnYEox2
|
||||
duNUEe3A9Y2d8ZxjmscBqlcXpk1kFwsCRT5UYVoUYwyjYznLkO5A+GJ0ZnMyRMQp
|
||||
obUiB34hUrNgyOaBvizk+pNh9EV4rEBPRQwhy4vDMco4AjQcwLWQAQ9G4GSt/E+Q
|
||||
62XdVDa6CAuOvBCudDPki7kEqNLbj1tMY1K/gsbgb6TYA/xTOVulAnqm4OEZ2svJ
|
||||
0Jqw3BzMfRTaxsNU6jxm8WehVL15GjoJUzfs7Te+l7Vm/QNc1Dv2pmEhVfBibwMa
|
||||
YxUZ8ClQtQ1lsOpne97Og0p/Cm93kKELNBLTjzXtpXGGPPYisAyNwe0Hadq8SiOd
|
||||
pXeNwXa5vHOXHv8xBENzBvFJ3TRN2GmMlHBp/eOfVUx/huNSpcnh2gO3fn5EbMj7
|
||||
43IaR133JW5yWbneYAMJOEAMdEB5EthRmEDtLVA7kLqLc/ywFTQ4VbS2b+PsOr5O
|
||||
501nzt0OTMMEz+UafvGXj5OPJBhe26RtnYXzVwwLfto/F5udM5zglWo=
|
||||
-----END CERTIFICATE-----
|
||||
11
certbot-ci/certbot_integration_tests/assets/hook.py
Executable file
@@ -0,0 +1,11 @@
#!/usr/bin/env python
import sys
import os

hook_script_type = os.path.basename(os.path.dirname(sys.argv[1]))
if hook_script_type == 'deploy' and ('RENEWED_DOMAINS' not in os.environ or 'RENEWED_LINEAGE' not in os.environ):
    sys.stderr.write('Environment variables not properly set!\n')
    sys.exit(1)

with open(sys.argv[2], 'a') as file_h:
    file_h.write(hook_script_type + '\n')
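A manual invocation of this hook for illustration (paths, domain, and lineage are made up): the hook type is taken from the name of argv[1]'s parent directory, and that type is appended to the log file given as argv[2].

RENEWED_DOMAINS=example.org RENEWED_LINEAGE=/etc/letsencrypt/live/example.org \
  ./hook.py /tmp/hooks/deploy/some-hook.sh /tmp/hook-calls.log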
52
certbot-ci/certbot_integration_tests/assets/key.pem
Normal file
@@ -0,0 +1,52 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIIJQQIBADANBgkqhkiG9w0BAQEFAASCCSswggknAgEAAoICAQC/W+yxYE0PWJOS
|
||||
4df71Yx596fDjW03I9JZuu9kfP7mneMgy+OCHyRm0TEhl6FPUp9tD9YeEHloUZNj
|
||||
HEOg/qrnbEOspv3Ha3RFinzrzkMwbzEPR3Xf0go+aVsWelDhapFl8fccw4tWwijV
|
||||
ZQquhBsWOUnPenS3Txe96kEv2NNJlJ0qFUa+rOTruzRzOzlbgKv5WRb4+BxxWonH
|
||||
LkAQ5IT87GBlsCerVIyPD+BnZveZGl6e9oMHZlZvUT6aWRnzFWjAnQGiJpVIw7l9
|
||||
r4EW0jq1z7wqb37FrqrFbtWrOfUZVE7AlqXHaKIR82/xwkcZfFk3sCAM0IcZc8B2
|
||||
SDLi4zNZtDivW6qQgTC/3z5yf1hnJ+j00dtEX5qYlgXRaM2raOn31lxcerk5pjga
|
||||
gQ7Zj+v3YZS0QnenrgyXJcdnXLDj+cIARzx4QHtoO0nyP0RJqxvwX/H98513JTke
|
||||
qFBc/Bx11UWYsUv20Qoo9IAuz0VDARu6rquuk9anv56yvxo77qZ8r80l3z8eMyDA
|
||||
+UjuSD2p1Za09RAHfva7o8rMUqULHNQ4pfFHJIUozHoinAg/9lBC/W80fcbILks+
|
||||
Sdi6E9WQ0n8PLl7oFLx9prEDCycKuC0z76J/Shb6R6sWr1YtzUFUc5EH2g9pMria
|
||||
qT8uGO4CMOeRemXahrdT/H+Xg5m4TQIDAQABAoICAAGGL+pxw+tdXz+KQPgmiUnn
|
||||
aRSrqbUIugIw9Pst67HWjBqUxSkiKl4PSH7mAEjrdY2e1KvEodLs42mkrf04ShAx
|
||||
0pArfFX8Sx7KrZgLOonGOPPQM+YmfCJnIGybaM2C1cmkFb3K6O81+LFKbr1ZHAYf
|
||||
SrE2XnufS6cdmItTBMvPPTk6lieqpOAjy5UnYZuS+Muxo/czsrZMbFCD08rOpyiE
|
||||
kXf94TMCJ2R0UetA7LPxe9N0TzLd485bLU55azV+dCkklwC9oe7EcFPJ9BNEdWdB
|
||||
UlRcMvxMGdwct+L3QTaEb2QlTwi5kqDl+XxJeduAHA3Pf1Haz1iqjVvj01PvT1di
|
||||
Cs0+ZeFBsa+BfiGDe9ONwuSQljda1CuI+vDv5bGUExulOSG1dHJ7RK9PBaXFaR/b
|
||||
/9tRBwAw1Erm7s1JIkjda5Oc46jFb3HzDaZYB1n5hUmEIrYM8HhUOGITyVT3hxDO
|
||||
AWlaV3aveQ0MmMXLptVXDgbjPGbWDGMLD9d5vUE9R7IyOLeXOmjthYlCH2rj378M
|
||||
r2PkgX2tD0A/yoEZ8XCFdtBWSVajLdL0/gkm7sKosMABBy3yrSCxbHeq5TFuTAXA
|
||||
hOdypX4NOZkA6WJU+hn3GkQyIScLqSrvGRA9kzHGoEWVZDKkB9DXg+dmTARZDWXD
|
||||
mCnHkJo6+FcbhUpXniuZAoIBAQDmE94vvdstB+HEtXxN1uNDY7H8gPc/BUonU6a9
|
||||
G5YOIbjByCfEDcXF8AUWekc6lc8DNG3ydx0dnb2ZAkxmdlsaD8GLqHGILzlSsOwR
|
||||
sez8nR4+4n9vYMfx9Qal8Ren5xEP9Z9sJcNqbKVGta1WFtQzrgYbpVXXf/Luv0xS
|
||||
YoVK8KaEACciD6XX4wmajrAXPPQgThvqQtXuTn/AxWsUDg1DK0tw1VRUuOJuJwpw
|
||||
f6qocM9AyqUNvdeVyjFx8Slag34ZI7fmxPtHX/e6opTg3zVXab1Ow8AMICOHMRL6
|
||||
m5/+wnWa9xMoKI4kYfk/QFqeTccnLDlwi6kQM8WRfbwr9AyPAoIBAQDU60wrX6Lm
|
||||
0vIfngv1/4j/w+AGAwjvxiuJ7Q7LwQ2fGsZGOIfMK/lpBxCn543kGbQT+KQKNOjO
|
||||
+EywObftnJ6Y2+om2NoLkCnCiptsfr5WlN8pxtIPQu2iu5xXA67WpQv4Nc4769PM
|
||||
wJGVW3pmPKi6H0QjjqYAZd1NAXdN9Au14zZVh3KBWoz82kTHWKSL6Ld1UClG728W
|
||||
k/moyCFFMMGTXX/LVliQzDVLM6L5jbAOaG317qAuxZIqFJ9NLwHFW9uH/i1S6Qfp
|
||||
+lOmOfVYKu1O/qh1DUBQfuJkR1XIn2ifZEjxOsxeTmWu1LXpyoZy526JRu49pk8Z
|
||||
DdEu+w7hsdNjAoIBAD1YWsub8Y6GJXpPcX9HpnzXXiOXN1VEUcs+kJyneFD4SMzS
|
||||
U1gA3BS0tIaTv94tB28xUYdunwLAhkb/x+Mh95RxUwert+m5va0Ao1DsgeWw9tmJ
|
||||
hrTptyYaUNV5/Pa1s2Tv9rvdLcd4hHDgDAGCQL4uzk4cvVCiOuHRe8YTorqig6N6
|
||||
bvSz+2IelPbyyJzJkcXzTZoei+/oWkPJ340PWhXou0qwdrXIPgdkvXHVeGlE+t2p
|
||||
qmyJi6vSp3Bb/sy1dq+5SFVtfBpBykmnA88ZdJ2EAge4RcJ150MqoIbVa8l/i9/v
|
||||
tNnmRlAJF233+LFwx4L4VbBebIt3YlwyjDOj9J0CggEAIknKOGnsV/O8ni7bikAe
|
||||
leG7X/x5IfPt6wZMDbAHO4oaSBCufcjPH4TNv9xgU014XIb8E9C1dS8zWmXRIujH
|
||||
+aHgsWTWqGoM75FWukAm8taCob2s8lw63KwN301uiI6HwO8ZSTkPILgaOc1DhtdZ
|
||||
7K9AT+GXBhVhcBc+WUVl5WKzy05GuGIWtlmIHfo+dXGCqdfA7fV9FEu8NtwTz4qs
|
||||
gcja3aoIFTltk7C7HCkfIxLaMnK9RQr4IOK1TL63MEs8rUfXkLSKW7m+YtSOmCZB
|
||||
lSkZg9AgfVYRq0h5nhddx91kicSISN+jLGaA7Sd6Q2LVwDG2CCOSNVyuRTyVBu+W
|
||||
NQKCAQAWN6vB6oToNIoBLdOThm0HD07cNHcrnBjtaKsYsQDgqbr2m8LRCRzNRML4
|
||||
jG0IAOWpuCiEGsgUPxywiI1Ufvyq7ZSNT1QQNzCR47NM3Ve6S2abrQkMIk9VJ+za
|
||||
CB9c1BH92GokoRxqswb/BiMttG2EIP8L8/pSRYEcVnaaxAkf9QOhEwj4LJPGX0mS
|
||||
t7kWIUVHPdFJ67F25dYr3mUHgyV+QJupQICkkkgY3nBOU1fS42vAugaxqH0wAP3T
|
||||
53FlpY3NuE7+kYC3FjfcBer99F1pOac3X9jxhk26w9dr2/QNA33xhDXHKYvoLUCG
|
||||
RPQylahJByU7IrtQzSCf/RE7q4v0
|
||||
-----END PRIVATE KEY-----
|
||||
Some files were not shown because too many files have changed in this diff.