Compare commits: fix_centos...remove-ngi (389 commits)
.azure-pipelines/INSTALL.md (new file, 119 lines)
@@ -0,0 +1,119 @@
|
||||
# Configuring Azure Pipelines with Certbot
|
||||
|
||||
Let's begin. All pipelines are defined in `.azure-pipelines`. Currently there are two:
|
||||
* `.azure-pipelines/main.yml` is the main one, executed on PRs targeting master and on pushes to master,
* `.azure-pipelines/advanced.yml` adds installer testing on top of the main pipeline, and is executed for `test-*` branches, release branches, and the nightly run on master.
|
||||
|
||||
Several templates are defined in `.azure-pipelines/templates`. These YAML files aggregate common job configuration that can be reused in several pipelines.
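As a minimal illustration, a pipeline pulls one of these templates into its `jobs` section, the same way the pipeline files later in this diff do:

```
jobs:
# Reuse the shared test job definitions from the templates folder
- template: templates/tests-suite.yml
```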
|
||||
|
||||
Unlike Travis, where Codecov works without any action required, Codecov supports Azure Pipelines
(through the coverage-bash utility, not python-coverage for now) only if you provide the Codecov repository token
through the `CODECOV_TOKEN` environment variable. So `CODECOV_TOKEN` needs to be set as a secret
environment variable to allow the main pipeline to publish coverage reports to Codecov.
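As a simplified sketch of the coverage publication step defined in `templates/tests-suite.yml` (shown later in this diff), the secret variable is exposed to the upload command as an environment variable:

```
- bash: ./codecov-bash -F windows
  env:
    CODECOV_TOKEN: $(codecov_token)
```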
|
||||
|
||||
This INSTALL.md file explains how to configure Azure Pipelines for Certbot so that the CI/CD logic defined in the `.azure-pipelines` folder can be executed with it.
During these installation steps, warnings describing user access and legal commitments will be displayed like this:
|
||||
```
|
||||
!!! ACCESS REQUIRED !!!
|
||||
```
|
||||
|
||||
This document assumes that the Azure DevOps organization is named _certbot_, and that the Azure DevOps project is also named _certbot_.
|
||||
|
||||
## Useful links
|
||||
|
||||
* https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=azure-devops&tabs=schema
|
||||
* https://www.azuredevopslabs.com/labs/azuredevops/github-integration/
|
||||
* https://docs.microsoft.com/en-us/azure/devops/pipelines/ecosystems/python?view=azure-devops
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### Having a GitHub account
|
||||
|
||||
Use your regular GitHub user for a personal GitHub account, or a user that has administrative rights on the GitHub organization if relevant.
|
||||
|
||||
### Having an Azure DevOps account
|
||||
- Go to https://dev.azure.com/, click "Start free with GitHub"
|
||||
- Login to GitHub
|
||||
|
||||
```
|
||||
!!! ACCESS REQUIRED !!!
|
||||
Personal user data (email + profile info, in read-only)
|
||||
```
|
||||
|
||||
- Microsoft will create a Live account using the email address referenced by the GitHub account. This account is also linked to the GitHub account (meaning you can log in to it using GitHub authentication)
- Proceed with account registration (birth date, country), and add details about your name and contact email
|
||||
|
||||
```
|
||||
!!! ACCESS REQUIRED !!!
|
||||
Microsoft proposes to send commercial communications to this email address
|
||||
Azure DevOps terms of service need to be accepted
|
||||
```
|
||||
|
||||
_Once logged in to Azure DevOps, the account is ready._
|
||||
|
||||
### Installing Azure Pipelines to GitHub
|
||||
|
||||
- On GitHub, go to Marketplace
|
||||
- Select Azure Pipeline, and "Set up a plan"
|
||||
- Select Free, then "Install it for free"
|
||||
- Click "Complete order and begin installation"
|
||||
|
||||
```
|
||||
!!! ACCESS !!!
|
||||
Azure Pipelines needs RW on code, RO on metadata, and RW on checks, commit statuses, deployments, issues, and pull requests.
RW access here is required to allow updating the pipeline YAML files from the Azure DevOps interface, and to
update the status of builds and PRs on the GitHub side when Azure Pipelines are triggered.
Note however that no admin access is granted here: this means that Azure Pipelines cannot do anything with
protected branches, like master, and cannot modify the security context around this on GitHub.
Access can be granted for all or only selected repositories, which is nice.
|
||||
```
|
||||
|
||||
- Once redirected to Azure DevOps, select the account created in the _Having an Azure DevOps account_ section.
- Select the organization, and click "Create a new project" (let's name it the same as the targeted GitHub repo)
- Set the project visibility to Public, to benefit from 10 parallel jobs
|
||||
|
||||
```
|
||||
!!! ACCESS !!!
|
||||
Azure Pipelines needs access to the GitHub account (in terms of being able to check that it is valid), and to the resources shared between the GitHub account and Azure Pipelines.
|
||||
```
|
||||
|
||||
_Done. We can move on to pipeline configuration._
|
||||
|
||||
## Import an existing pipeline from the `.azure-pipelines` folder
|
||||
|
||||
- On Azure DevOps, go to your organization (e.g. _certbot_), then your project (e.g. _certbot_)
- Click the "Pipelines" tab
- Click "New pipeline"
- In the "Where is your code?" section, select "__Use the classic editor__"
|
||||
|
||||
__Warning: Do not choose the GitHub option in the "Where is your code?" section. That option triggers an OAuth
permission grant from Azure Pipelines to GitHub in order to set up a GitHub OAuth application. The permissions
requested that way are far too broad (admin level on almost everything), while the classic editor approach does not
add any extra permissions and works perfectly well.__
|
||||
|
||||
- Select GitHub in the "Select your repository" section, choose certbot/certbot as the repository and master as the default branch.
|
||||
- Click the YAML option under "Select a template"
- Choose a name for the pipeline (e.g. test-pipeline), and browse to the actual pipeline YAML definition in the
  "YAML file path" input (e.g. `.azure-pipelines/test-pipeline.yml`)
- Click "Save & queue", choose the master branch to build the first pipeline, and click the "Save and run" button.
|
||||
|
||||
_Done. The pipeline is operational. Repeat these steps to add more pipelines from existing YAML files in `.azure-pipelines`._
|
||||
|
||||
## Add a secret variable to a pipeline (like `CODECOV_TOKEN`)
|
||||
|
||||
__NB: The following steps assume that you have already set up the YAML pipeline file to
consume, as an environment variable, the secret variable that these steps will create.
For an environment variable named `CODECOV_TOKEN` consuming the secret variable `codecov_token`,
this setup takes the following form in the YAML file:__
|
||||
```
|
||||
steps:
  - script: ./do_something_that_consumes_CODECOV_TOKEN # Eg. `codecov -F windows`
    env:
      CODECOV_TOKEN: $(codecov_token)
|
||||
```
|
||||
|
||||
To set up a variable that is shared between pipelines, follow the instructions at
https://docs.microsoft.com/en-us/azure/devops/pipelines/library/variable-groups.
When adding variables to a group, don't forget to tick "Keep this value secret"
if it shouldn't be shared publicly.
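A pipeline then consumes the shared group by listing it in its `variables` section, as `templates/tests-suite.yml` in this diff does:

```
variables:
- group: certbot-common
```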
|
||||
.azure-pipelines/advanced.yml (new file, 20 lines)
@@ -0,0 +1,20 @@
# Advanced pipeline for isolated checks and release purpose
trigger:
- test-*
- '*.x'
pr:
- test-*
# This pipeline is also nightly run on master
schedules:
- cron: "0 4 * * *"
  displayName: Nightly build
  branches:
    include:
    - master
  always: true

jobs:
# Any addition here should be reflected in the release pipeline.
# It is advised to declare all jobs here as templates to improve maintainability.
- template: templates/tests-suite.yml
- template: templates/installer-tests.yml
.azure-pipelines/main.yml (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
trigger:
|
||||
# apache-parser-v2 is a temporary branch for doing work related to
|
||||
# rewriting the parser in the Apache plugin.
|
||||
- apache-parser-v2
|
||||
- master
|
||||
pr:
|
||||
- apache-parser-v2
|
||||
- master
|
||||
- '*.x'
|
||||
|
||||
jobs:
|
||||
- template: templates/tests-suite.yml
|
||||
.azure-pipelines/release.yml (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
# Release pipeline to build and deploy Certbot for Windows for GitHub release tags
|
||||
trigger:
|
||||
tags:
|
||||
include:
|
||||
- v*
|
||||
pr: none
|
||||
|
||||
jobs:
|
||||
# Any addition here should be reflected in the advanced pipeline.
|
||||
# It is advised to declare all jobs here as templates to improve maintainability.
|
||||
- template: templates/tests-suite.yml
|
||||
- template: templates/installer-tests.yml
|
||||
- template: templates/changelog.yml
|
||||
.azure-pipelines/templates/changelog.yml (new file, 14 lines)
@@ -0,0 +1,14 @@
jobs:
- job: changelog
  pool:
    vmImage: vs2017-win2016
  steps:
  - bash: |
      CERTBOT_VERSION="$(python -c "import certbot; print(certbot.__version__)")"
      "${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
    displayName: Prepare changelog
  - task: PublishPipelineArtifact@1
    inputs:
      path: $(Build.ArtifactStagingDirectory)
      artifact: changelog
    displayName: Publish changelog
.azure-pipelines/templates/installer-tests.yml (new file, 32 lines)
@@ -0,0 +1,32 @@
jobs:
- job: installer
  pool:
    vmImage: vs2017-win2016
  steps:
  - task: UsePythonVersion@0
    inputs:
      versionSpec: 3.7
      architecture: x86
      addToPath: true
  - script: python windows-installer/construct.py
    displayName: Build Certbot installer
  - task: CopyFiles@2
    inputs:
      sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
      contents: '*.exe'
      targetFolder: $(Build.ArtifactStagingDirectory)
  - task: PublishPipelineArtifact@1
    inputs:
      path: $(Build.ArtifactStagingDirectory)
      artifact: windows-installer
    displayName: Publish Windows installer
  - script: $(Build.ArtifactStagingDirectory)\certbot-beta-installer-win32.exe /S
    displayName: Install Certbot
  - script: |
      python -m venv venv
      venv\Scripts\python tools\pip_install.py -e certbot-ci
    displayName: Prepare Certbot-CI
  - script: |
      set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
      venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
    displayName: Run integration tests
.azure-pipelines/templates/tests-suite.yml (new file, 38 lines)
@@ -0,0 +1,38 @@
jobs:
- job: test
  pool:
    vmImage: vs2017-win2016
  strategy:
    matrix:
      py35:
        PYTHON_VERSION: 3.5
        TOXENV: py35
      py37-cover:
        PYTHON_VERSION: 3.7
        TOXENV: py37-cover
      integration-certbot:
        PYTHON_VERSION: 3.7
        TOXENV: integration-certbot
        PYTEST_ADDOPTS: --numprocesses 4
  variables:
  - group: certbot-common
  steps:
  - task: UsePythonVersion@0
    inputs:
      versionSpec: $(PYTHON_VERSION)
      addToPath: true
  - script: python tools/pip_install.py -U tox coverage
    displayName: Install dependencies
  - script: python -m tox
    displayName: Run tox
  # We do not require the codecov report upload to succeed. So to avoid breaking the pipeline if
  # something goes wrong, each command is suffixed with a fallback that hides any non-zero exit
  # code and echoes an informative message instead.
  - bash: |
      curl -s https://codecov.io/bash -o codecov-bash || echo "Failed to download codecov-bash"
      chmod +x codecov-bash || echo "Failed to apply execute permissions on codecov-bash"
      ./codecov-bash -F windows || echo "Codecov did not collect coverage reports"
    condition: in(variables['TOXENV'], 'py37-cover', 'integration-certbot')
    env:
      CODECOV_TOKEN: $(codecov_token)
    displayName: Publish coverage
.codecov.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
coverage:
  status:
    project:
      default: off
      linux:
        flags: linux
        # Fixed target instead of auto set by #7173, can
        # be removed when flags in Codecov are added back.
        target: 97.4
        threshold: 0.1
        base: auto
      windows:
        flags: windows
        # Fixed target instead of auto set by #7173, can
        # be removed when flags in Codecov are added back.
        target: 97.4
        threshold: 0.1
        base: auto
@@ -1,2 +1,5 @@
|
||||
[run]
|
||||
omit = */setup.py
|
||||
|
||||
[report]
|
||||
omit = */setup.py
|
||||
|
||||
.github/stale.yml (new file, vendored, 35 lines)
@@ -0,0 +1,35 @@
|
||||
# Configuration for https://github.com/marketplace/stale
|
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request becomes stale
|
||||
daysUntilStale: 365
|
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
|
||||
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
|
||||
# When changing this value, be sure to also update markComment below.
|
||||
daysUntilClose: 30
|
||||
|
||||
# Ignore issues with an assignee (defaults to false)
|
||||
exemptAssignees: true
|
||||
|
||||
# Label to use when marking as stale
|
||||
staleLabel: needs-update
|
||||
|
||||
# Comment to post when marking as stale. Set to `false` to disable
|
||||
markComment: >
|
||||
We've made a lot of changes to Certbot since this issue was opened. If you
|
||||
still have this issue with an up-to-date version of Certbot, can you please
|
||||
add a comment letting us know? This helps us to better see what issues are
|
||||
still affecting our users. If there is no activity in the next 30 days, this
|
||||
issue will be automatically closed.
|
||||
|
||||
# Comment to post when closing a stale Issue or Pull Request.
|
||||
closeComment: >
|
||||
This issue has been closed due to lack of activity, but if you think it
|
||||
should be reopened, please open a new issue with a link to this one and we'll
|
||||
take a look.
|
||||
|
||||
# Limit the number of actions per hour, from 1-30. Default is 30
|
||||
limitPerRun: 1
|
||||
|
||||
# Don't mark pull requests as stale.
|
||||
only: issues
|
||||
.gitignore (vendored, 5 changed lines)
@@ -44,3 +44,8 @@ tests/letstest/venv/
|
||||
|
||||
# docker files
|
||||
.docker
|
||||
|
||||
# certbot tests
|
||||
.certbot_test_workspace
|
||||
**/assets/pebble*
|
||||
**/assets/challtestsrv*
|
||||
|
||||
.pylintrc (38 changed lines)
@@ -41,7 +41,7 @@ load-plugins=linter_plugin
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
disable=fixme,locally-disabled,locally-enabled,abstract-class-not-used,abstract-class-little-used,bad-continuation,too-few-public-methods,no-self-use,invalid-name,too-many-instance-attributes,cyclic-import,duplicate-code
|
||||
disable=fixme,locally-disabled,locally-enabled,abstract-class-not-used,abstract-class-little-used,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design
|
||||
# abstract-class-not-used cannot be disabled locally (at least in
|
||||
# pylint 1.4.1), same for abstract-class-little-used
|
||||
|
||||
@@ -251,7 +251,7 @@ ignored-modules=pkg_resources,confargparse,argparse,six.moves,six.moves.urllib
|
||||
|
||||
# List of classes names for which member attributes should not be checked
|
||||
# (useful for classes with attributes dynamically set).
|
||||
ignored-classes=SQLObject
|
||||
ignored-classes=Field,Header,JWS,closing
|
||||
|
||||
# When zope mode is activated, add a predefined set of Zope acquired attributes
|
||||
# to generated-members.
|
||||
@@ -297,40 +297,6 @@ valid-classmethod-first-arg=cls
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# Maximum number of arguments for function / method
|
||||
max-args=6
|
||||
|
||||
# Argument names that match this expression will be ignored. Default to name
|
||||
# with leading underscore
|
||||
ignored-argument-names=_.*
|
||||
|
||||
# Maximum number of locals for function / method body
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of return / yield for function / method body
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of branch for function / method body
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of statements in function / method body
|
||||
max-statements=50
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=12
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
|
||||
.travis.yml (187 changed lines)
@@ -1,11 +1,16 @@
|
||||
language: python
|
||||
dist: xenial
|
||||
|
||||
cache:
|
||||
directories:
|
||||
- $HOME/.cache/pip
|
||||
|
||||
before_script:
|
||||
- 'if [ $TRAVIS_OS_NAME = osx ] ; then ulimit -n 1024 ; fi'
|
||||
- 'if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then ulimit -n 1024 ; fi'
|
||||
# On Travis, the fastest parallelization for integration tests has proved to be 4.
|
||||
- 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
|
||||
# Use Travis retry feature for farm tests since they are flaky
|
||||
- 'if [[ "$TOXENV" == "travis-test-farm"* ]]; then export TRAVIS_RETRY=travis_retry; fi'
|
||||
- export TOX_TESTENV_PASSENV=TRAVIS
|
||||
|
||||
# Only build pushes to the master branch, PRs, and branches beginning with
|
||||
@@ -14,6 +19,9 @@ before_script:
|
||||
# is a cap of on the number of simultaneous runs.
|
||||
branches:
|
||||
only:
|
||||
# apache-parser-v2 is a temporary branch for doing work related to
|
||||
# rewriting the parser in the Apache plugin.
|
||||
- apache-parser-v2
|
||||
- master
|
||||
- /^\d+\.\d+\.x$/
|
||||
- /^test-.*$/
|
||||
@@ -22,34 +30,22 @@ branches:
|
||||
not-on-master: &not-on-master
|
||||
if: NOT (type = push AND branch = master)
|
||||
|
||||
# Jobs for the extended test suite are executed for cron jobs and pushes on non-master branches.
|
||||
# Jobs for the extended test suite are executed for cron jobs and pushes to
|
||||
# non-development branches. See the explanation for apache-parser-v2 above.
|
||||
extended-test-suite: &extended-test-suite
|
||||
if: type = cron OR (type = push AND branch != master)
|
||||
if: type = cron OR (type = push AND branch NOT IN (apache-parser-v2, master))
|
||||
|
||||
matrix:
|
||||
include:
|
||||
# Main test suite
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v1 INTEGRATION_TEST=all TOXENV=py27_install
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v2 INTEGRATION_TEST=all TOXENV=py27_install
|
||||
sudo: required
|
||||
services: docker
|
||||
env: ACME_SERVER=pebble TOXENV=integration
|
||||
<<: *not-on-master
|
||||
|
||||
# This job is always executed, including on master
|
||||
- python: "2.7"
|
||||
env: TOXENV=py27-cover FYI="py27 tests + code coverage"
|
||||
|
||||
- sudo: required
|
||||
env: TOXENV=nginx_compat
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
env: TOXENV=lint
|
||||
<<: *not-on-master
|
||||
@@ -60,20 +56,20 @@ matrix:
|
||||
env: TOXENV=mypy
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
env: TOXENV='py27-{acme,apache,certbot,dns,nginx,postfix}-oldest'
|
||||
sudo: required
|
||||
services: docker
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
env: TOXENV='py27-{acme,apache,certbot,dns,nginx}-oldest'
|
||||
<<: *not-on-master
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *not-on-master
|
||||
- python: "3.7"
|
||||
dist: xenial
|
||||
env: TOXENV=py37
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *not-on-master
|
||||
- python: "3.8"
|
||||
env: TOXENV=py38
|
||||
<<: *not-on-master
|
||||
- sudo: required
|
||||
env: TOXENV=apache_compat
|
||||
@@ -82,90 +78,154 @@ matrix:
|
||||
addons:
|
||||
<<: *not-on-master
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_trusty
|
||||
env: TOXENV=le_auto_xenial
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
env: TOXENV=apacheconftest-with-pebble
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *not-on-master
|
||||
- python: "2.7"
|
||||
env: TOXENV=nginxroundtrip
|
||||
<<: *not-on-master
|
||||
|
||||
# Extended test suite on cron jobs and pushes to tested branches other than master
|
||||
- sudo: required
|
||||
env: TOXENV=nginx_compat
|
||||
services: docker
|
||||
before_install:
|
||||
addons:
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-apache2
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-leauto-upgrades
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
git:
|
||||
depth: false # This is needed to have the history to checkout old versions of certbot-auto.
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-certonly-standalone
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env:
|
||||
- TOXENV=travis-test-farm-sdists
|
||||
- secure: "f+j/Lj9s1lcuKo5sEFrlRd1kIAMnIJI4z0MTI7QF8jl9Fkmbx7KECGzw31TNgzrOSzxSapHbcueFYvNCLKST+kE/8ogMZBbwqXfEDuKpyF6BY3uYoJn+wPVE5pIb8Hhe08xPte8TTDSMIyHI3EyTfcAKrIreauoArePvh/cRvSw="
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
dist: xenial
|
||||
env: TOXENV=py37 CERTBOT_NO_PIN=1
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v1 INTEGRATION_TEST=certbot TOXENV=py27-certbot-oldest
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v2 INTEGRATION_TEST=certbot TOXENV=py27-certbot-oldest
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v1 INTEGRATION_TEST=nginx TOXENV=py27-nginx-oldest
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration-certbot-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: BOULDER_INTEGRATION=v2 INTEGRATION_TEST=nginx TOXENV=py27-nginx-oldest
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration-certbot-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration-nginx-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "2.7"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration-nginx-oldest
|
||||
# Ubuntu Trusty or older must be used because the oldest version of
|
||||
# cryptography we support cannot be compiled against the version of
|
||||
# OpenSSL in Xenial or newer.
|
||||
dist: trusty
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34 BOULDER_INTEGRATION=v1
|
||||
env: TOXENV=py34
|
||||
<<: *extended-test-suite
|
||||
- python: "3.5"
|
||||
env: TOXENV=py35
|
||||
<<: *extended-test-suite
|
||||
- python: "3.6"
|
||||
env: TOXENV=py36
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
env: TOXENV=py37
|
||||
<<: *extended-test-suite
|
||||
- python: "3.8-dev"
|
||||
env: TOXENV=py38
|
||||
<<: *extended-test-suite
|
||||
- python: "3.4"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.4"
|
||||
env: TOXENV=py34 BOULDER_INTEGRATION=v2
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.5"
|
||||
env: TOXENV=py35 BOULDER_INTEGRATION=v1
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.5"
|
||||
env: TOXENV=py35 BOULDER_INTEGRATION=v2
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.6"
|
||||
env: TOXENV=py36 BOULDER_INTEGRATION=v1
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.6"
|
||||
env: TOXENV=py36 BOULDER_INTEGRATION=v2
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
dist: xenial
|
||||
env: TOXENV=py37 BOULDER_INTEGRATION=v1
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- python: "3.7"
|
||||
dist: xenial
|
||||
env: TOXENV=py37 BOULDER_INTEGRATION=v2
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
sudo: required
|
||||
services: docker
|
||||
<<: *extended-test-suite
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_xenial
|
||||
services: docker
|
||||
- python: "3.8-dev"
|
||||
env: ACME_SERVER=boulder-v1 TOXENV=integration
|
||||
<<: *extended-test-suite
|
||||
- python: "3.8-dev"
|
||||
env: ACME_SERVER=boulder-v2 TOXENV=integration
|
||||
<<: *extended-test-suite
|
||||
- sudo: required
|
||||
env: TOXENV=le_auto_jessie
|
||||
@@ -186,6 +246,9 @@ matrix:
|
||||
- language: generic
|
||||
env: TOXENV=py27
|
||||
os: osx
|
||||
# Using this osx_image is a workaround for
|
||||
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
|
||||
osx_image: xcode10.2
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
@@ -195,6 +258,9 @@ matrix:
|
||||
- language: generic
|
||||
env: TOXENV=py3
|
||||
os: osx
|
||||
# Using this osx_image is a workaround for
|
||||
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
|
||||
osx_image: xcode10.2
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
@@ -209,7 +275,6 @@ addons:
|
||||
apt:
|
||||
packages: # Keep in sync with letsencrypt-auto-source/pieces/bootstrappers/deb_common.sh and Boulder.
|
||||
- python-dev
|
||||
- python-virtualenv
|
||||
- gcc
|
||||
- libaugeas0
|
||||
- libssl-dev
|
||||
@@ -219,19 +284,29 @@ addons:
|
||||
- nginx-light
|
||||
- openssl
|
||||
|
||||
install: "$(command -v pip || command -v pip3) install codecov tox"
|
||||
script:
|
||||
- tox
|
||||
- '[ -z "${BOULDER_INTEGRATION+x}" ] || (tests/boulder-fetch.sh && tests/tox-boulder-integration.sh)'
|
||||
# tools/pip_install.py is used to pin packages to a known working version
|
||||
# except in tests where the environment variable CERTBOT_NO_PIN is set.
|
||||
# virtualenv is listed here explicitly to make sure it is upgraded when
|
||||
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
|
||||
# version of virtualenv.
|
||||
install: 'tools/pip_install.py -U codecov tox virtualenv'
|
||||
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
|
||||
# script command. It is set only to `travis_retry` during farm tests, in
|
||||
# order to trigger the Travis retry feature, and compensate for the inherent
|
||||
# flakiness of these specific tests.
|
||||
script: '$TRAVIS_RETRY tox'
|
||||
|
||||
after_success: '[ "$TOXENV" == "py27-cover" ] && codecov'
|
||||
after_success: '[ "$TOXENV" == "py27-cover" ] && codecov -F linux'
|
||||
|
||||
notifications:
|
||||
email: false
|
||||
irc:
|
||||
channels:
|
||||
- secure: "SGWZl3ownKx9xKVV2VnGt7DqkTmutJ89oJV9tjKhSs84kLijU6EYdPnllqISpfHMTxXflNZuxtGo0wTDYHXBuZL47w1O32W6nzuXdra5zC+i4sYQwYULUsyfOv9gJX8zWAULiK0Z3r0oho45U+FR5ZN6TPCidi8/eGU+EEPwaAw="
|
||||
# This is set to a secure variable to prevent forks from sending
|
||||
# notifications. This value was created by installing
|
||||
# https://github.com/travis-ci/travis.rb and running
|
||||
# `travis encrypt "chat.freenode.net#certbot-devel"`.
|
||||
- secure: "EWW66E2+KVPZyIPR8ViENZwfcup4Gx3/dlimmAZE0WuLwxDCshBBOd3O8Rf6pBokEoZlXM5eDT6XdyJj8n0DLslgjO62pExdunXpbcMwdY7l1ELxX2/UbnDTE6UnPYa09qVBHNG7156Z6yE0x2lH4M9Ykvp0G0cubjPQHylAwo0="
|
||||
on_cancel: never
|
||||
on_success: never
|
||||
on_failure: always
|
||||
use_notice: true
|
||||
|
||||
@@ -5,6 +5,7 @@ Authors
|
||||
* Aaron Zuehlke
|
||||
* Ada Lovelace
|
||||
* [Adam Woodbeck](https://github.com/awoodbeck)
|
||||
* [Adrien Ferrand](https://github.com/adferrand)
|
||||
* [Aidin Gharibnavaz](https://github.com/aidin36)
|
||||
* [AJ ONeal](https://github.com/coolaj86)
|
||||
* [Alcaro](https://github.com/Alcaro)
|
||||
@@ -14,8 +15,10 @@ Authors
|
||||
* [Alex Gaynor](https://github.com/alex)
|
||||
* [Alex Halderman](https://github.com/jhalderm)
|
||||
* [Alex Jordan](https://github.com/strugee)
|
||||
* [Alex Zorin](https://github.com/alexzorin)
|
||||
* [Amjad Mashaal](https://github.com/TheNavigat)
|
||||
* [Andrew Murray](https://github.com/radarhere)
|
||||
* [Andrzej Górski](https://github.com/andrzej3393)
|
||||
* [Anselm Levskaya](https://github.com/levskaya)
|
||||
* [Antoine Jacoutot](https://github.com/ajacoutot)
|
||||
* [asaph](https://github.com/asaph)
|
||||
@@ -75,6 +78,7 @@ Authors
|
||||
* [Fabian](https://github.com/faerbit)
|
||||
* [Faidon Liambotis](https://github.com/paravoid)
|
||||
* [Fan Jiang](https://github.com/tcz001)
|
||||
* [Felix Lechner](https://github.com/lechner)
|
||||
* [Felix Schwarz](https://github.com/FelixSchwarz)
|
||||
* [Felix Yan](https://github.com/felixonmars)
|
||||
* [Filip Ochnik](https://github.com/filipochnik)
|
||||
@@ -124,6 +128,7 @@ Authors
|
||||
* [Joubin Jabbari](https://github.com/joubin)
|
||||
* [Juho Juopperi](https://github.com/jkjuopperi)
|
||||
* [Kane York](https://github.com/riking)
|
||||
* [Kenichi Maehashi](https://github.com/kmaehashi)
|
||||
* [Kenneth Skovhede](https://github.com/kenkendk)
|
||||
* [Kevin Burke](https://github.com/kevinburke)
|
||||
* [Kevin London](https://github.com/kevinlondon)
|
||||
@@ -159,8 +164,10 @@ Authors
|
||||
* [Michael Schumacher](https://github.com/schumaml)
|
||||
* [Michael Strache](https://github.com/Jarodiv)
|
||||
* [Michael Sverdlin](https://github.com/sveder)
|
||||
* [Michael Watters](https://github.com/blackknight36)
|
||||
* [Michal Moravec](https://github.com/Majkl578)
|
||||
* [Michal Papis](https://github.com/mpapis)
|
||||
* [Mickaël Schoentgen](https://github.com/BoboTiG)
|
||||
* [Minn Soe](https://github.com/MinnSoe)
|
||||
* [Min RK](https://github.com/minrk)
|
||||
* [Miquel Ruiz](https://github.com/miquelruiz)
|
||||
@@ -224,6 +231,7 @@ Authors
|
||||
* [Stavros Korokithakis](https://github.com/skorokithakis)
|
||||
* [Stefan Weil](https://github.com/stweil)
|
||||
* [Steve Desmond](https://github.com/stevedesmond-ca)
|
||||
* [sydneyli](https://github.com/sydneyli)
|
||||
* [Tan Jay Jun](https://github.com/jayjun)
|
||||
* [Tapple Gao](https://github.com/tapple)
|
||||
* [Telepenin Nikolay](https://github.com/telepenin)
|
||||
|
||||
CHANGELOG.md (334 changed lines)
@@ -2,13 +2,339 @@
|
||||
|
||||
Certbot adheres to [Semantic Versioning](https://semver.org/).
|
||||
|
||||
## 0.33.0 - master
|
||||
## 1.0.0 - master
|
||||
|
||||
### Added
|
||||
|
||||
*
|
||||
|
||||
### Removed
|
||||
|
||||
* The `docs` extras for the `certbot-apache` and `certbot-nginx` packages
|
||||
have been removed.
|
||||
|
||||
### Changed
|
||||
|
||||
* certbot-auto has deprecated support for systems using OpenSSL 1.0.1 that are
|
||||
not running on x86-64. This primarily affects RHEL 6 based systems.
|
||||
* Certbot's `config_changes` subcommand has been removed
|
||||
* `certbot.plugins.common.TLSSNI01` has been removed.
|
||||
* Deprecated attributes related to the TLS-SNI-01 challenge in
|
||||
`acme.challenges` and `acme.standalone`
|
||||
have been removed.
|
||||
* The functions `certbot.client.view_config_changes`,
|
||||
`certbot.main.config_changes`,
|
||||
`certbot.plugins.common.Installer.view_config_changes`,
|
||||
`certbot.reverter.Reverter.view_config_changes`, and
|
||||
`certbot.util.get_systemd_os_info` have been removed
|
||||
* Certbot's `register --update-registration` subcommand has been removed
|
||||
|
||||
### Fixed
|
||||
|
||||
*
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.40.1 - 2019-11-05
|
||||
|
||||
### Changed
|
||||
|
||||
* Added back support for Python 3.4 to Certbot components and certbot-auto due
|
||||
to a bug when requiring Python 2.7 or 3.5+ on RHEL 6 based systems.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.40.0 - 2019-11-05
|
||||
|
||||
### Added
|
||||
|
||||
*
|
||||
|
||||
### Changed
|
||||
|
||||
* We deprecated support for Python 3.4 in Certbot and its ACME library. Support
|
||||
for Python 3.4 will be removed in the next major release of Certbot.
|
||||
certbot-auto users on RHEL 6 based systems will be asked to enable Software
|
||||
Collections (SCL) repository so Python 3.6 can be installed. certbot-auto can
|
||||
enable the SCL repo for you on CentOS 6 while users on other RHEL 6 based
|
||||
systems will be asked to do this manually.
|
||||
* `--server` may now be combined with `--dry-run`. Certbot will, as before, use the
|
||||
staging server instead of the live server when `--dry-run` is used.
|
||||
* `--dry-run` now requests fresh authorizations every time, fixing the issue
|
||||
where it was prone to falsely reporting success.
|
||||
* Updated certbot-dns-google to depend on newer versions of
|
||||
google-api-python-client and oauth2client.
|
||||
* The OS detection logic again uses distro library for Linux OSes
|
||||
* certbot.plugins.common.TLSSNI01 has been deprecated and will be removed in a
|
||||
future release.
|
||||
* CLI flags --tls-sni-01-port and --tls-sni-01-address have been removed.
|
||||
* The values tls-sni and tls-sni-01 for the --preferred-challenges flag are no
|
||||
longer accepted.
|
||||
* Removed the flags: `--agree-dev-preview`, `--dialog`, and `--apache-init-script`
|
||||
* acme.standalone.BaseRequestHandlerWithLogging and
|
||||
acme.standalone.simple_tls_sni_01_server have been deprecated and will be
|
||||
removed in a future release of the library.
|
||||
* certbot-dns-rfc2136 now uses TCP to query SOA records.
|
||||
|
||||
### Fixed
|
||||
|
||||
*
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.39.0 - 2019-10-01
|
||||
|
||||
### Added
|
||||
|
||||
* Support for Python 3.8 was added to Certbot and all of its components.
|
||||
* Support for CentOS 8 was added to certbot-auto.
|
||||
|
||||
### Changed
|
||||
|
||||
* Don't send OCSP requests for expired certificates
|
||||
* Return to using platform.linux_distribution instead of distro.linux_distribution in OS fingerprinting for Python < 3.8
|
||||
* Updated the Nginx plugin's TLS configuration to keep support for some versions of IE11.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fixed OS detection in the Apache plugin on RHEL 6.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.38.0 - 2019-09-03
|
||||
|
||||
### Added
|
||||
|
||||
* Disable session tickets for Nginx users when appropriate.
|
||||
|
||||
### Changed
|
||||
|
||||
* If Certbot fails to rollback your server configuration, the error message
|
||||
links to the Let's Encrypt forum. Change the link to the Help category now
|
||||
that the Server category has been closed.
|
||||
* Replace platform.linux_distribution with distro.linux_distribution as a step
|
||||
towards Python 3.8 support in Certbot.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fixed OS detection in the Apache plugin on Scientific Linux.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.37.2 - 2019-08-21
|
||||
|
||||
* Stop disabling TLS session tickets in Nginx as it caused TLS failures on
|
||||
some systems.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.37.1 - 2019-08-08
|
||||
|
||||
### Fixed
|
||||
|
||||
* Stop disabling TLS session tickets in Apache as it caused TLS failures on
|
||||
some systems.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.37.0 - 2019-08-07
|
||||
|
||||
### Added
|
||||
|
||||
* Turn off session tickets for apache plugin by default
|
||||
* acme: Authz deactivation added to `acme` module.
|
||||
|
||||
### Changed
|
||||
|
||||
* Follow updated Mozilla recommendations for Nginx ssl_protocols, ssl_ciphers,
|
||||
and ssl_prefer_server_ciphers
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix certbot-auto failures on RHEL 8.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.36.0 - 2019-07-11
|
||||
|
||||
### Added
|
||||
|
||||
* Turn off session tickets for nginx plugin by default
|
||||
* Added missing error types from RFC8555 to acme
|
||||
|
||||
### Changed
|
||||
|
||||
* Support for Ubuntu 14.04 Trusty has been removed.
|
||||
* Update the 'manage your account' help to be more generic.
|
||||
* The error message when Certbot's Apache plugin is unable to modify your
|
||||
Apache configuration has been improved.
|
||||
* Certbot's config_changes subcommand has been deprecated and will be
|
||||
removed in a future release.
|
||||
* `certbot config_changes` no longer accepts a --num parameter.
|
||||
* The functions `certbot.plugins.common.Installer.view_config_changes` and
|
||||
`certbot.reverter.Reverter.view_config_changes` have been deprecated and will
|
||||
be removed in a future release.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Replace some unnecessary platform-specific line separation.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.35.1 - 2019-06-10
|
||||
|
||||
### Fixed
|
||||
|
||||
* Support for specifying an authoritative base domain in our dns-rfc2136 plugin
|
||||
has been removed. This feature was added in our last release but had a bug
|
||||
which caused the plugin to fail so the feature has been removed until it can
|
||||
be added properly.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
package with changes other than its version number was:
|
||||
|
||||
* certbot-dns-rfc2136
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.35.0 - 2019-06-05
|
||||
|
||||
### Added
|
||||
|
||||
* dns_rfc2136 plugin now supports explicitly specifying an authoritative
  base domain for cases when the automatic method does not work (e.g.
  split-horizon DNS)
|
||||
|
||||
### Changed
|
||||
|
||||
*
|
||||
|
||||
### Fixed
|
||||
|
||||
* Renewal parameter `webroot_path` is always saved, avoiding some regressions
|
||||
when `webroot` authenticator plugin is invoked with no challenge to perform.
|
||||
* Certbot now accepts OCSP responses when an explicit authorized
|
||||
responder, different from the issuer, is used to sign OCSP
|
||||
responses.
|
||||
* Scripts in Certbot hook directories are no longer executed when their
|
||||
filenames end in a tilde.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
package with changes other than its version number was:
|
||||
|
||||
* certbot
|
||||
* certbot-dns-rfc2136
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.34.2 - 2019-05-07
|
||||
|
||||
### Fixed
|
||||
|
||||
* certbot-auto no longer writes a check_permissions.py script at the root
|
||||
of the filesystem.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
changes in this release were to certbot-auto.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.34.1 - 2019-05-06
|
||||
|
||||
### Fixed
|
||||
|
||||
* certbot-auto no longer prints a blank line when there are no permissions
|
||||
problems.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
changes in this release were to certbot-auto.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.34.0 - 2019-05-01
|
||||
|
||||
### Changed
|
||||
|
||||
* Apache plugin now tries to restart httpd on Fedora using systemctl if a
|
||||
configuration test error is detected. This has to be done due to the way
|
||||
Fedora now generates the self signed certificate files upon first
|
||||
restart.
|
||||
* Updated Certbot and its plugins to improve the handling of file system permissions
|
||||
on Windows as a step towards adding proper Windows support to Certbot.
|
||||
* Updated urllib3 to 1.24.2 in certbot-auto.
|
||||
* Removed the fallback introduced with 0.32.0 in `acme` to retry a challenge response
|
||||
with a `keyAuthorization` if sending the response without this field caused a
|
||||
`malformed` error to be received from the ACME server.
|
||||
* Linode DNS plugin now supports api keys created from their new panel
|
||||
at [cloud.linode.com](https://cloud.linode.com)
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fixed Google DNS Challenge issues when private zones exist
|
||||
* Adding a warning noting that future versions of Certbot will automatically configure the
|
||||
webserver so that all requests redirect to secure HTTPS access. You can control this
|
||||
behavior and disable this warning with the --redirect and --no-redirect flags.
|
||||
* certbot-auto now prints warnings when run as root with insecure file system
|
||||
permissions. If you see these messages, you should fix the problem by
|
||||
following the instructions at
|
||||
https://community.letsencrypt.org/t/certbot-auto-deployment-best-practices/91979/,
|
||||
however, these warnings can be disabled as necessary with the flag
|
||||
--no-permissions-check.
|
||||
* `acme` module uses now a POST-as-GET request to retrieve the registration
|
||||
from an ACME v2 server
|
||||
* Convert the tsig algorithm specified in the certbot_dns_rfc2136 configuration file to
|
||||
all uppercase letters before validating. This makes the value in the config case
|
||||
insensitive.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
package with changes other than its version number was:
|
||||
|
||||
* acme
|
||||
* certbot
|
||||
* certbot-apache
|
||||
* certbot-dns-cloudflare
|
||||
* certbot-dns-cloudxns
|
||||
* certbot-dns-digitalocean
|
||||
* certbot-dns-dnsimple
|
||||
* certbot-dns-dnsmadeeasy
|
||||
* certbot-dns-gehirn
|
||||
* certbot-dns-google
|
||||
* certbot-dns-linode
|
||||
* certbot-dns-luadns
|
||||
* certbot-dns-nsone
|
||||
* certbot-dns-ovh
|
||||
* certbot-dns-rfc2136
|
||||
* certbot-dns-route53
|
||||
* certbot-dns-sakuracloud
|
||||
* certbot-nginx
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.33.1 - 2019-04-04
|
||||
|
||||
### Fixed
|
||||
|
||||
* A bug causing certbot-auto to print warnings or crash on some RHEL based
|
||||
systems has been resolved.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
changes in this release were to certbot-auto.
|
||||
|
||||
More details about these changes can be found on our GitHub repo.
|
||||
|
||||
## 0.33.0 - 2019-04-03
|
||||
|
||||
### Added
|
||||
|
||||
* Fedora 29+ is now supported by certbot-auto. Since Python 2.x is on a deprecation
|
||||
path in Fedora, certbot-auto will install and use Python 3.x on Fedora 29+.
|
||||
* CLI flag `--http-port` has been added for Nginx plugin exclusively, and replaces
|
||||
* CLI flag `--https-port` has been added for Nginx plugin exclusively, and replaces
|
||||
`--tls-sni-01-port`. It defines the HTTPS port the Nginx plugin will use while
|
||||
setting up a new SSL vhost. By default the HTTPS port is 443.
|
||||
|
||||
@@ -28,11 +354,15 @@ Certbot adheres to [Semantic Versioning](https://semver.org/).
|
||||
* Certbot uses the Python library cryptography for OCSP when cryptography>=2.5
|
||||
is installed. We fixed a bug in Certbot causing it to interpret timestamps in
|
||||
the OCSP response as being in the local timezone rather than UTC.
|
||||
* Issue causing the default CentOS 6 TLS configuration to ignore some of the
|
||||
HTTPS VirtualHosts created by Certbot. mod_ssl loading is now moved to main
|
||||
http.conf for this environment where possible.
|
||||
|
||||
Despite us having broken lockstep, we are continuing to release new versions of
|
||||
all Certbot components during releases for the time being, however, the only
|
||||
package with changes other than its version number was:
|
||||
|
||||
* acme
|
||||
* certbot
|
||||
* certbot-apache
|
||||
* certbot-nginx
|
||||
|
||||
CODE_OF_CONDUCT.md (new file, 1 line)
@@ -0,0 +1 @@
|
||||
This project is governed by [EFF's Public Projects Code of Conduct](https://www.eff.org/pages/eppcode).
|
||||
@@ -33,3 +33,5 @@ started. In particular, we recommend you read these sections
|
||||
- [Finding issues to work on](https://certbot.eff.org/docs/contributing.html#find-issues-to-work-on)
|
||||
- [Coding style](https://certbot.eff.org/docs/contributing.html#coding-style)
|
||||
- [Submitting a pull request](https://certbot.eff.org/docs/contributing.html#submitting-a-pull-request)
|
||||
- [EFF's Public Projects Code of Conduct](https://www.eff.org/pages/eppcode)
|
||||
|
||||
|
||||
Dockerfile (35 changed lines)
@@ -1,35 +0,0 @@
FROM python:2-alpine3.9

ENTRYPOINT [ "certbot" ]
EXPOSE 80 443
VOLUME /etc/letsencrypt /var/lib/letsencrypt
WORKDIR /opt/certbot

COPY CHANGELOG.md README.rst setup.py src/

# Generate constraints file to pin dependency versions
COPY letsencrypt-auto-source/pieces/dependency-requirements.txt .
COPY tools /opt/certbot/tools
RUN sh -c 'cat dependency-requirements.txt | /opt/certbot/tools/strip_hashes.py > unhashed_requirements.txt'
RUN sh -c 'cat tools/dev_constraints.txt unhashed_requirements.txt | /opt/certbot/tools/merge_requirements.py > docker_constraints.txt'

COPY acme src/acme
COPY certbot src/certbot

RUN apk add --no-cache --virtual .certbot-deps \
        libffi \
        libssl1.1 \
        openssl \
        ca-certificates \
        binutils
RUN apk add --no-cache --virtual .build-deps \
        gcc \
        linux-headers \
        openssl-dev \
        musl-dev \
        libffi-dev \
    && pip install -r /opt/certbot/dependency-requirements.txt \
    && pip install --no-cache-dir --no-deps \
        --editable /opt/certbot/src/acme \
        --editable /opt/certbot/src \
    && apk del .build-deps

@@ -1,5 +1,5 @@
# This Dockerfile builds an image for development.
FROM ubuntu:xenial
FROM debian:buster

# Note: this only exposes the port to other docker containers.
EXPOSE 80 443

@@ -1,75 +0,0 @@
|
||||
# https://github.com/letsencrypt/letsencrypt/pull/431#issuecomment-103659297
|
||||
# it is more likely developers will already have ubuntu:trusty rather
|
||||
# than e.g. debian:jessie and image size differences are negligible
|
||||
FROM ubuntu:trusty
|
||||
MAINTAINER Jakub Warmuz <jakub@warmuz.org>
|
||||
MAINTAINER William Budington <bill@eff.org>
|
||||
|
||||
# Note: this only exposes the port to other docker containers. You
|
||||
# still have to bind to 443@host at runtime, as per the ACME spec.
|
||||
EXPOSE 443
|
||||
|
||||
# TODO: make sure --config-dir and --work-dir cannot be changed
|
||||
# through the CLI (certbot-docker wrapper that uses standalone
|
||||
# authenticator and text mode only?)
|
||||
VOLUME /etc/letsencrypt /var/lib/letsencrypt
|
||||
|
||||
WORKDIR /opt/certbot
|
||||
|
||||
# no need to mkdir anything:
|
||||
# https://docs.docker.com/reference/builder/#copy
|
||||
# If <dest> doesn't exist, it is created along with all missing
|
||||
# directories in its path.
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
COPY letsencrypt-auto-source/letsencrypt-auto /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto
|
||||
RUN /opt/certbot/src/letsencrypt-auto-source/letsencrypt-auto --os-packages-only && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/* \
|
||||
/tmp/* \
|
||||
/var/tmp/*
|
||||
|
||||
# the above is not likely to change, so by putting it further up the
|
||||
# Dockerfile we make sure we cache as much as possible
|
||||
|
||||
|
||||
COPY setup.py README.rst CHANGELOG.md MANIFEST.in letsencrypt-auto-source/pieces/pipstrap.py /opt/certbot/src/
|
||||
|
||||
# all above files are necessary for setup.py and venv setup, however,
|
||||
# package source code directory has to be copied separately to a
|
||||
# subdirectory...
|
||||
# https://docs.docker.com/reference/builder/#copy: "If <src> is a
|
||||
# directory, the entire contents of the directory are copied,
|
||||
# including filesystem metadata. Note: The directory itself is not
|
||||
# copied, just its contents." Order again matters, three files are far
|
||||
# more likely to be cached than the whole project directory
|
||||
|
||||
COPY certbot /opt/certbot/src/certbot/
|
||||
COPY acme /opt/certbot/src/acme/
|
||||
COPY certbot-apache /opt/certbot/src/certbot-apache/
|
||||
COPY certbot-nginx /opt/certbot/src/certbot-nginx/
|
||||
|
||||
|
||||
RUN VIRTUALENV_NO_DOWNLOAD=1 virtualenv --no-site-packages -p python2 /opt/certbot/venv
|
||||
|
||||
# PATH is set now so pipstrap upgrades the correct (v)env
|
||||
ENV PATH /opt/certbot/venv/bin:$PATH
|
||||
RUN /opt/certbot/venv/bin/python /opt/certbot/src/pipstrap.py && \
|
||||
/opt/certbot/venv/bin/pip install \
|
||||
-e /opt/certbot/src/acme \
|
||||
-e /opt/certbot/src \
|
||||
-e /opt/certbot/src/certbot-apache \
|
||||
-e /opt/certbot/src/certbot-nginx
|
||||
|
||||
# install in editable mode (-e) to save space: it's not possible to
|
||||
# "rm -rf /opt/certbot/src" (it's stays in the underlaying image);
|
||||
# this might also help in debugging: you can "docker run --entrypoint
|
||||
# bash" and investigate, apply patches, etc.
|
||||
|
||||
# set up certbot/letsencrypt wrapper to warn people about Dockerfile changes
|
||||
COPY tools/docker-warning.sh /opt/certbot/bin/certbot
|
||||
RUN ln -s /opt/certbot/bin/certbot /opt/certbot/bin/letsencrypt
|
||||
ENV PATH /opt/certbot/bin:$PATH
|
||||
|
||||
ENTRYPOINT [ "certbot" ]
|
||||
README.rst (40 lines)
@@ -28,45 +28,19 @@ Contributing
If you'd like to contribute to this project please read the `Developer Guide
<https://certbot.eff.org/docs/contributing.html>`_.

This project is governed by `EFF's Public Projects Code of Conduct <https://www.eff.org/pages/eppcode>`_.

.. _installation:

Installation
------------

The easiest way to install Certbot is by visiting `certbot.eff.org`_, where you can
find the correct installation instructions for many web server and OS combinations.
For more information, see `Get Certbot <https://certbot.eff.org/docs/install.html>`_.

.. _certbot.eff.org: https://certbot.eff.org/

How to run the client
|
||||
---------------------
|
||||
|
||||
In many cases, you can just run ``certbot-auto`` or ``certbot``, and the
|
||||
client will guide you through the process of obtaining and installing certs
|
||||
interactively.
|
||||
|
||||
For full command line help, you can type::
|
||||
|
||||
./certbot-auto --help all
|
||||
|
||||
|
||||
You can also tell it exactly what you want it to do from the command line.
|
||||
For instance, if you want to obtain a cert for ``example.com``,
|
||||
``www.example.com``, and ``other.example.net``, using the Apache plugin to both
|
||||
obtain and install the certs, you could do this::
|
||||
|
||||
./certbot-auto --apache -d example.com -d www.example.com -d other.example.net
|
||||
|
||||
(The first time you run the command, it will make an account, and ask for an
|
||||
email and agreement to the Let's Encrypt Subscriber Agreement; you can
|
||||
automate those with ``--email`` and ``--agree-tos``)
|
||||
|
||||
If you want to use a webserver that doesn't have full plugin support yet, you
|
||||
can still use "standalone" or "webroot" plugins to obtain a certificate::
|
||||
|
||||
./certbot-auto certonly --standalone --email admin@example.com -d example.com -d www.example.com -d other.example.net
|
||||
The easiest way to install and run Certbot is by visiting `certbot.eff.org`_,
|
||||
where you can find the correct instructions for many web server and OS
|
||||
combinations. For more information, see `Get Certbot
|
||||
<https://certbot.eff.org/docs/install.html>`_.
|
||||
|
||||
.. _certbot.eff.org: https://certbot.eff.org/
|
||||
|
||||
Understanding the client in more depth
|
||||
--------------------------------------
|
||||
|
||||
@@ -21,30 +21,3 @@ for mod in list(sys.modules):
    # preserved (acme.jose.* is josepy.*)
    if mod == 'josepy' or mod.startswith('josepy.'):
        sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]


# This class takes a similar approach to the cryptography project to deprecate attributes
# in public modules. See the _ModuleWithDeprecation class here:
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
class _TLSSNI01DeprecationModule(object):
    """
    Internal class delegating to a module, and displaying warnings when
    attributes related to TLS-SNI-01 are accessed.
    """
    def __init__(self, module):
        self.__dict__['_module'] = module

    def __getattr__(self, attr):
        if 'TLSSNI01' in attr:
            warnings.warn('{0} attribute is deprecated, and will be removed soon.'.format(attr),
                          DeprecationWarning, stacklevel=2)
        return getattr(self._module, attr)

    def __setattr__(self, attr, value):  # pragma: no cover
        setattr(self._module, attr, value)

    def __delattr__(self, attr):  # pragma: no cover
        delattr(self._module, attr)

    def __dir__(self):  # pragma: no cover
        return ['_module'] + dir(self._module)

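As a rough illustration of how this delegation pattern behaves (a minimal sketch, not part of the diff; the stand-in `demo` module and its attributes are invented for demonstration, and the class above is assumed to be in scope):

import sys
import types
import warnings

# Hypothetical stand-in module (illustration only); the real code wraps
# acme.challenges and acme.standalone the same way.
demo = types.ModuleType("demo")
demo.TLSSNI01_PORT = 443
demo.HTTP01_PORT = 80
sys.modules["demo"] = _TLSSNI01DeprecationModule(demo)

wrapped = sys.modules["demo"]
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    wrapped.TLSSNI01_PORT   # name contains 'TLSSNI01' -> DeprecationWarning
    wrapped.HTTP01_PORT     # any other attribute is delegated silently
assert any(issubclass(w.category, DeprecationWarning) for w in caught)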
@@ -3,28 +3,19 @@ import abc
|
||||
import functools
|
||||
import hashlib
|
||||
import logging
|
||||
import socket
|
||||
import sys
|
||||
|
||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
import requests
|
||||
import six
|
||||
|
||||
from acme import errors
|
||||
from acme import crypto_util
|
||||
from acme import fields
|
||||
from acme import _TLSSNI01DeprecationModule
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
|
||||
class Challenge(jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json | pylint: disable=abstract-method
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge."""
|
||||
TYPES = {} # type: dict
|
||||
|
||||
@@ -38,7 +29,7 @@ class Challenge(jose.TypedJSONObjectWithFields):
|
||||
|
||||
|
||||
class ChallengeResponse(jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json | pylint: disable=abstract-method
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge response."""
|
||||
TYPES = {} # type: dict
|
||||
resource_type = 'challenge'
|
||||
@@ -97,6 +88,7 @@ class _TokenChallenge(Challenge):
|
||||
"""
|
||||
# TODO: check that path combined with uri does not go above
|
||||
# URI_ROOT_PATH!
|
||||
# pylint: disable=unsupported-membership-test
|
||||
return b'..' not in self.token and b'/' not in self.token
|
||||
|
||||
|
||||
@@ -109,10 +101,6 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
key_authorization = jose.Field("keyAuthorization")
|
||||
thumbprint_hash_function = hashes.SHA256
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(KeyAuthorizationChallengeResponse, self).__init__(*args, **kwargs)
|
||||
self._dump_authorization_key(False)
|
||||
|
||||
def verify(self, chall, account_public_key):
|
||||
"""Verify the key authorization.
|
||||
|
||||
@@ -145,26 +133,14 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):

        return True

    def _dump_authorization_key(self, dump):
        # type: (bool) -> None
        """
        Set if keyAuthorization is dumped in the JSON representation of this ChallengeResponse.
        NB: This method is declared as private because it will eventually be removed.
        :param bool dump: True to dump the keyAuthorization, False otherwise
        """
        object.__setattr__(self, '_dump_auth_key', dump)

    def to_partial_json(self):
        jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
        if not self._dump_auth_key:  # pylint: disable=no-member
            jobj.pop('keyAuthorization', None)

        jobj.pop('keyAuthorization', None)
        return jobj
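A small sketch of the resulting serialization behavior (not part of the diff; the token value is made up, and the assertion mirrors the to_partial_json tests later in this diff):

from acme import challenges

resp = challenges.HTTP01Response(key_authorization='token.thumbprint')
# keyAuthorization is no longer echoed back to the ACME server by default.
assert 'keyAuthorization' not in resp.to_partial_json()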
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class KeyAuthorizationChallenge(_TokenChallenge):
|
||||
# pylint: disable=abstract-class-little-used,too-many-ancestors
|
||||
"""Challenge based on Key Authorization.
|
||||
|
||||
:param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
|
||||
@@ -196,7 +172,7 @@ class KeyAuthorizationChallenge(_TokenChallenge):
|
||||
:rtype: KeyAuthorizationChallengeResponse
|
||||
|
||||
"""
|
||||
return self.response_cls(
|
||||
return self.response_cls( # pylint: disable=not-callable
|
||||
key_authorization=self.key_authorization(account_key))
|
||||
|
||||
@abc.abstractmethod
|
||||
@@ -233,7 +209,7 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
|
||||
"""ACME dns-01 challenge response."""
|
||||
typ = "dns-01"
|
||||
|
||||
def simple_verify(self, chall, domain, account_public_key):
|
||||
def simple_verify(self, chall, domain, account_public_key): # pylint: disable=unused-argument
|
||||
"""Simple verify.
|
||||
|
||||
This method no longer checks DNS records and is a simple wrapper
|
||||
@@ -249,14 +225,13 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
# pylint: disable=unused-argument
|
||||
verified = self.verify(chall, account_public_key)
|
||||
if not verified:
|
||||
logger.debug("Verification of key authorization in response failed")
|
||||
return verified
|
||||
|
||||
|
||||
@Challenge.register # pylint: disable=too-many-ancestors
|
||||
@Challenge.register
|
||||
class DNS01(KeyAuthorizationChallenge):
|
||||
"""ACME dns-01 challenge."""
|
||||
response_cls = DNS01Response
|
||||
@@ -346,7 +321,7 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
|
||||
return True
|
||||
|
||||
|
||||
@Challenge.register # pylint: disable=too-many-ancestors
|
||||
@Challenge.register
|
||||
class HTTP01(KeyAuthorizationChallenge):
|
||||
"""ACME http-01 challenge."""
|
||||
response_cls = HTTP01Response
|
||||
@@ -386,152 +361,6 @@ class HTTP01(KeyAuthorizationChallenge):
|
||||
return self.key_authorization(account_key)
|
||||
|
||||
|
||||
@ChallengeResponse.register
|
||||
class TLSSNI01Response(KeyAuthorizationChallengeResponse):
|
||||
"""ACME tls-sni-01 challenge response."""
|
||||
typ = "tls-sni-01"
|
||||
|
||||
DOMAIN_SUFFIX = b".acme.invalid"
|
||||
"""Domain name suffix."""
|
||||
|
||||
PORT = 443
|
||||
"""Verification port as defined by the protocol.
|
||||
|
||||
You can override it (e.g. for testing) by passing ``port`` to
|
||||
`simple_verify`.
|
||||
|
||||
"""
|
||||
|
||||
@property
|
||||
def z(self): # pylint: disable=invalid-name
|
||||
"""``z`` value used for verification.
|
||||
|
||||
:rtype bytes:
|
||||
|
||||
"""
|
||||
return hashlib.sha256(
|
||||
self.key_authorization.encode("utf-8")).hexdigest().lower().encode()
|
||||
|
||||
@property
|
||||
def z_domain(self):
|
||||
"""Domain name used for verification, generated from `z`.
|
||||
|
||||
:rtype bytes:
|
||||
|
||||
"""
|
||||
return self.z[:32] + b'.' + self.z[32:] + self.DOMAIN_SUFFIX
|
||||
|
||||
def gen_cert(self, key=None, bits=2048):
|
||||
"""Generate tls-sni-01 certificate.
|
||||
|
||||
:param OpenSSL.crypto.PKey key: Optional private key used in
|
||||
certificate generation. If not provided (``None``), then
|
||||
fresh key will be generated.
|
||||
:param int bits: Number of bits for newly generated key.
|
||||
|
||||
:rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
|
||||
|
||||
"""
|
||||
if key is None:
|
||||
key = OpenSSL.crypto.PKey()
|
||||
key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)
|
||||
return crypto_util.gen_ss_cert(key, [
|
||||
# z_domain is too big to fit into CN, hence first dummy domain
|
||||
'dummy', self.z_domain.decode()], force_san=True), key
|
||||
|
||||
def probe_cert(self, domain, **kwargs):
|
||||
"""Probe tls-sni-01 challenge certificate.
|
||||
|
||||
:param unicode domain:
|
||||
|
||||
"""
|
||||
# TODO: domain is not necessary if host is provided
|
||||
if "host" not in kwargs:
|
||||
host = socket.gethostbyname(domain)
|
||||
logger.debug('%s resolved to %s', domain, host)
|
||||
kwargs["host"] = host
|
||||
|
||||
kwargs.setdefault("port", self.PORT)
|
||||
kwargs["name"] = self.z_domain
|
||||
# TODO: try different methods?
|
||||
# pylint: disable=protected-access
|
||||
return crypto_util.probe_sni(**kwargs)
|
||||
|
||||
def verify_cert(self, cert):
|
||||
"""Verify tls-sni-01 challenge certificate.
|
||||
|
||||
:param OpensSSL.crypto.X509 cert: Challenge certificate.
|
||||
|
||||
:returns: Whether the certificate was successfully verified.
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
# pylint: disable=protected-access
|
||||
sans = crypto_util._pyopenssl_cert_or_req_san(cert)
|
||||
logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), sans)
|
||||
return self.z_domain.decode() in sans
|
||||
|
||||
def simple_verify(self, chall, domain, account_public_key,
|
||||
cert=None, **kwargs):
|
||||
"""Simple verify.
|
||||
|
||||
Verify ``validation`` using ``account_public_key``, optionally
|
||||
probe tls-sni-01 certificate and check using `verify_cert`.
|
||||
|
||||
:param .challenges.TLSSNI01 chall: Corresponding challenge.
|
||||
:param str domain: Domain name being validated.
|
||||
:param JWK account_public_key:
|
||||
:param OpenSSL.crypto.X509 cert: Optional certificate. If not
|
||||
provided (``None``) certificate will be retrieved using
|
||||
`probe_cert`.
|
||||
:param int port: Port used to probe the certificate.
|
||||
|
||||
|
||||
:returns: ``True`` iff client's control of the domain has been
|
||||
verified.
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
if not self.verify(chall, account_public_key):
|
||||
logger.debug("Verification of key authorization in response failed")
|
||||
return False
|
||||
|
||||
if cert is None:
|
||||
try:
|
||||
cert = self.probe_cert(domain=domain, **kwargs)
|
||||
except errors.Error as error:
|
||||
logger.debug(str(error), exc_info=True)
|
||||
return False
|
||||
|
||||
return self.verify_cert(cert)
|
||||
|
||||
|
||||
@Challenge.register # pylint: disable=too-many-ancestors
|
||||
class TLSSNI01(KeyAuthorizationChallenge):
|
||||
"""ACME tls-sni-01 challenge."""
|
||||
response_cls = TLSSNI01Response
|
||||
typ = response_cls.typ
|
||||
|
||||
# boulder#962, ietf-wg-acme#22
|
||||
#n = jose.Field("n", encoder=int, decoder=int)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(TLSSNI01, self).__init__(*args, **kwargs)
|
||||
|
||||
def validation(self, account_key, **kwargs):
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
:param OpenSSL.crypto.PKey cert_key: Optional private key used
|
||||
in certificate generation. If not provided (``None``), then
|
||||
fresh key will be generated.
|
||||
|
||||
:rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
|
||||
|
||||
"""
|
||||
return self.response(account_key).gen_cert(key=kwargs.get('cert_key'))
|
||||
|
||||
|
||||
@ChallengeResponse.register
|
||||
class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
"""ACME TLS-ALPN-01 challenge response.
|
||||
@@ -543,7 +372,7 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
typ = "tls-alpn-01"
|
||||
|
||||
|
||||
@Challenge.register # pylint: disable=too-many-ancestors
|
||||
@Challenge.register
|
||||
class TLSALPN01(KeyAuthorizationChallenge):
|
||||
"""ACME tls-alpn-01 challenge.
|
||||
|
||||
@@ -559,7 +388,7 @@ class TLSALPN01(KeyAuthorizationChallenge):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@Challenge.register # pylint: disable=too-many-ancestors
|
||||
@Challenge.register
|
||||
class DNS(_TokenChallenge):
|
||||
"""ACME "dns" challenge."""
|
||||
typ = "dns"
|
||||
@@ -640,7 +469,3 @@ class DNSResponse(ChallengeResponse):
|
||||
|
||||
"""
|
||||
return chall.check_validation(self.validation, account_public_key)
|
||||
|
||||
|
||||
# Patching ourselves to warn about TLS-SNI challenge deprecation and removal.
|
||||
sys.modules[__name__] = _TLSSNI01DeprecationModule(sys.modules[__name__])
|
||||
|
||||
@@ -3,12 +3,10 @@ import unittest
|
||||
|
||||
import josepy as jose
|
||||
import mock
|
||||
import OpenSSL
|
||||
import requests
|
||||
|
||||
from six.moves.urllib import parse as urllib_parse # pylint: disable=import-error
|
||||
from six.moves.urllib import parse as urllib_parse # pylint: disable=relative-import
|
||||
|
||||
from acme import errors
|
||||
from acme import test_util
|
||||
|
||||
CERT = test_util.load_comparable_cert('cert.pem')
|
||||
@@ -77,7 +75,6 @@ class KeyAuthorizationChallengeResponseTest(unittest.TestCase):
|
||||
|
||||
|
||||
class DNS01ResponseTest(unittest.TestCase):
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
def setUp(self):
|
||||
from acme.challenges import DNS01Response
|
||||
@@ -95,8 +92,6 @@ class DNS01ResponseTest(unittest.TestCase):
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.msg.to_partial_json())
|
||||
self.msg._dump_authorization_key(True) # pylint: disable=protected-access
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import DNS01Response
|
||||
@@ -151,7 +146,6 @@ class DNS01Test(unittest.TestCase):
|
||||
|
||||
|
||||
class HTTP01ResponseTest(unittest.TestCase):
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
def setUp(self):
|
||||
from acme.challenges import HTTP01Response
|
||||
@@ -169,8 +163,6 @@ class HTTP01ResponseTest(unittest.TestCase):
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.msg.to_partial_json())
|
||||
self.msg._dump_authorization_key(True) # pylint: disable=protected-access
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import HTTP01Response
|
||||
@@ -263,154 +255,7 @@ class HTTP01Test(unittest.TestCase):
|
||||
self.msg.update(token=b'..').good_token)
|
||||
|
||||
|
||||
class TLSSNI01ResponseTest(unittest.TestCase):
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
def setUp(self):
|
||||
from acme.challenges import TLSSNI01
|
||||
self.chall = TLSSNI01(
|
||||
token=jose.b64decode(b'a82d5ff8ef740d12881f6d3c2277ab2e'))
|
||||
|
||||
self.response = self.chall.response(KEY)
|
||||
self.jmsg = {
|
||||
'resource': 'challenge',
|
||||
'type': 'tls-sni-01',
|
||||
'keyAuthorization': self.response.key_authorization,
|
||||
}
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
label1 = b'dc38d9c3fa1a4fdcc3a5501f2d38583f'
|
||||
label2 = b'b7793728f084394f2a1afd459556bb5c'
|
||||
self.z = label1 + label2
|
||||
self.z_domain = label1 + b'.' + label2 + b'.acme.invalid'
|
||||
self.domain = 'foo.com'
|
||||
|
||||
def test_z_and_domain(self):
|
||||
self.assertEqual(self.z, self.response.z)
|
||||
self.assertEqual(self.z_domain, self.response.z_domain)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.response.to_partial_json())
|
||||
self.response._dump_authorization_key(True) # pylint: disable=protected-access
|
||||
self.assertEqual(self.jmsg, self.response.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import TLSSNI01Response
|
||||
self.assertEqual(self.response, TLSSNI01Response.from_json(self.jmsg))
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import TLSSNI01Response
|
||||
hash(TLSSNI01Response.from_json(self.jmsg))
|
||||
|
||||
@mock.patch('acme.challenges.socket.gethostbyname')
|
||||
@mock.patch('acme.challenges.crypto_util.probe_sni')
|
||||
def test_probe_cert(self, mock_probe_sni, mock_gethostbyname):
|
||||
mock_gethostbyname.return_value = '127.0.0.1'
|
||||
self.response.probe_cert('foo.com')
|
||||
mock_gethostbyname.assert_called_once_with('foo.com')
|
||||
mock_probe_sni.assert_called_once_with(
|
||||
host='127.0.0.1', port=self.response.PORT,
|
||||
name=self.z_domain)
|
||||
|
||||
self.response.probe_cert('foo.com', host='8.8.8.8')
|
||||
mock_probe_sni.assert_called_with(
|
||||
host='8.8.8.8', port=mock.ANY, name=mock.ANY)
|
||||
|
||||
self.response.probe_cert('foo.com', port=1234)
|
||||
mock_probe_sni.assert_called_with(
|
||||
host=mock.ANY, port=1234, name=mock.ANY)
|
||||
|
||||
self.response.probe_cert('foo.com', bar='baz')
|
||||
mock_probe_sni.assert_called_with(
|
||||
host=mock.ANY, port=mock.ANY, name=mock.ANY, bar='baz')
|
||||
|
||||
self.response.probe_cert('foo.com', name=b'xxx')
|
||||
mock_probe_sni.assert_called_with(
|
||||
host=mock.ANY, port=mock.ANY,
|
||||
name=self.z_domain)
|
||||
|
||||
def test_gen_verify_cert(self):
|
||||
key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
|
||||
cert, key2 = self.response.gen_cert(key1)
|
||||
self.assertEqual(key1, key2)
|
||||
self.assertTrue(self.response.verify_cert(cert))
|
||||
|
||||
def test_gen_verify_cert_gen_key(self):
|
||||
cert, key = self.response.gen_cert()
|
||||
self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
|
||||
self.assertTrue(self.response.verify_cert(cert))
|
||||
|
||||
def test_verify_bad_cert(self):
|
||||
self.assertFalse(self.response.verify_cert(
|
||||
test_util.load_cert('cert.pem')))
|
||||
|
||||
def test_simple_verify_bad_key_authorization(self):
|
||||
key2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
|
||||
self.response.simple_verify(self.chall, "local", key2.public_key())
|
||||
|
||||
@mock.patch('acme.challenges.TLSSNI01Response.verify_cert', autospec=True)
|
||||
def test_simple_verify(self, mock_verify_cert):
|
||||
mock_verify_cert.return_value = mock.sentinel.verification
|
||||
self.assertEqual(
|
||||
mock.sentinel.verification, self.response.simple_verify(
|
||||
self.chall, self.domain, KEY.public_key(),
|
||||
cert=mock.sentinel.cert))
|
||||
mock_verify_cert.assert_called_once_with(
|
||||
self.response, mock.sentinel.cert)
|
||||
|
||||
@mock.patch('acme.challenges.TLSSNI01Response.probe_cert')
|
||||
def test_simple_verify_false_on_probe_error(self, mock_probe_cert):
|
||||
mock_probe_cert.side_effect = errors.Error
|
||||
self.assertFalse(self.response.simple_verify(
|
||||
self.chall, self.domain, KEY.public_key()))
|
||||
|
||||
|
||||
class TLSSNI01Test(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.jmsg = {
|
||||
'type': 'tls-sni-01',
|
||||
'token': 'a82d5ff8ef740d12881f6d3c2277ab2e',
|
||||
}
|
||||
from acme.challenges import TLSSNI01
|
||||
self.msg = TLSSNI01(
|
||||
token=jose.b64decode('a82d5ff8ef740d12881f6d3c2277ab2e'))
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import TLSSNI01
|
||||
self.assertEqual(self.msg, TLSSNI01.from_json(self.jmsg))
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import TLSSNI01
|
||||
hash(TLSSNI01.from_json(self.jmsg))
|
||||
|
||||
def test_from_json_invalid_token_length(self):
|
||||
from acme.challenges import TLSSNI01
|
||||
self.jmsg['token'] = jose.encode_b64jose(b'abcd')
|
||||
self.assertRaises(
|
||||
jose.DeserializationError, TLSSNI01.from_json, self.jmsg)
|
||||
|
||||
@mock.patch('acme.challenges.TLSSNI01Response.gen_cert')
|
||||
def test_validation(self, mock_gen_cert):
|
||||
mock_gen_cert.return_value = ('cert', 'key')
|
||||
self.assertEqual(('cert', 'key'), self.msg.validation(
|
||||
KEY, cert_key=mock.sentinel.cert_key))
|
||||
mock_gen_cert.assert_called_once_with(key=mock.sentinel.cert_key)
|
||||
|
||||
def test_deprecation_message(self):
|
||||
with mock.patch('acme.warnings.warn') as mock_warn:
|
||||
from acme.challenges import TLSSNI01
|
||||
assert TLSSNI01
|
||||
self.assertEqual(mock_warn.call_count, 1)
|
||||
self.assertTrue('deprecated' in mock_warn.call_args[0][0])
|
||||
|
||||
|
||||
class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
def setUp(self):
|
||||
from acme.challenges import TLSALPN01Response
|
||||
@@ -428,8 +273,6 @@ class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.msg.to_partial_json())
|
||||
self.msg._dump_authorization_key(True) # pylint: disable=protected-access
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import TLSALPN01Response
|
||||
|
||||
@@ -6,18 +6,17 @@ from email.utils import parsedate_tz
|
||||
import heapq
|
||||
import logging
|
||||
import time
|
||||
import re
|
||||
import sys
|
||||
|
||||
import six
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
import josepy as jose
|
||||
import OpenSSL
|
||||
import re
|
||||
from requests_toolbelt.adapters.source import SourceAddressAdapter
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
import sys
|
||||
from requests_toolbelt.adapters.source import SourceAddressAdapter
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import jws
|
||||
@@ -45,7 +44,7 @@ DEFAULT_NETWORK_TIMEOUT = 45
|
||||
DER_CONTENT_TYPE = 'application/pkix-cert'
|
||||
|
||||
|
||||
class ClientBase(object): # pylint: disable=too-many-instance-attributes
|
||||
class ClientBase(object):
|
||||
"""ACME client base object.
|
||||
|
||||
:ivar messages.Directory directory:
|
||||
@@ -124,14 +123,21 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
        """
        return self.update_registration(regr, update={'status': 'deactivated'})

    def query_registration(self, regr):
        """Query server about registration.
    def deactivate_authorization(self, authzr):
        # type: (messages.AuthorizationResource) -> messages.AuthorizationResource
        """Deactivate authorization.

        :param messages.RegistrationResource: Existing Registration
            Resource.
        :param messages.AuthorizationResource authzr: The Authorization resource
            to be deactivated.

        :returns: The Authorization resource that was deactivated.
        :rtype: `.AuthorizationResource`

        """
        return self._send_recv_regr(regr, messages.UpdateRegistration())
        body = messages.UpdateAuthorization(status='deactivated')
        response = self._post(authzr.uri, body)
        return self._authzr_from_response(response,
                                          authzr.body.identifier, authzr.uri)
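A short usage sketch for the new call (not part of the diff; `client` and `orderr` are assumed to be an existing ClientBase subclass instance and an OrderResource):

from acme import messages

# Deactivate every authorization attached to an order, e.g. to clean up
# after the certificate has been issued.
for authzr in orderr.authorizations:
    deactivated = client.deactivate_authorization(authzr)
    assert deactivated.body.status == messages.STATUS_DEACTIVATED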
|
||||
|
||||
def _authzr_from_response(self, response, identifier=None, uri=None):
|
||||
authzr = messages.AuthorizationResource(
|
||||
@@ -156,23 +162,7 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes
|
||||
:raises .UnexpectedUpdate:
|
||||
|
||||
"""
|
||||
# Because sending keyAuthorization in a response challenge has been removed from the ACME
|
||||
# spec, it is not included in the KeyAuthorizationResponseChallenge JSON by default.
|
||||
# However as a migration path, we temporarily expect a malformed error from the server,
|
||||
# and fallback by resending the challenge response with the keyAuthorization field.
|
||||
# TODO: Remove this fallback for Certbot 0.34.0
|
||||
try:
|
||||
response = self._post(challb.uri, response)
|
||||
except messages.Error as error:
|
||||
if (error.code == 'malformed'
|
||||
and isinstance(response, challenges.KeyAuthorizationChallengeResponse)):
|
||||
logger.debug('Error while responding to a challenge without keyAuthorization '
|
||||
'in the JWS, your ACME CA server may not support it:\n%s', error)
|
||||
logger.debug('Retrying request with keyAuthorization set.')
|
||||
response._dump_authorization_key(True) # pylint: disable=protected-access
|
||||
response = self._post(challb.uri, response)
|
||||
else:
|
||||
raise
|
||||
response = self._post(challb.uri, response)
|
||||
try:
|
||||
authzr_uri = response.links['up']['url']
|
||||
except KeyError:
|
||||
@@ -264,7 +254,6 @@ class Client(ClientBase):
|
||||
URI from which the resource will be downloaded.
|
||||
|
||||
"""
|
||||
# pylint: disable=too-many-arguments
|
||||
self.key = key
|
||||
if net is None:
|
||||
net = ClientNetwork(key, alg=alg, verify_ssl=verify_ssl)
|
||||
@@ -293,6 +282,15 @@ class Client(ClientBase):
|
||||
# pylint: disable=no-member
|
||||
return self._regr_from_response(response)
|
||||
|
||||
def query_registration(self, regr):
|
||||
"""Query server about registration.
|
||||
|
||||
:param messages.RegistrationResource: Existing Registration
|
||||
Resource.
|
||||
|
||||
"""
|
||||
return self._send_recv_regr(regr, messages.UpdateRegistration())
|
||||
|
||||
def agree_to_tos(self, regr):
|
||||
"""Agree to the terms-of-service.
|
||||
|
||||
@@ -436,7 +434,6 @@ class Client(ClientBase):
|
||||
was marked by the CA as invalid
|
||||
|
||||
"""
|
||||
# pylint: disable=too-many-locals
|
||||
assert max_attempts > 0
|
||||
attempts = collections.defaultdict(int) # type: Dict[messages.AuthorizationResource, int]
|
||||
exhausted = set()
|
||||
@@ -620,10 +617,13 @@ class ClientV2(ClientBase):
            Resource.

        """
        self.net.account = regr
        updated_regr = super(ClientV2, self).query_registration(regr)
        self.net.account = updated_regr
        return updated_regr
        self.net.account = regr  # See certbot/certbot#6258
        # ACME v2 requires the use of a POST-as-GET request here (a POST with an
        # empty JWS payload), which is done by passing None instead of an empty
        # UpdateRegistration to _post().
        response = self._post(regr.uri, None)
        self.net.account = self._regr_from_response(response, uri=regr.uri,
                                                     terms_of_service=regr.terms_of_service)
        return self.net.account
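For context, a POST-as-GET is the same JWS-signed POST with an empty payload; a rough sketch of the distinction (illustrative only; `client`, `regr`, and `new_contact` are assumed to exist):

# Regular POST: the payload is a JSON-serializable message object.
client._post(regr.uri, messages.UpdateRegistration(contact=new_contact))

# POST-as-GET (reads in ACME v2): the payload is None, which the JWS layer
# serializes as an empty string.
client._post(regr.uri, None)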
|
||||
|
||||
def update_registration(self, regr, update=None):
|
||||
"""Update registration.
|
||||
@@ -669,7 +669,7 @@ class ClientV2(ClientBase):
|
||||
response = self._post(self.directory['newOrder'], order)
|
||||
body = messages.Order.from_json(response.json())
|
||||
authorizations = []
|
||||
for url in body.authorizations:
|
||||
for url in body.authorizations: # pylint: disable=not-an-iterable
|
||||
authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
|
||||
return messages.OrderResource(
|
||||
body=body,
|
||||
@@ -731,7 +731,7 @@ class ClientV2(ClientBase):
|
||||
for chall in authzr.body.challenges:
|
||||
if chall.error != None:
|
||||
failed.append(authzr)
|
||||
if len(failed) > 0:
|
||||
if failed:
|
||||
raise errors.ValidationError(failed)
|
||||
return orderr.update(authorizations=responses)
|
||||
|
||||
@@ -775,10 +775,7 @@ class ClientV2(ClientBase):
|
||||
|
||||
def external_account_required(self):
|
||||
"""Checks if ACME server requires External Account Binding authentication."""
|
||||
if hasattr(self.directory, 'meta') and self.directory.meta.external_account_required:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return hasattr(self.directory, 'meta') and self.directory.meta.external_account_required
|
||||
|
||||
def _post_as_get(self, *args, **kwargs):
|
||||
"""
|
||||
@@ -794,7 +791,7 @@ class ClientV2(ClientBase):
|
||||
# We add an empty payload for POST-as-GET requests
|
||||
new_args = args[:1] + (None,) + args[1:]
|
||||
try:
|
||||
return self._post(*new_args, **kwargs) # pylint: disable=star-args
|
||||
return self._post(*new_args, **kwargs)
|
||||
except messages.Error as error:
|
||||
if error.code == 'malformed':
|
||||
logger.debug('Error during a POST-as-GET request, '
|
||||
@@ -882,8 +879,7 @@ class BackwardsCompatibleClientV2(object):
|
||||
for domain in dnsNames:
|
||||
authorizations.append(self.client.request_domain_challenges(domain))
|
||||
return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
|
||||
else:
|
||||
return self.client.new_order(csr_pem)
|
||||
return self.client.new_order(csr_pem)
|
||||
|
||||
def finalize_order(self, orderr, deadline):
|
||||
"""Finalize an order and obtain a certificate.
|
||||
@@ -920,8 +916,7 @@ class BackwardsCompatibleClientV2(object):
|
||||
chain = crypto_util.dump_pyopenssl_chain(chain).decode()
|
||||
|
||||
return orderr.update(fullchain_pem=(cert + chain))
|
||||
else:
|
||||
return self.client.finalize_order(orderr, deadline)
|
||||
return self.client.finalize_order(orderr, deadline)
|
||||
|
||||
def revoke(self, cert, rsn):
|
||||
"""Revoke certificate.
|
||||
@@ -939,8 +934,7 @@ class BackwardsCompatibleClientV2(object):
|
||||
def _acme_version_from_directory(self, directory):
|
||||
if hasattr(directory, 'newNonce'):
|
||||
return 2
|
||||
else:
|
||||
return 1
|
||||
return 1
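A sketch of the detection logic in isolation (not part of the diff; `net` and `server_url` are assumed to exist):

directory = messages.Directory.from_json(net.get(server_url).json())
# Only ACME v2 directories advertise a newNonce endpoint.
acme_version = 2 if hasattr(directory, 'newNonce') else 1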
|
||||
|
||||
def external_account_required(self):
|
||||
"""Checks if the server requires an external account for ACMEv2 servers.
|
||||
@@ -948,11 +942,10 @@ class BackwardsCompatibleClientV2(object):
|
||||
Always return False for ACMEv1 servers, as it doesn't use External Account Binding."""
|
||||
if self.acme_version == 1:
|
||||
return False
|
||||
else:
|
||||
return self.client.external_account_required()
|
||||
return self.client.external_account_required()
|
||||
|
||||
|
||||
class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
class ClientNetwork(object):
|
||||
"""Wrapper around requests that signs POSTs for authentication.
|
||||
|
||||
Also adds user agent, and handles Content-Type.
|
||||
@@ -978,7 +971,6 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, key, account=None, alg=jose.RS256, verify_ssl=True,
|
||||
user_agent='acme-python', timeout=DEFAULT_NETWORK_TIMEOUT,
|
||||
source_address=None):
|
||||
# pylint: disable=too-many-arguments
|
||||
self.key = key
|
||||
self.account = account
|
||||
self.alg = alg
|
||||
@@ -1027,7 +1019,6 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
if self.account is not None:
|
||||
kwargs["kid"] = self.account["uri"]
|
||||
kwargs["key"] = self.key
|
||||
# pylint: disable=star-args
|
||||
return jws.JWS.sign(jobj, **kwargs).json_dumps(indent=2)
|
||||
|
||||
@classmethod
|
||||
@@ -1087,7 +1078,6 @@ class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
|
||||
return response
|
||||
|
||||
def _send_request(self, method, url, *args, **kwargs):
|
||||
# pylint: disable=too-many-locals
|
||||
"""Send HTTP request.
|
||||
|
||||
Makes sure that `verify_ssl` is respected. Logs request and
|
||||
|
||||
@@ -64,7 +64,7 @@ class ClientTestBase(unittest.TestCase):
|
||||
reg = messages.Registration(
|
||||
contact=self.contact, key=KEY.public_key())
|
||||
the_arg = dict(reg) # type: Dict
|
||||
self.new_reg = messages.NewRegistration(**the_arg) # pylint: disable=star-args
|
||||
self.new_reg = messages.NewRegistration(**the_arg)
|
||||
self.regr = messages.RegistrationResource(
|
||||
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
|
||||
|
||||
@@ -318,7 +318,6 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
|
||||
|
||||
class ClientTest(ClientTestBase):
|
||||
"""Tests for acme.client.Client."""
|
||||
# pylint: disable=too-many-instance-attributes,too-many-public-methods
|
||||
|
||||
def setUp(self):
|
||||
super(ClientTest, self).setUp()
|
||||
@@ -358,7 +357,6 @@ class ClientTest(ClientTestBase):
|
||||
|
||||
def test_register(self):
|
||||
# "Instance of 'Field' has no to_json/update member" bug:
|
||||
# pylint: disable=no-member
|
||||
self.response.status_code = http_client.CREATED
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
@@ -371,7 +369,6 @@ class ClientTest(ClientTestBase):
|
||||
|
||||
def test_update_registration(self):
|
||||
# "Instance of 'Field' has no to_json/update member" bug:
|
||||
# pylint: disable=no-member
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.assertEqual(self.regr, self.client.update_registration(self.regr))
|
||||
@@ -463,34 +460,6 @@ class ClientTest(ClientTestBase):
|
||||
errors.ClientError, self.client.answer_challenge,
|
||||
self.challr.body, challenges.DNSResponse(validation=None))
|
||||
|
||||
def test_answer_challenge_key_authorization_fallback(self):
|
||||
self.response.links['up'] = {'url': self.challr.authzr_uri}
|
||||
self.response.json.return_value = self.challr.body.to_json()
|
||||
|
||||
def _wrapper_post(url, obj, *args, **kwargs): # pylint: disable=unused-argument
|
||||
"""
|
||||
Simulate an old ACME CA server, that would respond a 'malformed'
|
||||
error if keyAuthorization is missing.
|
||||
"""
|
||||
jobj = obj.to_partial_json()
|
||||
if 'keyAuthorization' not in jobj:
|
||||
raise messages.Error.with_code('malformed')
|
||||
return self.response
|
||||
self.net.post.side_effect = _wrapper_post
|
||||
|
||||
# This challenge response is of type KeyAuthorizationChallengeResponse, so the fallback
|
||||
# should be triggered, and avoid an exception.
|
||||
http_chall_response = challenges.HTTP01Response(key_authorization='test',
|
||||
resource=mock.MagicMock())
|
||||
self.client.answer_challenge(self.challr.body, http_chall_response)
|
||||
|
||||
# This challenge response is not of type KeyAuthorizationChallengeResponse, so the fallback
|
||||
# should not be triggered, leading to an exception.
|
||||
dns_chall_response = challenges.DNSResponse(validation=None)
|
||||
self.assertRaises(
|
||||
errors.Error, self.client.answer_challenge,
|
||||
self.challr.body, dns_chall_response)
|
||||
|
||||
def test_retry_after_date(self):
|
||||
self.response.headers['Retry-After'] = 'Fri, 31 Dec 1999 23:59:59 GMT'
|
||||
self.assertEqual(
|
||||
@@ -667,6 +636,14 @@ class ClientTest(ClientTestBase):
|
||||
errors.PollError, self.client.poll_and_request_issuance,
|
||||
csr, authzrs, mintime=mintime, max_attempts=2)
|
||||
|
||||
def test_deactivate_authorization(self):
|
||||
authzb = self.authzr.body.update(status=messages.STATUS_DEACTIVATED)
|
||||
self.response.json.return_value = authzb.to_json()
|
||||
authzr = self.client.deactivate_authorization(self.authzr)
|
||||
self.assertEqual(authzb, authzr.body)
|
||||
self.assertEqual(self.client.net.post.call_count, 1)
|
||||
self.assertTrue(self.authzr.uri in self.net.post.call_args_list[0][0])
|
||||
|
||||
def test_check_cert(self):
|
||||
self.response.headers['Location'] = self.certr.uri
|
||||
self.response.content = CERT_DER
|
||||
@@ -872,7 +849,6 @@ class ClientV2Test(ClientTestBase):
|
||||
|
||||
def test_update_registration(self):
|
||||
# "Instance of 'Field' has no to_json/update member" bug:
|
||||
# pylint: disable=no-member
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.assertEqual(self.regr, self.client.update_registration(self.regr))
|
||||
@@ -911,7 +887,7 @@ class ClientV2Test(ClientTestBase):
|
||||
new_nonce_url='https://www.letsencrypt-demo.org/acme/new-nonce')
|
||||
self.client.net.get.assert_not_called()
|
||||
|
||||
class FakeError(messages.Error): # pylint: disable=too-many-ancestors
|
||||
class FakeError(messages.Error):
|
||||
"""Fake error to reproduce a malformed request ACME error"""
|
||||
def __init__(self): # pylint: disable=super-init-not-called
|
||||
pass
|
||||
@@ -934,13 +910,12 @@ class MockJSONDeSerializable(jose.JSONDeSerializable):
|
||||
return {'foo': self.value}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, value):
|
||||
def from_json(cls, jobj):
|
||||
pass # pragma: no cover
|
||||
|
||||
|
||||
class ClientNetworkTest(unittest.TestCase):
|
||||
"""Tests for acme.client.ClientNetwork."""
|
||||
# pylint: disable=too-many-public-methods
|
||||
|
||||
def setUp(self):
|
||||
self.verify_ssl = mock.MagicMock()
|
||||
@@ -1146,7 +1121,6 @@ class ClientNetworkTest(unittest.TestCase):
|
||||
|
||||
class ClientNetworkWithMockedResponseTest(unittest.TestCase):
|
||||
"""Tests for acme.client.ClientNetwork which mock out response."""
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
def setUp(self):
|
||||
from acme.client import ClientNetwork
|
||||
|
||||
@@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)
|
||||
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
|
||||
|
||||
|
||||
class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
class SSLSocket(object):
|
||||
"""SSL wrapper for sockets.
|
||||
|
||||
:ivar socket sock: Original wrapped socket.
|
||||
@@ -74,7 +74,7 @@ class SSLSocket(object): # pylint: disable=too-few-public-methods
|
||||
class FakeConnection(object):
|
||||
"""Fake OpenSSL.SSL.Connection."""
|
||||
|
||||
# pylint: disable=too-few-public-methods,missing-docstring
|
||||
# pylint: disable=missing-docstring
|
||||
|
||||
def __init__(self, connection):
|
||||
self._wrapped = connection
|
||||
@@ -134,7 +134,6 @@ def probe_sni(name, host, port=443, timeout=300,
|
||||
socket_kwargs = {'source_address': source_address}
|
||||
|
||||
try:
|
||||
# pylint: disable=star-args
|
||||
logger.debug(
|
||||
"Attempting to connect to %s:%d%s.", host, port,
|
||||
" from {0}:{1}".format(
|
||||
@@ -195,8 +194,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
|
||||
|
||||
if common_name is None:
|
||||
return sans
|
||||
else:
|
||||
return [common_name] + [d for d in sans if d != common_name]
|
||||
return [common_name] + [d for d in sans if d != common_name]
|
||||
|
||||
def _pyopenssl_cert_or_req_san(cert_or_req):
|
||||
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
|
||||
|
||||
@@ -30,7 +30,6 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
|
||||
class _TestServer(socketserver.TCPServer):
|
||||
|
||||
# pylint: disable=too-few-public-methods
|
||||
# six.moves.* | pylint: disable=attribute-defined-outside-init,no-init
|
||||
|
||||
def server_bind(self): # pylint: disable=missing-docstring
|
||||
|
||||
@@ -43,7 +43,7 @@ class JWS(jose.JWS):
|
||||
__slots__ = jose.JWS._orig_slots # pylint: disable=no-member
|
||||
|
||||
@classmethod
|
||||
# pylint: disable=arguments-differ,too-many-arguments
|
||||
# pylint: disable=arguments-differ
|
||||
def sign(cls, payload, key, alg, nonce, url=None, kid=None):
|
||||
# Per ACME spec, jwk and kid are mutually exclusive, so only include a
|
||||
# jwk field if kid is not provided.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""ACME protocol messages."""
|
||||
import six
|
||||
import json
|
||||
import six
|
||||
try:
|
||||
from collections.abc import Hashable # pylint: disable=no-name-in-module
|
||||
except ImportError: # pragma: no cover
|
||||
@@ -18,20 +18,35 @@ OLD_ERROR_PREFIX = "urn:acme:error:"
|
||||
ERROR_PREFIX = "urn:ietf:params:acme:error:"

ERROR_CODES = {
    'accountDoesNotExist': 'The request specified an account that does not exist',
    'alreadyRevoked': 'The request specified a certificate to be revoked that has' \
                      ' already been revoked',
    'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
    'badNonce': 'The client sent an unacceptable anti-replay nonce',
    'badPublicKey': 'The JWS was signed by a public key the server does not support',
    'badRevocationReason': 'The revocation reason provided is not allowed by the server',
    'badSignatureAlgorithm': 'The JWS was signed with an algorithm the server does not support',
    'caa': 'Certification Authority Authorization (CAA) records forbid the CA from issuing' \
           ' a certificate',
    'compound': 'Specific error conditions are indicated in the "subproblems" array',
    'connection': ('The server could not connect to the client to verify the'
                   ' domain'),
    'dns': 'There was a problem with a DNS query during identifier validation',
    'dnssec': 'The server could not validate a DNSSEC signed domain',
    'incorrectResponse': 'Response received didn\'t match the challenge\'s requirements',
    # deprecate invalidEmail
    'invalidEmail': 'The provided email for a registration was invalid',
    'invalidContact': 'The provided contact URI was invalid',
    'malformed': 'The request message was malformed',
    'rejectedIdentifier': 'The server will not issue certificates for the identifier',
    'orderNotReady': 'The request attempted to finalize an order that is not ready to be finalized',
    'rateLimited': 'There were too many requests of a given type',
    'serverInternal': 'The server experienced an internal error',
    'tls': 'The server experienced a TLS error during domain verification',
    'unauthorized': 'The client lacks sufficient authorization',
    'unsupportedContact': 'A contact URL for an account used an unsupported protocol scheme',
    'unknownHost': 'The server could not resolve a domain name',
    'unsupportedIdentifier': 'An identifier is of an unsupported type',
    'externalAccountRequired': 'The server requires external account binding',
}
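A small sketch of how these descriptions are looked up when handling a server problem document (not part of the diff; the detail text is made up):

err = Error.with_code('badNonce', detail='JWS has an invalid anti-replay nonce')
assert err.code == 'badNonce'
assert ERROR_CODES[err.code] == 'The client sent an unacceptable anti-replay nonce'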
|
||||
|
||||
@@ -46,8 +61,7 @@ def is_acme_error(err):
|
||||
"""Check if argument is an ACME error."""
|
||||
if isinstance(err, Error) and (err.typ is not None):
|
||||
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
|
||||
else:
|
||||
return False
|
||||
return False
|
||||
|
||||
|
||||
@six.python_2_unicode_compatible
|
||||
@@ -102,6 +116,7 @@ class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
code = str(self.typ).split(':')[-1]
|
||||
if code in ERROR_CODES:
|
||||
return code
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
return b' :: '.join(
|
||||
@@ -116,18 +131,19 @@ class _Constant(jose.JSONDeSerializable, Hashable): # type: ignore
|
||||
POSSIBLE_NAMES = NotImplemented
|
||||
|
||||
def __init__(self, name):
|
||||
self.POSSIBLE_NAMES[name] = self
|
||||
super(_Constant, self).__init__()
|
||||
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
|
||||
self.name = name
|
||||
|
||||
def to_partial_json(self):
|
||||
return self.name
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, value):
|
||||
if value not in cls.POSSIBLE_NAMES:
|
||||
def from_json(cls, jobj):
|
||||
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
|
||||
raise jose.DeserializationError(
|
||||
'{0} not recognized'.format(cls.__name__))
|
||||
return cls.POSSIBLE_NAMES[value]
|
||||
return cls.POSSIBLE_NAMES[jobj] # pylint: disable=unsubscriptable-object
|
||||
|
||||
def __repr__(self):
|
||||
return '{0}({1})'.format(self.__class__.__name__, self.name)
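A sketch of the registry behavior this enables (not part of the diff):

# Each instantiation registers itself in POSSIBLE_NAMES, so JSON
# round-tripping is a dictionary lookup that returns the original singleton.
assert Status.from_json('deactivated') is STATUS_DEACTIVATED
assert STATUS_DEACTIVATED.to_partial_json() == 'deactivated'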
|
||||
@@ -152,6 +168,7 @@ STATUS_VALID = Status('valid')
|
||||
STATUS_INVALID = Status('invalid')
|
||||
STATUS_REVOKED = Status('revoked')
|
||||
STATUS_READY = Status('ready')
|
||||
STATUS_DEACTIVATED = Status('deactivated')
|
||||
|
||||
|
||||
class IdentifierType(_Constant):
|
||||
@@ -186,7 +203,6 @@ class Directory(jose.JSONDeSerializable):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
|
||||
# pylint: disable=star-args
|
||||
super(Directory.Meta, self).__init__(**kwargs)
|
||||
|
||||
@property
|
||||
@@ -322,7 +338,7 @@ class Registration(ResourceBody):
|
||||
|
||||
def _filter_contact(self, prefix):
|
||||
return tuple(
|
||||
detail[len(prefix):] for detail in self.contact
|
||||
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
|
||||
if detail.startswith(prefix))
|
||||
|
||||
@property
|
||||
@@ -394,7 +410,6 @@ class ChallengeBody(ResourceBody):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
|
||||
# pylint: disable=star-args
|
||||
super(ChallengeBody, self).__init__(**kwargs)
|
||||
|
||||
def encode(self, name):
|
||||
@@ -457,7 +472,7 @@ class Authorization(ResourceBody):
|
||||
:ivar datetime.datetime expires:
|
||||
|
||||
"""
|
||||
identifier = jose.Field('identifier', decoder=Identifier.from_json)
|
||||
identifier = jose.Field('identifier', decoder=Identifier.from_json, omitempty=True)
|
||||
challenges = jose.Field('challenges', omitempty=True)
|
||||
combinations = jose.Field('combinations', omitempty=True)
|
||||
|
||||
@@ -477,7 +492,7 @@ class Authorization(ResourceBody):
|
||||
def resolved_combinations(self):
|
||||
"""Combinations with challenges instead of indices."""
|
||||
return tuple(tuple(self.challenges[idx] for idx in combo)
|
||||
for combo in self.combinations)
|
||||
for combo in self.combinations) # pylint: disable=not-an-iterable
|
||||
|
||||
|
||||
@Directory.register
|
||||
@@ -487,6 +502,12 @@ class NewAuthorization(Authorization):
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class UpdateAuthorization(Authorization):
|
||||
"""Update authorization."""
|
||||
resource_type = 'authz'
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class AuthorizationResource(ResourceWithURI):
|
||||
"""Authorization Resource.
|
||||
|
||||
|
||||
@@ -1,29 +1,23 @@
|
||||
"""Support for standalone client challenge solvers. """
|
||||
import argparse
|
||||
import collections
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from six.moves import BaseHTTPServer # type: ignore # pylint: disable=import-error
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||
|
||||
import OpenSSL
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
from acme import _TLSSNI01DeprecationModule
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# six.moves.* | pylint: disable=no-member,attribute-defined-outside-init
|
||||
# pylint: disable=too-few-public-methods,no-init
|
||||
# pylint: disable=no-init
|
||||
|
||||
|
||||
class TLSServer(socketserver.TCPServer):
|
||||
@@ -83,7 +77,7 @@ class BaseDualNetworkedServers(object):
|
||||
kwargs["ipv6"] = ip_version
|
||||
new_address = (server_address[0],) + (port,) + server_address[2:]
|
||||
new_args = (new_address,) + remaining_args
|
||||
server = ServerClass(*new_args, **kwargs) # pylint: disable=star-args
|
||||
server = ServerClass(*new_args, **kwargs)
|
||||
logger.debug(
|
||||
"Successfully bound to %s:%s using %s", new_address[0],
|
||||
new_address[1], "IPv6" if ip_version else "IPv4")
|
||||
@@ -91,8 +85,8 @@ class BaseDualNetworkedServers(object):
|
||||
if self.servers:
|
||||
# Already bound using IPv6.
|
||||
logger.debug(
|
||||
"Certbot wasn't able to bind to %s:%s using %s, this " +
|
||||
"is often expected due to the dual stack nature of " +
|
||||
"Certbot wasn't able to bind to %s:%s using %s, this "
|
||||
"is often expected due to the dual stack nature of "
|
||||
"IPv6 socket implementations.",
|
||||
new_address[0], new_address[1],
|
||||
"IPv6" if ip_version else "IPv4")
|
||||
@@ -105,7 +99,7 @@ class BaseDualNetworkedServers(object):
|
||||
# If two servers are set up and port 0 was passed in, ensure we always
|
||||
# bind to the same port for both servers.
|
||||
port = server.socket.getsockname()[1]
|
||||
if len(self.servers) == 0:
|
||||
if not self.servers:
|
||||
raise socket.error("Could not bind to IPv4 or IPv6.")
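A sketch of the port-sharing behavior described above (not part of the diff; HTTP01DualNetworkedServers and shutdown_and_server_close are assumed to behave as the helpers acme.standalone exposes elsewhere):

from acme.standalone import HTTP01DualNetworkedServers

servers = HTTP01DualNetworkedServers(('', 0), resources=set())
# Requesting port 0 binds the IPv6 and IPv4 servers to the same ephemeral
# port, so a single port number can be advertised for the challenge.
ports = {srv.socket.getsockname()[1] for srv in servers.servers}
assert len(ports) == 1
servers.shutdown_and_server_close()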
|
||||
|
||||
def serve_forever(self):
|
||||
@@ -132,35 +126,6 @@ class BaseDualNetworkedServers(object):
|
||||
self.threads = []
|
||||
|
||||
|
||||
class TLSSNI01Server(TLSServer, ACMEServerMixin):
|
||||
"""TLSSNI01 Server."""
|
||||
|
||||
def __init__(self, server_address, certs, ipv6=False):
|
||||
TLSServer.__init__(
|
||||
self, server_address, BaseRequestHandlerWithLogging, certs=certs, ipv6=ipv6)
|
||||
|
||||
|
||||
class TLSSNI01DualNetworkedServers(BaseDualNetworkedServers):
|
||||
"""TLSSNI01Server Wrapper. Tries everything for both. Failures for one don't
|
||||
affect the other."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
BaseDualNetworkedServers.__init__(self, TLSSNI01Server, *args, **kwargs)
|
||||
|
||||
|
||||
class BaseRequestHandlerWithLogging(socketserver.BaseRequestHandler):
|
||||
"""BaseRequestHandler with logging."""
|
||||
|
||||
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
||||
"""Log arbitrary message."""
|
||||
logger.debug("%s - - %s", self.client_address[0], format % args)
|
||||
|
||||
def handle(self):
|
||||
"""Handle request."""
|
||||
self.log_message("Incoming request")
|
||||
socketserver.BaseRequestHandler.handle(self)
|
||||
|
||||
|
||||
class HTTPServer(BaseHTTPServer.HTTPServer):
|
||||
"""Generic HTTP Server."""
|
||||
|
||||
@@ -263,43 +228,3 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
"""
|
||||
return functools.partial(
|
||||
cls, simple_http_resources=simple_http_resources)
|
||||
|
||||
|
||||
def simple_tls_sni_01_server(cli_args, forever=True):
|
||||
"""Run simple standalone TLSSNI01 server."""
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
"-p", "--port", default=0, help="Port to serve at. By default "
|
||||
"picks random free port.")
|
||||
args = parser.parse_args(cli_args[1:])
|
||||
|
||||
certs = {}
|
||||
|
||||
_, hosts, _ = next(os.walk('.')) # type: ignore # https://github.com/python/mypy/issues/465
|
||||
for host in hosts:
|
||||
with open(os.path.join(host, "cert.pem")) as cert_file:
|
||||
cert_contents = cert_file.read()
|
||||
with open(os.path.join(host, "key.pem")) as key_file:
|
||||
key_contents = key_file.read()
|
||||
certs[host.encode()] = (
|
||||
OpenSSL.crypto.load_privatekey(
|
||||
OpenSSL.crypto.FILETYPE_PEM, key_contents),
|
||||
OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM, cert_contents))
|
||||
|
||||
server = TLSSNI01Server(('', int(args.port)), certs=certs)
|
||||
logger.info("Serving at https://%s:%s...", *server.socket.getsockname()[:2])
|
||||
if forever: # pragma: no cover
|
||||
server.serve_forever()
|
||||
else:
|
||||
server.handle_request()
|
||||
|
||||
|
||||
# Patching ourselves to warn about TLS-SNI challenge deprecation and removal.
|
||||
sys.modules[__name__] = _TLSSNI01DeprecationModule(sys.modules[__name__])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(simple_tls_sni_01_server(sys.argv)) # pragma: no cover
|
||||
|
||||
@@ -1,13 +1,7 @@
|
||||
"""Tests for acme.standalone."""
|
||||
import multiprocessing
|
||||
import os
|
||||
import shutil
|
||||
import socket
|
||||
import threading
|
||||
import tempfile
|
||||
import unittest
|
||||
import time
|
||||
from contextlib import closing
|
||||
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
from six.moves import socketserver # type: ignore # pylint: disable=import-error
|
||||
@@ -17,8 +11,6 @@ import mock
|
||||
import requests
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import test_util
|
||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
@@ -31,41 +23,14 @@ class TLSServerTest(unittest.TestCase):
|
||||
from acme.standalone import TLSServer
|
||||
server = TLSServer(
|
||||
('', 0), socketserver.BaseRequestHandler, bind_and_activate=True)
|
||||
server.server_close() # pylint: disable=no-member
|
||||
server.server_close()
|
||||
|
||||
def test_ipv6(self):
|
||||
if socket.has_ipv6:
|
||||
from acme.standalone import TLSServer
|
||||
server = TLSServer(
|
||||
('', 0), socketserver.BaseRequestHandler, bind_and_activate=True, ipv6=True)
|
||||
server.server_close() # pylint: disable=no-member
|
||||
|
||||
|
||||
class TLSSNI01ServerTest(unittest.TestCase):
|
||||
"""Test for acme.standalone.TLSSNI01Server."""
|
||||
|
||||
|
||||
def setUp(self):
|
||||
self.certs = {b'localhost': (
|
||||
test_util.load_pyopenssl_private_key('rsa2048_key.pem'),
|
||||
test_util.load_cert('rsa2048_cert.pem'),
|
||||
)}
|
||||
from acme.standalone import TLSSNI01Server
|
||||
self.server = TLSSNI01Server(('localhost', 0), certs=self.certs)
|
||||
# pylint: disable=no-member
|
||||
self.thread = threading.Thread(target=self.server.serve_forever)
|
||||
self.thread.start()
|
||||
|
||||
def tearDown(self):
|
||||
self.server.shutdown() # pylint: disable=no-member
|
||||
self.thread.join()
|
||||
|
||||
def test_it(self):
|
||||
host, port = self.server.socket.getsockname()[:2]
|
||||
cert = crypto_util.probe_sni(
|
||||
b'localhost', host=host, port=port, timeout=1)
|
||||
self.assertEqual(jose.ComparableX509(cert),
|
||||
jose.ComparableX509(self.certs[b'localhost'][1]))
|
||||
server.server_close()
|
||||
|
||||
|
||||
class HTTP01ServerTest(unittest.TestCase):
|
||||
@@ -80,13 +45,12 @@ class HTTP01ServerTest(unittest.TestCase):
|
||||
from acme.standalone import HTTP01Server
|
||||
self.server = HTTP01Server(('', 0), resources=self.resources)
|
||||
|
||||
# pylint: disable=no-member
|
||||
self.port = self.server.socket.getsockname()[1]
|
||||
self.thread = threading.Thread(target=self.server.serve_forever)
|
||||
self.thread.start()
|
||||
|
||||
def tearDown(self):
|
||||
self.server.shutdown() # pylint: disable=no-member
|
||||
self.server.shutdown()
|
||||
self.thread.join()
|
||||
|
||||
def test_index(self):
|
||||
@@ -139,7 +103,6 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
# NB: On Windows, socket.IPPROTO_IPV6 constant may be missing.
|
||||
# We use the corresponding value (41) instead.
|
||||
level = getattr(socket, "IPPROTO_IPV6", 41)
|
||||
# pylint: disable=no-member
|
||||
self.socket.setsockopt(level, socket.IPV6_V6ONLY, 1)
|
||||
try:
|
||||
self.server_bind()
|
||||
@@ -173,33 +136,6 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
prev_port = port
|
||||
|
||||
|
||||
class TLSSNI01DualNetworkedServersTest(unittest.TestCase):
|
||||
"""Test for acme.standalone.TLSSNI01DualNetworkedServers."""
|
||||
|
||||
|
||||
def setUp(self):
|
||||
self.certs = {b'localhost': (
|
||||
test_util.load_pyopenssl_private_key('rsa2048_key.pem'),
|
||||
test_util.load_cert('rsa2048_cert.pem'),
|
||||
)}
|
||||
from acme.standalone import TLSSNI01DualNetworkedServers
|
||||
self.servers = TLSSNI01DualNetworkedServers(('localhost', 0), certs=self.certs)
|
||||
self.servers.serve_forever()
|
||||
|
||||
def tearDown(self):
|
||||
self.servers.shutdown_and_server_close()
|
||||
|
||||
def test_connect(self):
|
||||
socknames = self.servers.getsocknames()
|
||||
# connect to all addresses
|
||||
for sockname in socknames:
|
||||
host, port = sockname[:2]
|
||||
cert = crypto_util.probe_sni(
|
||||
b'localhost', host=host, port=port, timeout=1)
|
||||
self.assertEqual(jose.ComparableX509(cert),
|
||||
jose.ComparableX509(self.certs[b'localhost'][1]))
|
||||
|
||||
|
||||
class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
"""Tests for acme.standalone.HTTP01DualNetworkedServers."""
|
||||
|
||||
@@ -212,7 +148,6 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
from acme.standalone import HTTP01DualNetworkedServers
|
||||
self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources)
|
||||
|
||||
# pylint: disable=no-member
|
||||
self.port = self.servers.getsocknames()[0][1]
|
||||
self.servers.serve_forever()
|
||||
|
||||
@@ -251,56 +186,5 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
self.assertFalse(self._test_http01(add=False))
|
||||
|
||||
|
||||
class TestSimpleTLSSNI01Server(unittest.TestCase):
|
||||
"""Tests for acme.standalone.simple_tls_sni_01_server."""
|
||||
|
||||
|
||||
def setUp(self):
|
||||
# mirror ../examples/standalone
|
||||
self.test_cwd = tempfile.mkdtemp()
|
||||
localhost_dir = os.path.join(self.test_cwd, 'localhost')
|
||||
os.makedirs(localhost_dir)
|
||||
shutil.copy(test_util.vector_path('rsa2048_cert.pem'),
|
||||
os.path.join(localhost_dir, 'cert.pem'))
|
||||
shutil.copy(test_util.vector_path('rsa2048_key.pem'),
|
||||
os.path.join(localhost_dir, 'key.pem'))
|
||||
|
||||
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
|
||||
sock.bind(('', 0))
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
self.port = sock.getsockname()[1]
|
||||
|
||||
from acme.standalone import simple_tls_sni_01_server
|
||||
self.process = multiprocessing.Process(target=simple_tls_sni_01_server,
|
||||
args=(['path', '-p', str(self.port)],))
|
||||
self.old_cwd = os.getcwd()
|
||||
os.chdir(self.test_cwd)
|
||||
|
||||
def tearDown(self):
|
||||
os.chdir(self.old_cwd)
|
||||
if self.process.is_alive():
|
||||
self.process.terminate()
|
||||
shutil.rmtree(self.test_cwd)
|
||||
|
||||
@mock.patch('acme.standalone.TLSSNI01Server.handle_request')
|
||||
def test_mock(self, handle):
|
||||
from acme.standalone import simple_tls_sni_01_server
|
||||
simple_tls_sni_01_server(cli_args=['path', '-p', str(self.port)], forever=False)
|
||||
self.assertEqual(handle.call_count, 1)
|
||||
|
||||
def test_live(self):
|
||||
self.process.start()
|
||||
cert = None
|
||||
for _ in range(50):
|
||||
time.sleep(0.1)
|
||||
try:
|
||||
cert = crypto_util.probe_sni(b'localhost', b'127.0.0.1', self.port)
|
||||
break
|
||||
except errors.Error: # pragma: no cover
|
||||
pass
|
||||
self.assertEqual(jose.ComparableX509(cert),
|
||||
test_util.load_comparable_cert('rsa2048_cert.pem'))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
|
||||
@@ -5,7 +5,6 @@
"""
import os
import pkg_resources
import unittest

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
@@ -13,12 +12,6 @@ import josepy as jose
from OpenSSL import crypto


def vector_path(*names):
"""Path to a test vector."""
return pkg_resources.resource_filename(
__name__, os.path.join('testdata', *names))


def load_vector(*names):
"""Load contents of a test vector."""
# luckily, resource_string opens file in binary mode
@@ -73,24 +66,3 @@ def load_pyopenssl_private_key(*names):
loader = _guess_loader(
names[-1], crypto.FILETYPE_PEM, crypto.FILETYPE_ASN1)
return crypto.load_privatekey(loader, load_vector(*names))


def skip_unless(condition, reason): # pragma: no cover
"""Skip tests unless a condition holds.

This implements the basic functionality of unittest.skipUnless
which is only available on Python 2.7+.

:param bool condition: If ``False``, the test will be skipped
:param str reason: the reason for skipping the test

:rtype: callable
:returns: decorator that hides tests unless condition is ``True``

"""
if hasattr(unittest, "skipUnless"):
return unittest.skipUnless(condition, reason)
elif condition:
return lambda cls: cls
else:
return lambda cls: None

@@ -3,7 +3,7 @@ from setuptools import find_packages
from setuptools.command.test import test as TestCommand
import sys

version = '0.33.0.dev0'
version = '1.0.0.dev0'

# Please update tox.ini when modifying dependency version requirements
install_requires = [
@@ -73,6 +73,7 @@ setup(
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],

40
appveyor.yml
@@ -1,40 +0,0 @@
image: Visual Studio 2015

environment:
matrix:
- TOXENV: py35
- TOXENV: py37-cover

branches:
only:
- master
- /^\d+\.\d+\.x$/ # Version branches like X.X.X
- /^test-.*$/

init:
# Since master can receive only commits from PR that have already been tested, following
# condition avoid to launch all jobs except the coverage one for commits pushed to master.
- ps: |
if (-Not $Env:APPVEYOR_PULL_REQUEST_NUMBER -And $Env:APPVEYOR_REPO_BRANCH -Eq 'master' `
-And -Not ($Env:TOXENV -Like '*-cover'))
{ $Env:APPVEYOR_SKIP_FINALIZE_ON_EXIT = 'true'; Exit-AppVeyorBuild }

install:
# Use Python 3.7 by default
- "SET PATH=C:\\Python37;C:\\Python37\\Scripts;%PATH%"
# Check env
- "python --version"
# Upgrade pip to avoid warnings
- "python -m pip install --upgrade pip"
# Ready to install tox and coverage
- "pip install tox codecov"

build: off

test_script:
- set TOX_TESTENV_PASSENV=APPVEYOR
# Test env is set by TOXENV env variable
- tox

on_success:
- if exist .coverage codecov
@@ -1,8 +1,9 @@
""" Utility functions for certbot-apache plugin """
import binascii
import os

from certbot import util
from certbot.compat import os


def get_mod_deps(mod_name):
"""Get known module dependencies.

@@ -1,207 +0,0 @@
|
||||
"""Class of Augeas Configurators."""
|
||||
import logging
|
||||
|
||||
|
||||
from certbot import errors
|
||||
from certbot.plugins import common
|
||||
|
||||
from certbot_apache import constants
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AugeasConfigurator(common.Installer):
|
||||
"""Base Augeas Configurator class.
|
||||
|
||||
:ivar config: Configuration.
|
||||
:type config: :class:`~certbot.interfaces.IConfig`
|
||||
|
||||
:ivar aug: Augeas object
|
||||
:type aug: :class:`augeas.Augeas`
|
||||
|
||||
:ivar str save_notes: Human-readable configuration change notes
|
||||
:ivar reverter: saves and reverts checkpoints
|
||||
:type reverter: :class:`certbot.reverter.Reverter`
|
||||
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(AugeasConfigurator, self).__init__(*args, **kwargs)
|
||||
|
||||
# Placeholder for augeas
|
||||
self.aug = None
|
||||
|
||||
self.save_notes = ""
|
||||
|
||||
|
||||
def init_augeas(self):
|
||||
""" Initialize the actual Augeas instance """
|
||||
import augeas
|
||||
self.aug = augeas.Augeas(
|
||||
# specify a directory to load our preferred lens from
|
||||
loadpath=constants.AUGEAS_LENS_DIR,
|
||||
# Do not save backup (we do it ourselves), do not load
|
||||
# anything by default
|
||||
flags=(augeas.Augeas.NONE |
|
||||
augeas.Augeas.NO_MODL_AUTOLOAD |
|
||||
augeas.Augeas.ENABLE_SPAN))
|
||||
# See if any temporary changes need to be recovered
|
||||
# This needs to occur before VirtualHost objects are setup...
|
||||
# because this will change the underlying configuration and potential
|
||||
# vhosts
|
||||
self.recovery_routine()
|
||||
|
||||
def check_parsing_errors(self, lens):
|
||||
"""Verify Augeas can parse all of the lens files.
|
||||
|
||||
:param str lens: lens to check for errors
|
||||
|
||||
:raises .errors.PluginError: If there has been an error in parsing with
|
||||
the specified lens.
|
||||
|
||||
"""
|
||||
error_files = self.aug.match("/augeas//error")
|
||||
|
||||
for path in error_files:
|
||||
# Check to see if it was an error resulting from the use of
|
||||
# the httpd lens
|
||||
lens_path = self.aug.get(path + "/lens")
|
||||
# As aug.get may return null
|
||||
if lens_path and lens in lens_path:
|
||||
msg = (
|
||||
"There has been an error in parsing the file {0} on line {1}: "
|
||||
"{2}".format(
|
||||
# Strip off /augeas/files and /error
|
||||
path[13:len(path) - 6],
|
||||
self.aug.get(path + "/line"),
|
||||
self.aug.get(path + "/message")))
|
||||
raise errors.PluginError(msg)
|
||||
|
||||
def ensure_augeas_state(self):
|
||||
"""Makes sure that all Augeas dom changes are written to files to avoid
|
||||
loss of configuration directives when doing additional augeas parsing,
|
||||
causing a possible augeas.load() resulting dom reset
|
||||
"""
|
||||
|
||||
if self.unsaved_files():
|
||||
self.save_notes += "(autosave)"
|
||||
self.save()
|
||||
|
||||
def unsaved_files(self):
|
||||
"""Lists files that have modified Augeas DOM but the changes have not
|
||||
been written to the filesystem yet, used by `self.save()` and
|
||||
ApacheConfigurator to check the file state.
|
||||
|
||||
:raises .errors.PluginError: If there was an error in Augeas, in
|
||||
an attempt to save the configuration, or an error creating a
|
||||
checkpoint
|
||||
|
||||
:returns: `set` of unsaved files
|
||||
"""
|
||||
save_state = self.aug.get("/augeas/save")
|
||||
self.aug.set("/augeas/save", "noop")
|
||||
# Existing Errors
|
||||
ex_errs = self.aug.match("/augeas//error")
|
||||
try:
|
||||
# This is a noop save
|
||||
self.aug.save()
|
||||
except (RuntimeError, IOError):
|
||||
self._log_save_errors(ex_errs)
|
||||
# Erase Save Notes
|
||||
self.save_notes = ""
|
||||
raise errors.PluginError(
|
||||
"Error saving files, check logs for more info.")
|
||||
|
||||
# Return the original save method
|
||||
self.aug.set("/augeas/save", save_state)
|
||||
|
||||
# Retrieve list of modified files
|
||||
# Note: Noop saves can cause the file to be listed twice, I used a
|
||||
# set to remove this possibility. This is a known augeas 0.10 error.
|
||||
save_paths = self.aug.match("/augeas/events/saved")
|
||||
|
||||
save_files = set()
|
||||
if save_paths:
|
||||
for path in save_paths:
|
||||
save_files.add(self.aug.get(path)[6:])
|
||||
return save_files
|
||||
|
||||
def save(self, title=None, temporary=False):
|
||||
"""Saves all changes to the configuration files.
|
||||
|
||||
This function first checks for save errors, if none are found,
|
||||
all configuration changes made will be saved. According to the
|
||||
function parameters. If an exception is raised, a new checkpoint
|
||||
was not created.
|
||||
|
||||
:param str title: The title of the save. If a title is given, the
|
||||
configuration will be saved as a new checkpoint and put in a
|
||||
timestamped directory.
|
||||
|
||||
:param bool temporary: Indicates whether the changes made will
|
||||
be quickly reversed in the future (ie. challenges)
|
||||
|
||||
"""
|
||||
save_files = self.unsaved_files()
|
||||
if save_files:
|
||||
self.add_to_checkpoint(save_files,
|
||||
self.save_notes, temporary=temporary)
|
||||
|
||||
self.save_notes = ""
|
||||
self.aug.save()
|
||||
|
||||
# Force reload if files were modified
|
||||
# This is needed to recalculate augeas directive span
|
||||
if save_files:
|
||||
for sf in save_files:
|
||||
self.aug.remove("/files/"+sf)
|
||||
self.aug.load()
|
||||
if title and not temporary:
|
||||
self.finalize_checkpoint(title)
|
||||
|
||||
def _log_save_errors(self, ex_errs):
|
||||
"""Log errors due to bad Augeas save.
|
||||
|
||||
:param list ex_errs: Existing errors before save
|
||||
|
||||
"""
|
||||
# Check for the root of save problems
|
||||
new_errs = self.aug.match("/augeas//error")
|
||||
# logger.error("During Save - %s", mod_conf)
|
||||
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
|
||||
", ".join(err[13:len(err) - 6] for err in new_errs
|
||||
# Only new errors caused by recent save
|
||||
if err not in ex_errs), self.save_notes)
|
||||
|
||||
# Wrapper functions for Reverter class
|
||||
def recovery_routine(self):
|
||||
"""Revert all previously modified files.
|
||||
|
||||
Reverts all modified files that have not been saved as a checkpoint
|
||||
|
||||
:raises .errors.PluginError: If unable to recover the configuration
|
||||
|
||||
"""
|
||||
super(AugeasConfigurator, self).recovery_routine()
|
||||
# Need to reload configuration after these changes take effect
|
||||
self.aug.load()
|
||||
|
||||
def revert_challenge_config(self):
|
||||
"""Used to cleanup challenge configurations.
|
||||
|
||||
:raises .errors.PluginError: If unable to revert the challenge config.
|
||||
|
||||
"""
|
||||
self.revert_temporary_config()
|
||||
self.aug.load()
|
||||
|
||||
def rollback_checkpoints(self, rollback=1):
|
||||
"""Rollback saved checkpoints.
|
||||
|
||||
:param int rollback: Number of checkpoints to revert
|
||||
|
||||
:raises .errors.PluginError: If there is a problem with the input or
|
||||
the function is unable to correctly revert the configuration
|
||||
|
||||
"""
|
||||
super(AugeasConfigurator, self).rollback_checkpoints(rollback)
|
||||
self.aug.load()
|
||||
@@ -1,40 +1,41 @@
|
||||
"""Apache Configuration based off of Augeas Configurator."""
|
||||
"""Apache Configurator."""
|
||||
# pylint: disable=too-many-lines
|
||||
import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import os
|
||||
import pkg_resources
|
||||
import re
|
||||
import six
|
||||
import socket
|
||||
import time
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
import pkg_resources
|
||||
import six
|
||||
|
||||
import zope.component
|
||||
import zope.interface
|
||||
|
||||
from acme import challenges
|
||||
from acme.magic_typing import Any, DefaultDict, Dict, List, Set, Union # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import DefaultDict, Dict, List, Set, Union # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
|
||||
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge # pylint: disable=unused-import
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
from certbot.plugins import common
|
||||
from certbot.plugins.util import path_surgery
|
||||
from certbot.plugins.enhancements import AutoHSTSEnhancement
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import augeas_configurator
|
||||
from certbot_apache import constants
|
||||
from certbot_apache import display_ops
|
||||
from certbot_apache import http_01
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import parser
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -69,13 +70,9 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
@zope.interface.implementer(interfaces.IAuthenticator, interfaces.IInstaller)
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# pylint: disable=too-many-instance-attributes,too-many-public-methods
|
||||
class ApacheConfigurator(common.Installer):
|
||||
"""Apache configurator.
|
||||
|
||||
State of Configurator: This code has been tested and built for Ubuntu
|
||||
14.04 Apache 2.4 and it works for Ubuntu 12.04 Apache 2.2
|
||||
|
||||
:ivar config: Configuration.
|
||||
:type config: :class:`~certbot.interfaces.IConfig`
|
||||
|
||||
@@ -176,8 +173,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"(Only Ubuntu/Debian currently)")
|
||||
add("ctl", default=DEFAULTS["ctl"],
|
||||
help="Full path to Apache control script")
|
||||
util.add_deprecated_argument(
|
||||
add, argument_name="init-script", nargs=1)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize an Apache Configurator.
|
||||
@@ -200,6 +195,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
self._enhanced_vhosts = defaultdict(set) # type: DefaultDict[str, Set[obj.VirtualHost]]
|
||||
# Temporary state for AutoHSTS enhancement
|
||||
self._autohsts = {} # type: Dict[str, Dict[str, Union[int, float]]]
|
||||
# Reverter save notes
|
||||
self.save_notes = ""
|
||||
|
||||
# These will be set in the prepare function
|
||||
self._prepared = False
|
||||
@@ -230,12 +227,6 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
:raises .errors.PluginError: If there is any other error
|
||||
|
||||
"""
|
||||
# Perform the actual Augeas initialization to be able to react
|
||||
try:
|
||||
self.init_augeas()
|
||||
except ImportError:
|
||||
raise errors.NoInstallationError("Problem in Augeas installation")
|
||||
|
||||
self._prepare_options()
|
||||
|
||||
# Verify Apache is installed
|
||||
@@ -251,18 +242,16 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
'.'.join(str(i) for i in self.version))
|
||||
if self.version < (2, 2):
|
||||
raise errors.NotSupportedError(
|
||||
"Apache Version %s not supported.", str(self.version))
|
||||
|
||||
if not self._check_aug_version():
|
||||
raise errors.NotSupportedError(
|
||||
"Apache plugin support requires libaugeas0 and augeas-lenses "
|
||||
"version 1.2.0 or higher, please make sure you have you have "
|
||||
"those installed.")
|
||||
"Apache Version {0} not supported.".format(str(self.version)))
|
||||
|
||||
# Recover from previous crash before Augeas initialization to have the
|
||||
# correct parse tree from the get go.
|
||||
self.recovery_routine()
|
||||
# Perform the actual Augeas initialization to be able to react
|
||||
self.parser = self.get_parser()
|
||||
|
||||
# Check for errors in parsing files with Augeas
|
||||
self.check_parsing_errors("httpd.aug")
|
||||
self.parser.check_parsing_errors("httpd.aug")
|
||||
|
||||
# Get all of the available vhosts
|
||||
self.vhosts = self.get_virtual_hosts()
|
||||
@@ -276,9 +265,72 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
except (OSError, errors.LockError):
|
||||
logger.debug("Encountered error:", exc_info=True)
|
||||
raise errors.PluginError(
|
||||
"Unable to lock %s", self.option("server_root"))
|
||||
"Unable to create a lock file in {0}. Are you running"
|
||||
" Certbot with sufficient privileges to modify your"
|
||||
" Apache configuration?".format(self.option("server_root")))
|
||||
self._prepared = True
|
||||
|
||||
def save(self, title=None, temporary=False):
|
||||
"""Saves all changes to the configuration files.
|
||||
|
||||
This function first checks for save errors, if none are found,
|
||||
all configuration changes made will be saved. According to the
|
||||
function parameters. If an exception is raised, a new checkpoint
|
||||
was not created.
|
||||
|
||||
:param str title: The title of the save. If a title is given, the
|
||||
configuration will be saved as a new checkpoint and put in a
|
||||
timestamped directory.
|
||||
|
||||
:param bool temporary: Indicates whether the changes made will
|
||||
be quickly reversed in the future (ie. challenges)
|
||||
|
||||
"""
|
||||
save_files = self.parser.unsaved_files()
|
||||
if save_files:
|
||||
self.add_to_checkpoint(save_files,
|
||||
self.save_notes, temporary=temporary)
|
||||
# Handle the parser specific tasks
|
||||
self.parser.save(save_files)
|
||||
if title and not temporary:
|
||||
self.finalize_checkpoint(title)
|
||||
|
||||
def recovery_routine(self):
|
||||
"""Revert all previously modified files.
|
||||
|
||||
Reverts all modified files that have not been saved as a checkpoint
|
||||
|
||||
:raises .errors.PluginError: If unable to recover the configuration
|
||||
|
||||
"""
|
||||
super(ApacheConfigurator, self).recovery_routine()
|
||||
# Reload configuration after these changes take effect if needed
|
||||
# ie. ApacheParser has been initialized.
|
||||
if self.parser:
|
||||
# TODO: wrap into non-implementation specific parser interface
|
||||
self.parser.aug.load()
|
||||
|
||||
def revert_challenge_config(self):
|
||||
"""Used to cleanup challenge configurations.
|
||||
|
||||
:raises .errors.PluginError: If unable to revert the challenge config.
|
||||
|
||||
"""
|
||||
self.revert_temporary_config()
|
||||
self.parser.aug.load()
|
||||
|
||||
def rollback_checkpoints(self, rollback=1):
|
||||
"""Rollback saved checkpoints.
|
||||
|
||||
:param int rollback: Number of checkpoints to revert
|
||||
|
||||
:raises .errors.PluginError: If there is a problem with the input or
|
||||
the function is unable to correctly revert the configuration
|
||||
|
||||
"""
|
||||
super(ApacheConfigurator, self).rollback_checkpoints(rollback)
|
||||
self.parser.aug.load()
|
||||
|
||||
def _verify_exe_availability(self, exe):
|
||||
"""Checks availability of Apache executable"""
|
||||
if not util.exe_exists(exe):
|
||||
@@ -286,26 +338,11 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
raise errors.NoInstallationError(
|
||||
'Cannot find Apache executable {0}'.format(exe))
|
||||
|
||||
def _check_aug_version(self):
|
||||
""" Checks that we have recent enough version of libaugeas.
|
||||
If augeas version is recent enough, it will support case insensitive
|
||||
regexp matching"""
|
||||
|
||||
self.aug.set("/test/path/testing/arg", "aRgUMeNT")
|
||||
try:
|
||||
matches = self.aug.match(
|
||||
"/test//*[self::arg=~regexp('argument', 'i')]")
|
||||
except RuntimeError:
|
||||
self.aug.remove("/test/path")
|
||||
return False
|
||||
self.aug.remove("/test/path")
|
||||
return matches
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
# If user provided vhost_root value in command line, use it
|
||||
return parser.ApacheParser(
|
||||
self.aug, self.option("server_root"), self.conf("vhost-root"),
|
||||
self.option("server_root"), self.conf("vhost-root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _wildcard_domain(self, domain):
|
||||
@@ -392,6 +429,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
if len(name.split(".")) == len(domain.split(".")):
|
||||
return fnmatch.fnmatch(name, domain)
|
||||
return None
|
||||
|
||||
def _choose_vhosts_wildcard(self, domain, create_ssl=True):
|
||||
"""Prompts user to choose vhosts to install a wildcard certificate for"""
|
||||
@@ -483,8 +521,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# install SSLCertificateFile, SSLCertificateKeyFile,
|
||||
# and SSLCertificateChainFile directives
|
||||
set_cert_path = cert_path
|
||||
self.aug.set(path["cert_path"][-1], cert_path)
|
||||
self.aug.set(path["cert_key"][-1], key_path)
|
||||
self.parser.aug.set(path["cert_path"][-1], cert_path)
|
||||
self.parser.aug.set(path["cert_key"][-1], key_path)
|
||||
if chain_path is not None:
|
||||
self.parser.add_dir(vhost.path,
|
||||
"SSLCertificateChainFile", chain_path)
|
||||
@@ -496,8 +534,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
raise errors.PluginError("Please provide the --fullchain-path "
|
||||
"option pointing to your full chain file")
|
||||
set_cert_path = fullchain_path
|
||||
self.aug.set(path["cert_path"][-1], fullchain_path)
|
||||
self.aug.set(path["cert_key"][-1], key_path)
|
||||
self.parser.aug.set(path["cert_path"][-1], fullchain_path)
|
||||
self.parser.aug.set(path["cert_key"][-1], key_path)
|
||||
|
||||
# Enable the new vhost if needed
|
||||
if not vhost.enabled:
|
||||
@@ -715,7 +753,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if name:
|
||||
all_names.add(name)
|
||||
|
||||
if len(vhost_macro) > 0:
|
||||
if vhost_macro:
|
||||
zope.component.getUtility(interfaces.IDisplay).notification(
|
||||
"Apache mod_macro seems to be in use in file(s):\n{0}"
|
||||
"\n\nUnfortunately mod_macro is not yet supported".format(
|
||||
@@ -797,7 +835,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
addrs = set()
|
||||
try:
|
||||
args = self.aug.match(path + "/arg")
|
||||
args = self.parser.aug.match(path + "/arg")
|
||||
except RuntimeError:
|
||||
logger.warning("Encountered a problem while parsing file: %s, skipping", path)
|
||||
return None
|
||||
@@ -815,7 +853,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
is_ssl = True
|
||||
|
||||
filename = apache_util.get_file_path(
|
||||
self.aug.get("/augeas/files%s/path" % apache_util.get_file_path(path)))
|
||||
self.parser.aug.get("/augeas/files%s/path" % apache_util.get_file_path(path)))
|
||||
if filename is None:
|
||||
return None
|
||||
|
||||
@@ -845,7 +883,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Make a list of parser paths because the parser_paths
|
||||
# dictionary may be modified during the loop.
|
||||
for vhost_path in list(self.parser.parser_paths):
|
||||
paths = self.aug.match(
|
||||
paths = self.parser.aug.match(
|
||||
("/files%s//*[label()=~regexp('%s')]" %
|
||||
(vhost_path, parser.case_i("VirtualHost"))))
|
||||
paths = [path for path in paths if
|
||||
@@ -855,7 +893,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if not new_vhost:
|
||||
continue
|
||||
internal_path = apache_util.get_internal_aug_path(new_vhost.path)
|
||||
realpath = os.path.realpath(new_vhost.filep)
|
||||
realpath = filesystem.realpath(new_vhost.filep)
|
||||
if realpath not in file_paths:
|
||||
file_paths[realpath] = new_vhost.filep
|
||||
internal_paths[realpath].add(internal_path)
|
||||
@@ -1061,6 +1099,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Ugly but takes care of protocol def, eg: 1.1.1.1:443 https
|
||||
if listen.split(":")[-1].split(" ")[0] == port:
|
||||
return True
|
||||
return None
|
||||
|
||||
def prepare_https_modules(self, temp):
|
||||
"""Helper method for prepare_server_https, taking care of enabling
|
||||
@@ -1076,7 +1115,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if "ssl_module" not in self.parser.modules:
|
||||
self.enable_mod("ssl", temp=temp)
|
||||
|
||||
def make_vhost_ssl(self, nonssl_vhost): # pylint: disable=too-many-locals
|
||||
def make_vhost_ssl(self, nonssl_vhost):
|
||||
"""Makes an ssl_vhost version of a nonssl_vhost.
|
||||
|
||||
Duplicates vhost and adds default ssl options
|
||||
@@ -1098,16 +1137,16 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
avail_fp = nonssl_vhost.filep
|
||||
ssl_fp = self._get_ssl_vhost_path(avail_fp)
|
||||
|
||||
orig_matches = self.aug.match("/files%s//* [label()=~regexp('%s')]" %
|
||||
orig_matches = self.parser.aug.match("/files%s//* [label()=~regexp('%s')]" %
|
||||
(self._escape(ssl_fp),
|
||||
parser.case_i("VirtualHost")))
|
||||
|
||||
self._copy_create_ssl_vhost_skeleton(nonssl_vhost, ssl_fp)
|
||||
|
||||
# Reload augeas to take into account the new vhost
|
||||
self.aug.load()
|
||||
self.parser.aug.load()
|
||||
# Get Vhost augeas path for new vhost
|
||||
new_matches = self.aug.match("/files%s//* [label()=~regexp('%s')]" %
|
||||
new_matches = self.parser.aug.match("/files%s//* [label()=~regexp('%s')]" %
|
||||
(self._escape(ssl_fp),
|
||||
parser.case_i("VirtualHost")))
|
||||
|
||||
@@ -1118,7 +1157,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Make Augeas aware of the new vhost
|
||||
self.parser.parse_file(ssl_fp)
|
||||
# Try to search again
|
||||
new_matches = self.aug.match(
|
||||
new_matches = self.parser.aug.match(
|
||||
"/files%s//* [label()=~regexp('%s')]" %
|
||||
(self._escape(ssl_fp),
|
||||
parser.case_i("VirtualHost")))
|
||||
@@ -1180,16 +1219,15 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
|
||||
if self.conf("vhost-root") and os.path.exists(self.conf("vhost-root")):
|
||||
fp = os.path.join(os.path.realpath(self.option("vhost_root")),
|
||||
fp = os.path.join(filesystem.realpath(self.option("vhost_root")),
|
||||
os.path.basename(non_ssl_vh_fp))
|
||||
else:
|
||||
# Use non-ssl filepath
|
||||
fp = os.path.realpath(non_ssl_vh_fp)
|
||||
fp = filesystem.realpath(non_ssl_vh_fp)
|
||||
|
||||
if fp.endswith(".conf"):
|
||||
return fp[:-(len(".conf"))] + self.option("le_vhost_ext")
|
||||
else:
|
||||
return fp + self.option("le_vhost_ext")
|
||||
return fp + self.option("le_vhost_ext")
|
||||
|
||||
def _sift_rewrite_rule(self, line):
|
||||
"""Decides whether a line should be copied to a SSL vhost.
|
||||
@@ -1269,8 +1307,8 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"vhost for your HTTPS site located at {1} because they have "
|
||||
"the potential to create redirection loops.".format(
|
||||
vhost.filep, ssl_fp), reporter.MEDIUM_PRIORITY)
|
||||
self.aug.set("/augeas/files%s/mtime" % (self._escape(ssl_fp)), "0")
|
||||
self.aug.set("/augeas/files%s/mtime" % (self._escape(vhost.filep)), "0")
|
||||
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(ssl_fp)), "0")
|
||||
self.parser.aug.set("/augeas/files%s/mtime" % (self._escape(vhost.filep)), "0")
|
||||
|
||||
def _sift_rewrite_rules(self, contents):
|
||||
""" Helper function for _copy_create_ssl_vhost_skeleton to prepare the
|
||||
@@ -1345,7 +1383,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
|
||||
try:
|
||||
span_val = self.aug.span(vhost.path)
|
||||
span_val = self.parser.aug.span(vhost.path)
|
||||
except ValueError:
|
||||
logger.critical("Error while reading the VirtualHost %s from "
|
||||
"file %s", vhost.name, vhost.filep, exc_info=True)
|
||||
@@ -1380,13 +1418,13 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
|
||||
def _update_ssl_vhosts_addrs(self, vh_path):
|
||||
ssl_addrs = set()
|
||||
ssl_addr_p = self.aug.match(vh_path + "/arg")
|
||||
ssl_addr_p = self.parser.aug.match(vh_path + "/arg")
|
||||
|
||||
for addr in ssl_addr_p:
|
||||
old_addr = obj.Addr.fromstring(
|
||||
str(self.parser.get_arg(addr)))
|
||||
ssl_addr = old_addr.get_addr_obj("443")
|
||||
self.aug.set(addr, str(ssl_addr))
|
||||
self.parser.aug.set(addr, str(ssl_addr))
|
||||
ssl_addrs.add(ssl_addr)
|
||||
|
||||
return ssl_addrs
|
||||
@@ -1405,15 +1443,14 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
vh_path, False)) > 1:
|
||||
directive_path = self.parser.find_dir(directive, None,
|
||||
vh_path, False)
|
||||
self.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
|
||||
self.parser.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
|
||||
|
||||
def _remove_directives(self, vh_path, directives):
|
||||
for directive in directives:
|
||||
while len(self.parser.find_dir(directive, None,
|
||||
vh_path, False)) > 0:
|
||||
while self.parser.find_dir(directive, None, vh_path, False):
|
||||
directive_path = self.parser.find_dir(directive, None,
|
||||
vh_path, False)
|
||||
self.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
|
||||
self.parser.aug.remove(re.sub(r"/\w*$", "", directive_path[0]))
|
||||
|
||||
def _add_dummy_ssl_directives(self, vh_path):
|
||||
self.parser.add_dir(vh_path, "SSLCertificateFile",
|
||||
@@ -1452,7 +1489,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
"""
|
||||
matches = self.parser.find_dir(
|
||||
"ServerAlias", start=vh_path, exclude=False)
|
||||
aliases = (self.aug.get(match) for match in matches)
|
||||
aliases = (self.parser.aug.get(match) for match in matches)
|
||||
return self.domain_in_names(aliases, target_name)
|
||||
|
||||
def _add_name_vhost_if_necessary(self, vhost):
|
||||
@@ -1574,9 +1611,9 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
|
||||
:param str domain: domain to enhance
|
||||
:param str enhancement: enhancement type defined in
|
||||
:const:`~certbot.constants.ENHANCEMENTS`
|
||||
:const:`~certbot.plugins.enhancements.ENHANCEMENTS`
|
||||
:param options: options for the enhancement
|
||||
See :const:`~certbot.constants.ENHANCEMENTS`
|
||||
See :const:`~certbot.plugins.enhancements.ENHANCEMENTS`
|
||||
documentation for appropriate parameter.
|
||||
|
||||
:raises .errors.PluginError: If Enhancement is not supported, or if
|
||||
@@ -1635,7 +1672,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
if header_path:
|
||||
pat = '(?:[ "]|^)(strict-transport-security)(?:[ "]|$)'
|
||||
for match in header_path:
|
||||
if re.search(pat, self.aug.get(match).lower()):
|
||||
if re.search(pat, self.parser.aug.get(match).lower()):
|
||||
hsts_dirpath = match
|
||||
if not hsts_dirpath:
|
||||
err_msg = ("Certbot was unable to find the existing HSTS header "
|
||||
@@ -1649,7 +1686,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# Our match statement was for string strict-transport-security, but
|
||||
# we need to update the value instead. The next index is for the value
|
||||
hsts_dirpath = hsts_dirpath.replace("arg[3]", "arg[4]")
|
||||
self.aug.set(hsts_dirpath, hsts_maxage)
|
||||
self.parser.aug.set(hsts_dirpath, hsts_maxage)
|
||||
note_msg = ("Increasing HSTS max-age value to {0} for VirtualHost "
|
||||
"in {1}\n".format(nextstep_value, vhost.filep))
|
||||
logger.debug(note_msg)
|
||||
@@ -1731,7 +1768,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# We'll simply delete the directive, so that we'll have a
|
||||
# consistent OCSP cache path.
|
||||
if stapling_cache_aug_path:
|
||||
self.aug.remove(
|
||||
self.parser.aug.remove(
|
||||
re.sub(r"/\w*$", "", stapling_cache_aug_path[0]))
|
||||
|
||||
self.parser.add_dir_to_ifmodssl(ssl_vhost_aug_path,
|
||||
@@ -1808,7 +1845,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
# "Existing Header directive for virtualhost"
|
||||
pat = '(?:[ "]|^)(%s)(?:[ "]|$)' % (header_substring.lower())
|
||||
for match in header_path:
|
||||
if re.search(pat, self.aug.get(match).lower()):
|
||||
if re.search(pat, self.parser.aug.get(match).lower()):
|
||||
raise errors.PluginEnhancementAlreadyPresent(
|
||||
"Existing %s header" % (header_substring))
|
||||
|
||||
@@ -1935,11 +1972,11 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
constants.REWRITE_HTTPS_ARGS_WITH_END]
|
||||
|
||||
for dir_path, args_paths in rewrite_args_dict.items():
|
||||
arg_vals = [self.aug.get(x) for x in args_paths]
|
||||
arg_vals = [self.parser.aug.get(x) for x in args_paths]
|
||||
|
||||
# Search for past redirection rule, delete it, set the new one
|
||||
if arg_vals in constants.OLD_REWRITE_HTTPS_ARGS:
|
||||
self.aug.remove(dir_path)
|
||||
self.parser.aug.remove(dir_path)
|
||||
self._set_https_redirection_rewrite_rule(vhost)
|
||||
self.save()
|
||||
raise errors.PluginEnhancementAlreadyPresent(
|
||||
@@ -1995,7 +2032,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
|
||||
redirect_filepath = self._write_out_redirect(ssl_vhost, text)
|
||||
|
||||
self.aug.load()
|
||||
self.parser.aug.load()
|
||||
# Make a new vhost data structure and add it to the lists
|
||||
new_vhost = self._create_vhost(parser.get_aug_path(self._escape(redirect_filepath)))
|
||||
self.vhosts.append(new_vhost)
|
||||
@@ -2131,7 +2168,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
vhost.enabled = True
|
||||
return
|
||||
|
||||
def enable_mod(self, mod_name, temp=False): # pylint: disable=unused-argument
|
||||
def enable_mod(self, mod_name, temp=False): # pylint: disable=unused-argument
|
||||
"""Enables module in Apache.
|
||||
|
||||
Both enables and reloads Apache so module is active.
|
||||
@@ -2307,7 +2344,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
Enable the AutoHSTS enhancement for defined domains
|
||||
|
||||
:param _unused_lineage: Certificate lineage object, unused
|
||||
:type _unused_lineage: certbot.storage.RenewableCert
|
||||
:type _unused_lineage: certbot._internal.storage.RenewableCert
|
||||
|
||||
:param domains: List of domains in certificate to enhance
|
||||
:type domains: str
|
||||
@@ -2432,7 +2469,7 @@ class ApacheConfigurator(augeas_configurator.AugeasConfigurator):
|
||||
and changes the HSTS max-age to a high value.
|
||||
|
||||
:param lineage: Certificate lineage object
|
||||
:type lineage: certbot.storage.RenewableCert
|
||||
:type lineage: certbot._internal.storage.RenewableCert
|
||||
"""
|
||||
self._autohsts_fetch_state()
|
||||
if not self._autohsts:
|
||||
|
||||
@@ -9,6 +9,7 @@ MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
|
||||
UPDATED_MOD_SSL_CONF_DIGEST = ".updated-options-ssl-apache-conf-digest.txt"
|
||||
"""Name of the hash of the updated or informed mod_ssl_conf as saved in `IConfig.config_dir`."""
|
||||
|
||||
# NEVER REMOVE A SINGLE HASH FROM THIS LIST UNLESS YOU KNOW EXACTLY WHAT YOU ARE DOING!
|
||||
ALL_SSL_OPTIONS_HASHES = [
|
||||
'2086bca02db48daf93468332543c60ac6acdb6f0b58c7bfdf578a5d47092f82a',
|
||||
'4844d36c9a0f587172d9fa10f4f1c9518e3bcfa1947379f155e16a70a728c21a',
|
||||
@@ -18,6 +19,10 @@ ALL_SSL_OPTIONS_HASHES = [
|
||||
'cfdd7c18d2025836ea3307399f509cfb1ebf2612c87dd600a65da2a8e2f2797b',
|
||||
'80720bd171ccdc2e6b917ded340defae66919e4624962396b992b7218a561791',
|
||||
'c0c022ea6b8a51ecc8f1003d0a04af6c3f2bc1c3ce506b3c2dfc1f11ef931082',
|
||||
'717b0a89f5e4c39b09a42813ac6e747cfbdeb93439499e73f4f70a1fe1473f20',
|
||||
'0fcdc81280cd179a07ec4d29d3595068b9326b455c488de4b09f585d5dafc137',
|
||||
'86cc09ad5415cd6d5f09a947fe2501a9344328b1e8a8b458107ea903e80baa6c',
|
||||
'06675349e457eae856120cdebb564efe546f0b87399f2264baeb41e442c724c7',
|
||||
]
|
||||
"""SHA256 hashes of the contents of previous versions of all versions of MOD_SSL_CONF_SRC"""
|
||||
|
||||
|
||||
@@ -1,14 +1,12 @@
"""Contains UI methods for Apache operations."""
import logging
import os

import zope.component

import certbot.display.util as display_util
from certbot import errors
from certbot import interfaces

import certbot.display.util as display_util

from certbot.compat import os

logger = logging.getLogger(__name__)

@@ -26,7 +24,7 @@ def select_vhost_multiple(vhosts):
|
||||
return list()
|
||||
tags_list = [vhost.display_repr()+"\n" for vhost in vhosts]
|
||||
# Remove the extra newline from the last entry
|
||||
if len(tags_list):
|
||||
if tags_list:
|
||||
tags_list[-1] = tags_list[-1][:-1]
|
||||
code, names = zope.component.getUtility(interfaces.IDisplay).checklist(
|
||||
"Which VirtualHosts would you like to install the wildcard certificate for?",
|
||||
@@ -62,8 +60,7 @@ def select_vhost(domain, vhosts):
|
||||
code, tag = _vhost_menu(domain, vhosts)
|
||||
if code == display_util.OK:
|
||||
return vhosts[tag]
|
||||
else:
|
||||
return None
|
||||
return None
|
||||
|
||||
def _vhost_menu(domain, vhosts):
|
||||
"""Select an appropriate Apache Vhost.
|
||||
@@ -93,7 +90,7 @@ def _vhost_menu(domain, vhosts):
|
||||
for vhost in vhosts:
|
||||
if len(vhost.get_names()) == 1:
|
||||
disp_name = next(iter(vhost.get_names()))
|
||||
elif len(vhost.get_names()) == 0:
|
||||
elif not vhost.get_names():
|
||||
disp_name = ""
|
||||
else:
|
||||
disp_name = "Multiple Names"
|
||||
|
||||
@@ -1,8 +1,13 @@
""" Entry point for Apache Plugin """
# Pylint does not like disutils.version when running inside a venv.
# See: https://github.com/PyCQA/pylint/issues/73
from distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error

from certbot import util

from certbot_apache import configurator
from certbot_apache import override_arch
from certbot_apache import override_fedora
from certbot_apache import override_darwin
from certbot_apache import override_debian
from certbot_apache import override_centos
@@ -11,13 +16,18 @@ from certbot_apache import override_suse

OVERRIDE_CLASSES = {
"arch": override_arch.ArchConfigurator,
"cloudlinux": override_centos.CentOSConfigurator,
"darwin": override_darwin.DarwinConfigurator,
"debian": override_debian.DebianConfigurator,
"ubuntu": override_debian.DebianConfigurator,
"centos": override_centos.CentOSConfigurator,
"centos linux": override_centos.CentOSConfigurator,
"fedora": override_centos.CentOSConfigurator,
"fedora_old": override_centos.CentOSConfigurator,
"fedora": override_fedora.FedoraConfigurator,
"linuxmint": override_debian.DebianConfigurator,
"ol": override_centos.CentOSConfigurator,
"oracle": override_centos.CentOSConfigurator,
"redhatenterpriseserver": override_centos.CentOSConfigurator,
"red hat enterprise linux server": override_centos.CentOSConfigurator,
"rhel": override_centos.CentOSConfigurator,
"amazon": override_centos.CentOSConfigurator,
@@ -25,14 +35,24 @@ OVERRIDE_CLASSES = {
"gentoo base system": override_gentoo.GentooConfigurator,
"opensuse": override_suse.OpenSUSEConfigurator,
"suse": override_suse.OpenSUSEConfigurator,
"sles": override_suse.OpenSUSEConfigurator,
"scientific": override_centos.CentOSConfigurator,
"scientific linux": override_centos.CentOSConfigurator,
}


def get_configurator():
""" Get correct configurator class based on the OS fingerprint """
os_info = util.get_os_info()
os_name, os_version = util.get_os_info()
os_name = os_name.lower()
override_class = None

# Special case for older Fedora versions
if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):
os_name = 'fedora_old'

try:
override_class = OVERRIDE_CLASSES[os_info[0].lower()]
override_class = OVERRIDE_CLASSES[os_name]
except KeyError:
# OS not found in the list
os_like = util.get_systemd_os_like()
@@ -45,4 +65,5 @@ def get_configurator():
override_class = configurator.ApacheConfigurator
return override_class


ENTRYPOINT = get_configurator()

@@ -1,16 +1,20 @@
|
||||
"""A class that performs HTTP-01 challenges for Apache"""
|
||||
import logging
|
||||
import os
|
||||
|
||||
from acme.magic_typing import List, Set # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
from certbot.compat import filesystem
|
||||
from certbot.plugins import common
|
||||
|
||||
from certbot_apache.obj import VirtualHost # pylint: disable=unused-import
|
||||
from certbot_apache.parser import get_aug_path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class ApacheHttp01(common.TLSSNI01):
|
||||
|
||||
class ApacheHttp01(common.ChallengePerformer):
|
||||
"""Class that performs HTTP-01 challenges within the Apache configurator."""
|
||||
|
||||
CONFIG_TEMPLATE22_PRE = """\
|
||||
@@ -165,8 +169,7 @@ class ApacheHttp01(common.TLSSNI01):
|
||||
|
||||
def _set_up_challenges(self):
|
||||
if not os.path.isdir(self.challenge_dir):
|
||||
os.makedirs(self.challenge_dir)
|
||||
os.chmod(self.challenge_dir, 0o755)
|
||||
filesystem.makedirs(self.challenge_dir, 0o755)
|
||||
|
||||
responses = []
|
||||
for achall in self.achalls:
|
||||
@@ -182,7 +185,7 @@ class ApacheHttp01(common.TLSSNI01):
|
||||
self.configurator.reverter.register_file_creation(True, name)
|
||||
with open(name, 'wb') as f:
|
||||
f.write(validation.encode())
|
||||
os.chmod(name, 0o644)
|
||||
filesystem.chmod(name, 0o644)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@ class Addr(common.Addr):
|
||||
def __repr__(self):
|
||||
return "certbot_apache.obj.Addr(" + repr(self.tup) + ")"
|
||||
|
||||
def __hash__(self):
|
||||
def __hash__(self): # pylint: disable=useless-super-delegation
|
||||
# Python 3 requires explicit overridden for __hash__ if __eq__ or
|
||||
# __cmp__ is overridden. See https://bugs.python.org/issue2235
|
||||
return super(Addr, self).__hash__()
|
||||
@@ -47,8 +47,7 @@ class Addr(common.Addr):
|
||||
return 0
|
||||
elif self.get_addr() == "*":
|
||||
return 1
|
||||
else:
|
||||
return 2
|
||||
return 2
|
||||
|
||||
def conflicts(self, addr):
|
||||
r"""Returns if address could conflict with correct function of self.
|
||||
@@ -99,7 +98,7 @@ class Addr(common.Addr):
|
||||
return self.get_addr_obj(port)
|
||||
|
||||
|
||||
class VirtualHost(object): # pylint: disable=too-few-public-methods
|
||||
class VirtualHost(object):
|
||||
"""Represents an Apache Virtualhost.
|
||||
|
||||
:ivar str filep: file path of VH
|
||||
@@ -127,7 +126,6 @@ class VirtualHost(object): # pylint: disable=too-few-public-methods
|
||||
def __init__(self, filep, path, addrs, ssl, enabled, name=None,
|
||||
aliases=None, modmacro=False, ancestor=None):
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
"""Initialize a VH."""
|
||||
self.filep = filep
|
||||
self.path = path
|
||||
|
||||
@@ -1,14 +1,24 @@
|
||||
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
|
||||
import pkg_resources
|
||||
import logging
|
||||
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.errors import MisconfigurationError
|
||||
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import configurator
|
||||
from certbot_apache import parser
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
"""CentOS specific ApacheConfigurator override class"""
|
||||
@@ -33,6 +43,38 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
"certbot_apache", "centos-options-ssl-apache.conf")
|
||||
)
|
||||
|
||||
def config_test(self):
|
||||
"""
|
||||
Override config_test to mitigate configtest error in vanilla installation
|
||||
of mod_ssl in Fedora. The error is caused by non-existent self-signed
|
||||
certificates referenced by the configuration, that would be autogenerated
|
||||
during the first (re)start of httpd.
|
||||
"""
|
||||
|
||||
os_info = util.get_os_info()
|
||||
fedora = os_info[0].lower() == "fedora"
|
||||
|
||||
try:
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
except errors.MisconfigurationError:
|
||||
if fedora:
|
||||
self._try_restart_fedora()
|
||||
else:
|
||||
raise
|
||||
|
||||
def _try_restart_fedora(self):
|
||||
"""
|
||||
Tries to restart httpd using systemctl to generate the self signed keypair.
|
||||
"""
|
||||
|
||||
try:
|
||||
util.run_script(['systemctl', 'restart', 'httpd'])
|
||||
except errors.SubprocessError as err:
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super(CentOSConfigurator, self).config_test()
|
||||
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization in order to support
|
||||
@@ -44,9 +86,86 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return CentOSParser(
|
||||
self.aug, self.option("server_root"), self.option("vhost_root"),
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _deploy_cert(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
"""
|
||||
Override _deploy_cert in order to ensure that the Apache configuration
|
||||
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
|
||||
that was created by Certbot
|
||||
"""
|
||||
super(CentOSConfigurator, self)._deploy_cert(*args, **kwargs)
|
||||
if self.version < (2, 4, 0):
|
||||
self._deploy_loadmodule_ssl_if_needed()
|
||||
|
||||
def _deploy_loadmodule_ssl_if_needed(self):
|
||||
"""
|
||||
Add "LoadModule ssl_module <pre-existing path>" to main httpd.conf if
|
||||
it doesn't exist there already.
|
||||
"""
|
||||
|
||||
loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
correct_ifmods = [] # type: List[str]
|
||||
loadmod_args = [] # type: List[str]
|
||||
loadmod_paths = [] # type: List[str]
|
||||
for m in loadmods:
|
||||
noarg_path = m.rpartition("/")[0]
|
||||
path_args = self.parser.get_all_args(noarg_path)
|
||||
if loadmod_args:
|
||||
if loadmod_args != path_args:
|
||||
msg = ("Certbot encountered multiple LoadModule directives "
|
||||
"for LoadModule ssl_module with differing library paths. "
|
||||
"Please remove or comment out the one(s) that are not in "
|
||||
"use, and run Certbot again.")
|
||||
raise MisconfigurationError(msg)
|
||||
else:
|
||||
loadmod_args = path_args
|
||||
|
||||
if self.parser.not_modssl_ifmodule(noarg_path): # pylint: disable=no-member
|
||||
if self.parser.loc["default"] in noarg_path:
|
||||
# LoadModule already in the main configuration file
|
||||
if ("ifmodule/" in noarg_path.lower() or
|
||||
"ifmodule[1]" in noarg_path.lower()):
|
||||
# It's the first or only IfModule in the file
|
||||
return
|
||||
# Populate the list of known !mod_ssl.c IfModules
|
||||
nodir_path = noarg_path.rpartition("/directive")[0]
|
||||
correct_ifmods.append(nodir_path)
|
||||
else:
|
||||
loadmod_paths.append(noarg_path)
|
||||
|
||||
if not loadmod_args:
|
||||
# Do not try to enable mod_ssl
|
||||
return
|
||||
|
||||
# Force creation as the directive wasn't found from the beginning of
|
||||
# httpd.conf
|
||||
rootconf_ifmod = self.parser.create_ifmod(
|
||||
parser.get_aug_path(self.parser.loc["default"]),
|
||||
"!mod_ssl.c", beginning=True)
|
||||
# parser.get_ifmod returns a path postfixed with "/", remove that
|
||||
self.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", loadmod_args)
|
||||
correct_ifmods.append(rootconf_ifmod[:-1])
|
||||
self.save_notes += "Added LoadModule ssl_module to main configuration.\n"
|
||||
|
||||
# Wrap LoadModule mod_ssl inside of <IfModule !mod_ssl.c> if it's not
|
||||
# configured like this already.
|
||||
for loadmod_path in loadmod_paths:
|
||||
nodir_path = loadmod_path.split("/directive")[0]
|
||||
# Remove the old LoadModule directive
|
||||
self.parser.aug.remove(loadmod_path)
|
||||
|
||||
# Create a new IfModule !mod_ssl.c if not already found on path
|
||||
ssl_ifmod = self.parser.get_ifmod(nodir_path, "!mod_ssl.c",
|
||||
beginning=True)[:-1]
|
||||
if ssl_ifmod not in correct_ifmods:
|
||||
self.parser.add_dir(ssl_ifmod, "LoadModule", loadmod_args)
|
||||
correct_ifmods.append(ssl_ifmod)
|
||||
self.save_notes += ("Wrapped pre-existing LoadModule ssl_module "
|
||||
"inside of <IfModule !mod_ssl> block.\n")
|
||||
|
||||
|
||||
class CentOSParser(parser.ApacheParser):
|
||||
"""CentOS specific ApacheParser override class"""
|
||||
@@ -64,5 +183,35 @@ class CentOSParser(parser.ApacheParser):
|
||||
def parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from CentOS configuration file """
|
||||
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
|
||||
for k in defines.keys():
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
|
||||
def not_modssl_ifmodule(self, path):
|
||||
"""Checks if the provided Augeas path has argument !mod_ssl"""
|
||||
|
||||
if "ifmodule" not in path.lower():
|
||||
return False
|
||||
|
||||
# Trim the path to the last ifmodule
|
||||
workpath = path.lower()
|
||||
while workpath:
|
||||
# Get path to the last IfModule (ignore the tail)
|
||||
parts = workpath.rpartition("ifmodule")
|
||||
|
||||
if not parts[0]:
|
||||
# IfModule not found
|
||||
break
|
||||
ifmod_path = parts[0] + parts[1]
|
||||
# Check if ifmodule had an index
|
||||
if parts[2].startswith("["):
|
||||
# Append the index from tail
|
||||
ifmod_path += parts[2].partition("/")[0]
|
||||
# Get the original path trimmed to correct length
|
||||
# This is required to preserve cases
|
||||
ifmod_real_path = path[0:len(ifmod_path)]
|
||||
if "!mod_ssl.c" in self.get_all_args(ifmod_real_path):
|
||||
return True
|
||||
# Set the workpath to the heading part
|
||||
workpath = parts[0]
|
||||
|
||||
return False
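# A hedged standalone sketch (illustrative only, not part of the patch): the
# loop above walks outward from the innermost <IfModule> by repeatedly
# trimming the lowercased Augeas path. The single trimming step, on a
# made-up path, looks like this:
def last_ifmodule_prefix(path):
    """Trim an Augeas path down to its innermost IfModule node, preserving case."""
    head, sep, tail = path.lower().rpartition("ifmodule")
    if not sep:
        return None  # the path contains no IfModule segment at all
    prefix = head + sep
    if tail.startswith("["):
        # Keep the [N] index that belongs to this IfModule node
        prefix += tail.partition("/")[0]
    # Slice the original, untouched path so the original casing is preserved
    return path[:len(prefix)]

# last_ifmodule_prefix("/files/etc/httpd/conf/httpd.conf/IfModule[2]/directive[1]")
# -> "/files/etc/httpd/conf/httpd.conf/IfModule[2]"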
|
||||
@@ -1,19 +1,21 @@
|
||||
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
|
||||
import logging
|
||||
import os
|
||||
import pkg_resources
|
||||
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import configurator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
"""Debian specific ApacheConfigurator override class"""
|
||||
@@ -51,7 +53,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
|
||||
"""
|
||||
if vhost.enabled:
|
||||
return
|
||||
return None
|
||||
|
||||
enabled_path = ("%s/sites-enabled/%s" %
|
||||
(self.parser.root,
|
||||
@@ -64,11 +66,11 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
try:
|
||||
os.symlink(vhost.filep, enabled_path)
|
||||
except OSError as err:
|
||||
if os.path.islink(enabled_path) and os.path.realpath(
|
||||
if os.path.islink(enabled_path) and filesystem.realpath(
|
||||
enabled_path) == vhost.filep:
|
||||
# Already in shape
|
||||
vhost.enabled = True
|
||||
return
|
||||
return None
|
||||
else:
|
||||
logger.warning(
|
||||
"Could not symlink %s to %s, got error: %s", enabled_path,
|
||||
@@ -81,9 +83,9 @@ class DebianConfigurator(configurator.ApacheConfigurator):
|
||||
vhost.enabled = True
|
||||
logger.info("Enabling available site: %s", vhost.filep)
|
||||
self.save_notes += "Enabled site %s\n" % vhost.filep
|
||||
return None
|
||||
|
||||
def enable_mod(self, mod_name, temp=False):
|
||||
# pylint: disable=unused-argument
|
||||
"""Enables module in Apache.
|
||||
|
||||
Both enables and reloads Apache so module is active.
|
||||
|
||||
98
certbot-apache/certbot_apache/override_fedora.py
Normal file
@@ -0,0 +1,98 @@
|
||||
""" Distribution specific override class for Fedora 29+ """
|
||||
import pkg_resources
|
||||
import zope.interface
|
||||
|
||||
from certbot import errors
|
||||
from certbot import interfaces
|
||||
from certbot import util
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import configurator
|
||||
from certbot_apache import parser
|
||||
|
||||
|
||||
@zope.interface.provider(interfaces.IPluginFactory)
|
||||
class FedoraConfigurator(configurator.ApacheConfigurator):
|
||||
"""Fedora 29+ specific ApacheConfigurator override class"""
|
||||
|
||||
OS_DEFAULTS = dict(
|
||||
server_root="/etc/httpd",
|
||||
vhost_root="/etc/httpd/conf.d",
|
||||
vhost_files="*.conf",
|
||||
logs_root="/var/log/httpd",
|
||||
ctl="httpd",
|
||||
version_cmd=['httpd', '-v'],
|
||||
restart_cmd=['apachectl', 'graceful'],
|
||||
restart_cmd_alt=['apachectl', 'restart'],
|
||||
conftest_cmd=['apachectl', 'configtest'],
|
||||
enmod=None,
|
||||
dismod=None,
|
||||
le_vhost_ext="-le-ssl.conf",
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/httpd/conf.d",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
# TODO: eventually, newer versions of Fedora will need their own config
|
||||
"certbot_apache", "centos-options-ssl-apache.conf")
|
||||
)
|
||||
|
||||
def config_test(self):
|
||||
"""
|
||||
Override config_test to mitigate configtest error in vanilla installation
|
||||
of mod_ssl in Fedora. The error is caused by non-existent self-signed
|
||||
certificates referenced by the configuration, which would be autogenerated
|
||||
during the first (re)start of httpd.
|
||||
"""
|
||||
try:
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
except errors.MisconfigurationError:
|
||||
self._try_restart_fedora()
|
||||
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return FedoraParser(
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def _try_restart_fedora(self):
|
||||
"""
|
||||
Tries to restart httpd using systemctl to generate the self signed keypair.
|
||||
"""
|
||||
try:
|
||||
util.run_script(['systemctl', 'restart', 'httpd'])
|
||||
except errors.SubprocessError as err:
|
||||
raise errors.MisconfigurationError(str(err))
|
||||
|
||||
# Finish with actual config check to see if systemctl restart helped
|
||||
super(FedoraConfigurator, self).config_test()
|
||||
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Override the options dictionary initialization to keep using apachectl
|
||||
instead of httpd, taking advantage of the wrapper script shipped in newer versions
|
||||
of Fedora to restart httpd.
|
||||
"""
|
||||
super(FedoraConfigurator, self)._prepare_options()
|
||||
self.options["restart_cmd"][0] = 'apachectl'
|
||||
self.options["restart_cmd_alt"][0] = 'apachectl'
|
||||
self.options["conftest_cmd"][0] = 'apachectl'
|
||||
|
||||
|
||||
class FedoraParser(parser.ApacheParser):
|
||||
"""Fedora 29+ specific ApacheParser override class"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Fedora 29+ specific configuration file for Apache
|
||||
self.sysconfig_filep = "/etc/sysconfig/httpd"
|
||||
super(FedoraParser, self).__init__(*args, **kwargs)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
""" Override for update_runtime_variables for custom parsing """
|
||||
# Opportunistic, works if SELinux not enforced
|
||||
super(FedoraParser, self).update_runtime_variables()
|
||||
self._parse_sysconfig_var()
|
||||
|
||||
def _parse_sysconfig_var(self):
|
||||
""" Parses Apache CLI options from Fedora configuration file """
|
||||
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
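# A hedged, self-contained sketch (not part of the patch) of the mapping
# _parse_sysconfig_var() produces, assuming /etc/sysconfig/httpd carries -D
# flags in an OPTIONS="..." line; the file contents and the helper below are
# hypothetical stand-ins for apache_util.parse_define_file().
import shlex

def parse_options_defines(line):
    """Pull -DNAME[=VALUE] pairs out of an OPTIONS="..." sysconfig line."""
    value = line.split("=", 1)[1].strip().strip('"')
    defines = {}
    for token in shlex.split(value):
        if token.startswith("-D"):
            name, _, val = token[2:].partition("=")
            defines[name] = val
    return defines

# parse_options_defines('OPTIONS="-DMODSEC -DSERVERNAME=www.example.org"')
# -> {'MODSEC': '', 'SERVERNAME': 'www.example.org'}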
@@ -44,7 +44,7 @@ class GentooConfigurator(configurator.ApacheConfigurator):
|
||||
def get_parser(self):
|
||||
"""Initializes the ApacheParser"""
|
||||
return GentooParser(
|
||||
self.aug, self.option("server_root"), self.option("vhost_root"),
|
||||
self.option("server_root"), self.option("vhost_root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
|
||||
@@ -64,7 +64,7 @@ class GentooParser(parser.ApacheParser):
|
||||
""" Parses Apache CLI options from Gentoo configuration file """
|
||||
defines = apache_util.parse_define_file(self.apacheconfig_filep,
|
||||
"APACHE2_OPTS")
|
||||
for k in defines.keys():
|
||||
for k in defines:
|
||||
self.variables[k] = defines[k]
|
||||
|
||||
def update_modules(self):
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
@@ -10,13 +9,16 @@ import sys
|
||||
import six
|
||||
|
||||
from acme.magic_typing import Dict, List, Set # pylint: disable=unused-import, no-name-in-module
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import constants
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApacheParser(object):
|
||||
# pylint: disable=too-many-public-methods
|
||||
"""Class handles the fine details of parsing the Apache Configuration.
|
||||
|
||||
.. todo:: Make parsing general... remove sites-available etc...
|
||||
@@ -31,7 +33,7 @@ class ApacheParser(object):
|
||||
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
|
||||
fnmatch_chars = set(["*", "?", "\\", "[", "]"])
|
||||
|
||||
def __init__(self, aug, root, vhostroot=None, version=(2, 4),
|
||||
def __init__(self, root, vhostroot=None, version=(2, 4),
|
||||
configurator=None):
|
||||
# Note: Order is important here.
|
||||
|
||||
@@ -40,11 +42,20 @@ class ApacheParser(object):
|
||||
# issues with aug.load() after adding new files / defines to parse tree
|
||||
self.configurator = configurator
|
||||
|
||||
# Initialize augeas
|
||||
self.aug = None
|
||||
self.init_augeas()
|
||||
|
||||
if not self.check_aug_version():
|
||||
raise errors.NotSupportedError(
|
||||
"Apache plugin support requires libaugeas0 and augeas-lenses "
|
||||
"version 1.2.0 or higher, please make sure you have you have "
|
||||
"those installed.")
|
||||
|
||||
self.modules = set() # type: Set[str]
|
||||
self.parser_paths = {} # type: Dict[str, List[str]]
|
||||
self.variables = {} # type: Dict[str, str]
|
||||
|
||||
self.aug = aug
|
||||
# Find configuration root and make sure augeas can parse it.
|
||||
self.root = os.path.abspath(root)
|
||||
self.loc = {"root": self._find_config_root()}
|
||||
@@ -76,6 +87,146 @@ class ApacheParser(object):
|
||||
if self.find_dir("Define", exclude=False):
|
||||
raise errors.PluginError("Error parsing runtime variables")
|
||||
|
||||
def init_augeas(self):
|
||||
""" Initialize the actual Augeas instance """
|
||||
|
||||
try:
|
||||
import augeas
|
||||
except ImportError: # pragma: no cover
|
||||
raise errors.NoInstallationError("Problem in Augeas installation")
|
||||
|
||||
self.aug = augeas.Augeas(
|
||||
# specify a directory to load our preferred lens from
|
||||
loadpath=constants.AUGEAS_LENS_DIR,
|
||||
# Do not save backup (we do it ourselves), do not load
|
||||
# anything by default
|
||||
flags=(augeas.Augeas.NONE |
|
||||
augeas.Augeas.NO_MODL_AUTOLOAD |
|
||||
augeas.Augeas.ENABLE_SPAN))
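# A hedged standalone sketch (not part of the patch) of what these flags buy
# us: with NO_MODL_AUTOLOAD nothing is parsed until a transform is declared
# explicitly, so only the Apache files we care about are loaded. The config
# path below is hypothetical.
import augeas

aug = augeas.Augeas(flags=(augeas.Augeas.NO_MODL_AUTOLOAD |
                           augeas.Augeas.ENABLE_SPAN))
# Declare a transform by hand: parse one file with the Httpd lens
aug.set("/augeas/load/Httpd/lens", "Httpd.lns")
aug.set("/augeas/load/Httpd/incl", "/etc/httpd/conf/httpd.conf")
aug.load()
# Parsed directives now show up as nodes under /files/...
print(aug.match("/files/etc/httpd/conf/httpd.conf/directive"))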
|
||||
def check_parsing_errors(self, lens):
|
||||
"""Verify Augeas can parse all of the lens files.
|
||||
|
||||
:param str lens: lens to check for errors
|
||||
|
||||
:raises .errors.PluginError: If there has been an error in parsing with
|
||||
the specified lens.
|
||||
|
||||
"""
|
||||
error_files = self.aug.match("/augeas//error")
|
||||
|
||||
for path in error_files:
|
||||
# Check to see if it was an error resulting from the use of
|
||||
# the httpd lens
|
||||
lens_path = self.aug.get(path + "/lens")
|
||||
# As aug.get may return null
|
||||
if lens_path and lens in lens_path:
|
||||
msg = (
|
||||
"There has been an error in parsing the file {0} on line {1}: "
|
||||
"{2}".format(
|
||||
# Strip off /augeas/files and /error
|
||||
path[13:len(path) - 6],
|
||||
self.aug.get(path + "/line"),
|
||||
self.aug.get(path + "/message")))
|
||||
raise errors.PluginError(msg)
|
||||
|
||||
def check_aug_version(self):
|
||||
""" Checks that we have recent enough version of libaugeas.
|
||||
If augeas version is recent enough, it will support case insensitive
|
||||
regexp matching"""
|
||||
|
||||
self.aug.set("/test/path/testing/arg", "aRgUMeNT")
|
||||
try:
|
||||
matches = self.aug.match(
|
||||
"/test//*[self::arg=~regexp('argument', 'i')]")
|
||||
except RuntimeError:
|
||||
self.aug.remove("/test/path")
|
||||
return False
|
||||
self.aug.remove("/test/path")
|
||||
return matches
|
||||
|
||||
def unsaved_files(self):
|
||||
"""Lists files that have modified Augeas DOM but the changes have not
|
||||
been written to the filesystem yet, used by `self.save()` and
|
||||
ApacheConfigurator to check the file state.
|
||||
|
||||
:raises .errors.PluginError: If there was an error in Augeas, in
|
||||
an attempt to save the configuration, or an error creating a
|
||||
checkpoint
|
||||
|
||||
:returns: `set` of unsaved files
|
||||
"""
|
||||
save_state = self.aug.get("/augeas/save")
|
||||
self.aug.set("/augeas/save", "noop")
|
||||
# Existing Errors
|
||||
ex_errs = self.aug.match("/augeas//error")
|
||||
try:
|
||||
# This is a noop save
|
||||
self.aug.save()
|
||||
except (RuntimeError, IOError):
|
||||
self._log_save_errors(ex_errs)
|
||||
# Erase Save Notes
|
||||
self.configurator.save_notes = ""
|
||||
raise errors.PluginError(
|
||||
"Error saving files, check logs for more info.")
|
||||
|
||||
# Return the original save method
|
||||
self.aug.set("/augeas/save", save_state)
|
||||
|
||||
# Retrieve list of modified files
|
||||
# Note: Noop saves can cause the file to be listed twice, I used a
|
||||
# set to remove this possibility. This is a known augeas 0.10 error.
|
||||
save_paths = self.aug.match("/augeas/events/saved")
|
||||
|
||||
save_files = set()
|
||||
if save_paths:
|
||||
for path in save_paths:
|
||||
save_files.add(self.aug.get(path)[6:])
|
||||
return save_files
|
||||
|
||||
def ensure_augeas_state(self):
|
||||
"""Makes sure that all Augeas dom changes are written to files to avoid
|
||||
loss of configuration directives when doing additional augeas parsing,
|
||||
which could otherwise trigger an augeas.load() that resets the DOM
|
||||
"""
|
||||
|
||||
if self.unsaved_files():
|
||||
self.configurator.save_notes += "(autosave)"
|
||||
self.configurator.save()
|
||||
|
||||
def save(self, save_files):
|
||||
"""Saves all changes to the configuration files.
|
||||
|
||||
save() is called from ApacheConfigurator to handle the parser specific
|
||||
tasks of saving.
|
||||
|
||||
:param list save_files: list of strings of file paths that we need to save.
|
||||
|
||||
"""
|
||||
self.configurator.save_notes = ""
|
||||
self.aug.save()
|
||||
|
||||
# Force reload if files were modified
|
||||
# This is needed to recalculate augeas directive span
|
||||
if save_files:
|
||||
for sf in save_files:
|
||||
self.aug.remove("/files/"+sf)
|
||||
self.aug.load()
|
||||
|
||||
def _log_save_errors(self, ex_errs):
|
||||
"""Log errors due to bad Augeas save.
|
||||
|
||||
:param list ex_errs: Existing errors before save
|
||||
|
||||
"""
|
||||
# Check for the root of save problems
|
||||
new_errs = self.aug.match("/augeas//error")
|
||||
# logger.error("During Save - %s", mod_conf)
|
||||
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
|
||||
", ".join(err[13:len(err) - 6] for err in new_errs
|
||||
# Only new errors caused by recent save
|
||||
if err not in ex_errs), self.configurator.save_notes)
|
||||
|
||||
def add_include(self, main_config, inc_path):
|
||||
"""Add Include for a new configuration file if one does not exist
|
||||
|
||||
@@ -83,7 +234,7 @@ class ApacheParser(object):
|
||||
:param str inc_path: path of file to include
|
||||
|
||||
"""
|
||||
if len(self.find_dir(case_i("Include"), inc_path)) == 0:
|
||||
if not self.find_dir(case_i("Include"), inc_path):
|
||||
logger.debug("Adding Include %s to %s",
|
||||
inc_path, get_aug_path(main_config))
|
||||
self.add_dir(
|
||||
@@ -134,7 +285,7 @@ class ApacheParser(object):
|
||||
mods.add(os.path.basename(mod_filename)[:-2] + "c")
|
||||
else:
|
||||
logger.debug("Could not read LoadModule directive from " +
|
||||
"Augeas path: {0}".format(match_name[6:]))
|
||||
"Augeas path: %s", match_name[6:])
|
||||
self.modules.update(mods)
|
||||
|
||||
def update_runtime_variables(self):
|
||||
@@ -224,8 +375,8 @@ class ApacheParser(object):
|
||||
"Error running command %s for runtime parameters!%s",
|
||||
command, os.linesep)
|
||||
raise errors.MisconfigurationError(
|
||||
"Error accessing loaded Apache parameters: %s",
|
||||
command)
|
||||
"Error accessing loaded Apache parameters: {0}".format(
|
||||
command))
|
||||
# Small errors that do not impede
|
||||
if proc.returncode != 0:
|
||||
logger.warning("Error in checking parameter list: %s", stderr)
|
||||
@@ -251,12 +402,12 @@ class ApacheParser(object):
|
||||
"""
|
||||
filtered = []
|
||||
if args == 1:
|
||||
for i in range(len(matches)):
|
||||
if matches[i].endswith("/arg"):
|
||||
for i, match in enumerate(matches):
|
||||
if match.endswith("/arg"):
|
||||
filtered.append(matches[i][:-4])
|
||||
else:
|
||||
for i in range(len(matches)):
|
||||
if matches[i].endswith("/arg[%d]" % args):
|
||||
for i, match in enumerate(matches):
|
||||
if match.endswith("/arg[%d]" % args):
|
||||
# Make sure we don't cause an IndexError (end of list)
|
||||
# Check to make sure arg + 1 doesn't exist
|
||||
if (i == (len(matches) - 1) or
|
||||
@@ -281,7 +432,7 @@ class ApacheParser(object):
|
||||
"""
|
||||
# TODO: Add error checking code... does the path given even exist?
|
||||
# Does it throw exceptions?
|
||||
if_mod_path = self._get_ifmod(aug_conf_path, "mod_ssl.c")
|
||||
if_mod_path = self.get_ifmod(aug_conf_path, "mod_ssl.c")
|
||||
# IfModule can have only one valid argument, so append after
|
||||
self.aug.insert(if_mod_path + "arg", "directive", False)
|
||||
nvh_path = if_mod_path + "directive[1]"
|
||||
@@ -292,22 +443,54 @@ class ApacheParser(object):
|
||||
for i, arg in enumerate(args):
|
||||
self.aug.set("%s/arg[%d]" % (nvh_path, i + 1), arg)
|
||||
|
||||
def _get_ifmod(self, aug_conf_path, mod):
|
||||
def get_ifmod(self, aug_conf_path, mod, beginning=False):
|
||||
"""Returns the path to <IfMod mod> and creates one if it doesn't exist.
|
||||
|
||||
:param str aug_conf_path: Augeas configuration path
|
||||
:param str mod: module ie. mod_ssl.c
|
||||
:param bool beginning: Whether the IfModule should be created at the beginning
of the augeas path DOM tree.
|
||||
|
||||
:returns: Augeas path of the requested IfModule directive that pre-existed
|
||||
or was created during the process. The path may be dynamic,
|
||||
i.e. .../IfModule[last()]
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
if_mods = self.aug.match(("%s/IfModule/*[self::arg='%s']" %
|
||||
(aug_conf_path, mod)))
|
||||
if len(if_mods) == 0:
|
||||
self.aug.set("%s/IfModule[last() + 1]" % aug_conf_path, "")
|
||||
self.aug.set("%s/IfModule[last()]/arg" % aug_conf_path, mod)
|
||||
if_mods = self.aug.match(("%s/IfModule/*[self::arg='%s']" %
|
||||
(aug_conf_path, mod)))
|
||||
if not if_mods:
|
||||
return self.create_ifmod(aug_conf_path, mod, beginning)
|
||||
|
||||
# Strip off "arg" at end of first ifmod path
|
||||
return if_mods[0][:len(if_mods[0]) - 3]
|
||||
return if_mods[0].rpartition("arg")[0]
|
||||
|
||||
def create_ifmod(self, aug_conf_path, mod, beginning=False):
|
||||
"""Creates a new <IfMod mod> and returns its path.
|
||||
|
||||
:param str aug_conf_path: Augeas configuration path
|
||||
:param str mod: module ie. mod_ssl.c
|
||||
:param bool beginning: Whether the IfModule should be created at the beginning
of the augeas path DOM tree.
|
||||
|
||||
:returns: Augeas path of the newly created IfModule directive.
|
||||
The path may be dynamic, i.e. .../IfModule[last()]
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
if beginning:
|
||||
c_path_arg = "{}/IfModule[1]/arg".format(aug_conf_path)
|
||||
# Insert IfModule before the first directive
|
||||
self.aug.insert("{}/directive[1]".format(aug_conf_path),
|
||||
"IfModule", True)
|
||||
retpath = "{}/IfModule[1]/".format(aug_conf_path)
|
||||
else:
|
||||
c_path = "{}/IfModule[last() + 1]".format(aug_conf_path)
|
||||
c_path_arg = "{}/IfModule[last()]/arg".format(aug_conf_path)
|
||||
self.aug.set(c_path, "")
|
||||
retpath = "{}/IfModule[last()]/".format(aug_conf_path)
|
||||
self.aug.set(c_path_arg, mod)
|
||||
return retpath
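# Hedged usage sketch (not part of the patch), mirroring how the CentOS
# override and the tests drive this API. Note that both get_ifmod() and
# create_ifmod() return the path with a trailing "/", which callers strip:
def ensure_loadmodule_ssl(apache_parser, main_conf_aug_path):
    """Add "LoadModule ssl_module ..." inside an <IfModule !mod_ssl.c> block
    at the top of the given configuration file (Augeas path is hypothetical)."""
    ifmod = apache_parser.get_ifmod(main_conf_aug_path, "!mod_ssl.c",
                                    beginning=True)
    # Strip the trailing "/" before handing the path to add_dir()
    apache_parser.add_dir(ifmod[:-1], "LoadModule",
                          ["ssl_module", "modules/mod_ssl.so"])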
|
||||
def add_dir(self, aug_conf_path, directive, args):
|
||||
"""Appends directive to the end fo the file given by aug_conf_path.
|
||||
@@ -453,6 +636,20 @@ class ApacheParser(object):
|
||||
|
||||
return ordered_matches
|
||||
|
||||
def get_all_args(self, match):
|
||||
"""
|
||||
Tries to fetch all arguments for a directive. See get_arg.
|
||||
|
||||
Note that if match is an ancestor node, it returns all names of
|
||||
child directives as well as the list of arguments.
|
||||
|
||||
"""
|
||||
|
||||
if match[-1] != "/":
|
||||
match = match+"/"
|
||||
allargs = self.aug.match(match + '*')
|
||||
return [self.get_arg(arg) for arg in allargs]
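# Hedged usage sketch (not part of the patch), echoing the tests: find_dir()
# returns paths ending in "/arg[N]" (7 characters), so callers strip that
# suffix before asking for every argument of the directive:
def ssl_loadmodule_args(apache_parser):
    """Return the argument list of the first "LoadModule ssl_module" found."""
    matches = apache_parser.find_dir("LoadModule", "ssl_module", exclude=False)
    if not matches:
        return []
    directive_path = matches[0][:-7]  # strip the trailing "/arg[1]"
    return apache_parser.get_all_args(directive_path)
    # e.g. ["ssl_module", "modules/mod_ssl.so"]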
|
||||
def get_arg(self, match):
|
||||
"""Uses augeas.get to get argument value and interprets result.
|
||||
|
||||
@@ -596,9 +793,8 @@ class ApacheParser(object):
|
||||
if sys.version_info < (3, 6):
|
||||
# This strips off final /Z(?ms)
|
||||
return fnmatch.translate(clean_fn_match)[:-7]
|
||||
else: # pragma: no cover
|
||||
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
|
||||
return fnmatch.translate(clean_fn_match)[4:-3]
|
||||
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
|
||||
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
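# What the version-dependent slicing above strips, shown on a concrete
# pattern (hedged; exact output can vary slightly between 3.x releases):
#   Python < 3.6:  fnmatch.translate("*.conf") == ".*\.conf\Z(?ms)"
#                  -> [:-7] drops the trailing "\Z(?ms)"
#   Python >= 3.6: fnmatch.translate("*.conf") == "(?s:.*\.conf)\Z"
#                  -> [4:-3] drops the "(?s:" prefix and the ")\Z" suffix
import fnmatch
print(fnmatch.translate("*.conf"))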
|
||||
def parse_file(self, filepath):
|
||||
"""Parse file with Augeas
|
||||
@@ -612,8 +808,7 @@ class ApacheParser(object):
|
||||
use_new, remove_old = self._check_path_actions(filepath)
|
||||
# Ensure that we have the latest Augeas DOM state on disk before
|
||||
# calling aug.load() which reloads the state from disk
|
||||
if self.configurator:
|
||||
self.configurator.ensure_augeas_state()
|
||||
self.ensure_augeas_state()
|
||||
# Test if augeas included file for Httpd.lens
|
||||
# Note: This works for augeas globs, ie. *.conf
|
||||
if use_new:
|
||||
@@ -680,10 +875,7 @@ class ApacheParser(object):
|
||||
use_new = False
|
||||
else:
|
||||
use_new = True
|
||||
if new_file_match == "*":
|
||||
remove_old = True
|
||||
else:
|
||||
remove_old = False
|
||||
remove_old = new_file_match == "*"
|
||||
except KeyError:
|
||||
use_new = True
|
||||
remove_old = False
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
This executable script wraps the apache-conf-test bash script, in order to set up a pebble instance
|
||||
before its execution. Directory URL is passed through the SERVER environment variable.
|
||||
"""
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from certbot_integration_tests.utils import acme_server
|
||||
|
||||
SCRIPT_DIRNAME = os.path.dirname(__file__)
|
||||
|
||||
|
||||
def main(args=None):
|
||||
if not args:
|
||||
args = sys.argv[1:]
|
||||
with acme_server.ACMEServer('pebble', [], False) as acme_xdist:
|
||||
environ = os.environ.copy()
|
||||
environ['SERVER'] = acme_xdist['directory_url']
|
||||
command = [os.path.join(SCRIPT_DIRNAME, 'apache-conf-test')]
|
||||
command.extend(args)
|
||||
return subprocess.call(command, env=environ)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@@ -1,4 +1,4 @@
|
||||
# pylint: disable=too-many-public-methods,too-many-lines
|
||||
# pylint: disable=too-many-lines
|
||||
"""Test for certbot_apache.configurator AutoHSTS functionality"""
|
||||
import re
|
||||
import unittest
|
||||
@@ -35,8 +35,9 @@ class AutoHSTSTest(util.ApacheTest):
|
||||
pat = '(?:[ "]|^)(strict-transport-security)(?:[ "]|$)'
|
||||
for head in header_path:
|
||||
if re.search(pat, self.config.parser.aug.get(head).lower()):
|
||||
return self.config.parser.aug.get(head.replace("arg[3]",
|
||||
"arg[4]"))
|
||||
return self.config.parser.aug.get(
|
||||
head.replace("arg[3]", "arg[4]"))
|
||||
return None # pragma: no cover
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.restart")
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.enable_mod")
|
||||
|
||||
222
certbot-apache/certbot_apache/tests/centos6_test.py
Normal file
@@ -0,0 +1,222 @@
|
||||
"""Test for certbot_apache.configurator for CentOS 6 overrides"""
|
||||
import unittest
|
||||
|
||||
from certbot.compat import os
|
||||
from certbot.errors import MisconfigurationError
|
||||
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import override_centos
|
||||
from certbot_apache import parser
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
def get_vh_truth(temp_dir, config_name):
|
||||
"""Return the ground truth for the specified directory."""
|
||||
prefix = os.path.join(
|
||||
temp_dir, config_name, "httpd/conf.d")
|
||||
|
||||
aug_pre = "/files" + prefix
|
||||
vh_truth = [
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "test.example.com.conf"),
|
||||
os.path.join(aug_pre, "test.example.com.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("*:80")]),
|
||||
False, True, "test.example.com"),
|
||||
obj.VirtualHost(
|
||||
os.path.join(prefix, "ssl.conf"),
|
||||
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
|
||||
set([obj.Addr.fromstring("_default_:443")]),
|
||||
True, True, None)
|
||||
]
|
||||
return vh_truth
|
||||
|
||||
class CentOS6Tests(util.ApacheTest):
|
||||
"""Tests for CentOS 6"""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
test_dir = "centos6_apache/apache"
|
||||
config_root = "centos6_apache/apache/httpd"
|
||||
vhost_root = "centos6_apache/apache/httpd/conf.d"
|
||||
super(CentOS6Tests, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
version=(2, 2, 15), os_info="centos")
|
||||
self.vh_truth = get_vh_truth(
|
||||
self.temp_dir, "centos6_apache/apache")
|
||||
|
||||
def test_get_parser(self):
|
||||
self.assertTrue(isinstance(self.config.parser,
|
||||
override_centos.CentOSParser))
|
||||
|
||||
def test_get_virtual_hosts(self):
|
||||
"""Make sure all vhosts are being properly found."""
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 2)
|
||||
found = 0
|
||||
|
||||
for vhost in vhs:
|
||||
for centos_truth in self.vh_truth:
|
||||
if vhost == centos_truth:
|
||||
found += 1
|
||||
break
|
||||
else:
|
||||
raise Exception("Missed: %s" % vhost) # pragma: no cover
|
||||
self.assertEqual(found, 2)
|
||||
|
||||
def test_loadmod_default(self):
|
||||
ssl_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
self.assertEqual(len(ssl_loadmods), 1)
|
||||
# Make sure the LoadModule ssl_module is in ssl.conf (default)
|
||||
self.assertTrue("ssl.conf" in ssl_loadmods[0])
|
||||
# ...and that it's not inside of <IfModule>
|
||||
self.assertFalse("IfModule" in ssl_loadmods[0])
|
||||
|
||||
# Get the example vhost
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.config.save()
|
||||
|
||||
post_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
# We should now have LoadModule ssl_module in root conf and ssl.conf
|
||||
self.assertEqual(len(post_loadmods), 2)
|
||||
for lm in post_loadmods:
|
||||
# lm[:-7] removes "/arg[#]" from the path
|
||||
arguments = self.config.parser.get_all_args(lm[:-7])
|
||||
self.assertEqual(arguments, ["ssl_module", "modules/mod_ssl.so"])
|
||||
# ...and both of them should be wrapped in <IfModule !mod_ssl.c>
|
||||
# lm[:-17] strips off /directive/arg[1] from the path.
|
||||
ifmod_args = self.config.parser.get_all_args(lm[:-17])
|
||||
self.assertTrue("!mod_ssl.c" in ifmod_args)
|
||||
|
||||
def test_loadmod_multiple(self):
|
||||
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
# Adds another LoadModule to main httpd.conf in addition to ssl.conf
|
||||
self.config.parser.add_dir(self.config.parser.loc["default"], "LoadModule",
|
||||
sslmod_args)
|
||||
self.config.save()
|
||||
pre_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
# LoadModules are not within IfModule blocks
|
||||
self.assertFalse(any(["ifmodule" in m.lower() for m in pre_loadmods]))
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
post_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", exclude=False)
|
||||
|
||||
for mod in post_loadmods:
|
||||
self.assertTrue(self.config.parser.not_modssl_ifmodule(mod)) #pylint: disable=no-member
|
||||
|
||||
def test_loadmod_rootconf_exists(self):
|
||||
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
rootconf_ifmod = self.config.parser.get_ifmod(
|
||||
parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
"!mod_ssl.c", beginning=True)
|
||||
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
|
||||
self.config.save()
|
||||
# Get the example vhost
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.config.save()
|
||||
|
||||
root_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module",
|
||||
start=parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
exclude=False)
|
||||
|
||||
mods = [lm for lm in root_loadmods if self.config.parser.loc["default"] in lm]
|
||||
|
||||
self.assertEqual(len(mods), 1)
|
||||
# [:-7] removes "/arg[#]" from the path
|
||||
self.assertEqual(
|
||||
self.config.parser.get_all_args(mods[0][:-7]),
|
||||
sslmod_args)
|
||||
|
||||
def test_neg_loadmod_already_on_path(self):
|
||||
loadmod_args = ["ssl_module", "modules/mod_ssl.so"]
|
||||
ifmod = self.config.parser.get_ifmod(
|
||||
self.vh_truth[1].path, "!mod_ssl.c", beginning=True)
|
||||
self.config.parser.add_dir(ifmod[:-1], "LoadModule", loadmod_args)
|
||||
self.config.parser.add_dir(self.vh_truth[1].path, "LoadModule", loadmod_args)
|
||||
self.config.save()
|
||||
pre_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
|
||||
self.assertEqual(len(pre_loadmods), 2)
|
||||
# The ssl.conf now has two LoadModule directives, one inside of
|
||||
# !mod_ssl.c IfModule
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
self.config.save()
|
||||
# Ensure that the additional LoadModule wasn't written into the IfModule
|
||||
post_loadmods = self.config.parser.find_dir(
|
||||
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
|
||||
self.assertEqual(len(post_loadmods), 1)
|
||||
|
||||
def test_loadmod_non_duplicate(self):
|
||||
# the modules/mod_ssl.so exists in ssl.conf
|
||||
sslmod_args = ["ssl_module", "modules/mod_somethingelse.so"]
|
||||
rootconf_ifmod = self.config.parser.get_ifmod(
|
||||
parser.get_aug_path(self.config.parser.loc["default"]),
|
||||
"!mod_ssl.c", beginning=True)
|
||||
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
|
||||
self.config.save()
|
||||
self.config.assoc["test.example.com"] = self.vh_truth[0]
|
||||
pre_matches = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module", exclude=False)
|
||||
|
||||
self.assertRaises(MisconfigurationError, self.config.deploy_cert,
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
|
||||
post_matches = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module", exclude=False)
|
||||
# Make sure that none was changed
|
||||
self.assertEqual(pre_matches, post_matches)
|
||||
|
||||
def test_loadmod_not_found(self):
|
||||
# Remove all existing LoadModule ssl_module... directives
|
||||
orig_loadmods = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module",
|
||||
exclude=False)
|
||||
for mod in orig_loadmods:
|
||||
noarg_path = mod.rpartition("/")[0]
|
||||
self.config.parser.aug.remove(noarg_path)
|
||||
self.config.save()
|
||||
self.config.deploy_cert(
|
||||
"random.demo", "example/cert.pem", "example/key.pem",
|
||||
"example/cert_chain.pem", "example/fullchain.pem")
|
||||
|
||||
post_loadmods = self.config.parser.find_dir("LoadModule",
|
||||
"ssl_module",
|
||||
exclude=False)
|
||||
self.assertFalse(post_loadmods)
|
||||
|
||||
def test_no_ifmod_search_false(self):
|
||||
#pylint: disable=no-member
|
||||
|
||||
self.assertFalse(self.config.parser.not_modssl_ifmodule(
|
||||
"/path/does/not/include/ifmod"
|
||||
))
|
||||
self.assertFalse(self.config.parser.not_modssl_ifmodule(
|
||||
""
|
||||
))
|
||||
self.assertFalse(self.config.parser.not_modssl_ifmodule(
|
||||
"/path/includes/IfModule/but/no/arguments"
|
||||
))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
@@ -1,15 +1,17 @@
|
||||
"""Test for certbot_apache.configurator for Centos overrides"""
|
||||
import os
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import obj
|
||||
from certbot_apache import override_centos
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
def get_vh_truth(temp_dir, config_name):
|
||||
"""Return the ground truth for the specified directory."""
|
||||
prefix = os.path.join(
|
||||
@@ -30,6 +32,59 @@ def get_vh_truth(temp_dir, config_name):
|
||||
]
|
||||
return vh_truth
|
||||
|
||||
class FedoraRestartTest(util.ApacheTest):
|
||||
"""Tests for Fedora specific self-signed certificate override"""
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
test_dir = "centos7_apache/apache"
|
||||
config_root = "centos7_apache/apache/httpd"
|
||||
vhost_root = "centos7_apache/apache/httpd/conf.d"
|
||||
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
|
||||
config_root=config_root,
|
||||
vhost_root=vhost_root)
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
|
||||
os_info="fedora_old")
|
||||
self.vh_truth = get_vh_truth(
|
||||
self.temp_dir, "centos7_apache/apache")
|
||||
|
||||
def _run_fedora_test(self):
|
||||
self.assertIsInstance(self.config, override_centos.CentOSConfigurator)
|
||||
with mock.patch("certbot.util.get_os_info") as mock_info:
|
||||
mock_info.return_value = ["fedora", "28"]
|
||||
self.config.config_test()
|
||||
|
||||
def test_non_fedora_error(self):
|
||||
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
|
||||
with mock.patch(c_test) as mock_test:
|
||||
mock_test.side_effect = errors.MisconfigurationError
|
||||
with mock.patch("certbot.util.get_os_info") as mock_info:
|
||||
mock_info.return_value = ["not_fedora"]
|
||||
self.assertRaises(errors.MisconfigurationError,
|
||||
self.config.config_test)
|
||||
|
||||
def test_fedora_restart_error(self):
|
||||
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
|
||||
with mock.patch(c_test) as mock_test:
|
||||
# First call raises error, second doesn't
|
||||
mock_test.side_effect = [errors.MisconfigurationError, '']
|
||||
with mock.patch("certbot.util.run_script") as mock_run:
|
||||
mock_run.side_effect = errors.SubprocessError
|
||||
self.assertRaises(errors.MisconfigurationError,
|
||||
self._run_fedora_test)
|
||||
|
||||
def test_fedora_restart(self):
|
||||
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
|
||||
with mock.patch(c_test) as mock_test:
|
||||
with mock.patch("certbot.util.run_script") as mock_run:
|
||||
# First call raises error, second doesn't
|
||||
mock_test.side_effect = [errors.MisconfigurationError, '']
|
||||
self._run_fedora_test()
|
||||
self.assertEqual(mock_test.call_count, 2)
|
||||
self.assertEqual(mock_run.call_args[0][0],
|
||||
['systemctl', 'restart', 'httpd'])
|
||||
|
||||
|
||||
class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
"""Multiple vhost tests for CentOS / RHEL family of distros"""
|
||||
|
||||
@@ -50,8 +105,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
self.temp_dir, "centos7_apache/apache")
|
||||
|
||||
def test_get_parser(self):
|
||||
self.assertTrue(isinstance(self.config.parser,
|
||||
override_centos.CentOSParser))
|
||||
self.assertIsInstance(self.config.parser, override_centos.CentOSParser)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
|
||||
def test_opportunistic_httpd_runtime_parsing(self, mock_get):
|
||||
@@ -107,7 +161,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
|
||||
"""Make sure we read the sysconfig OPTIONS variable correctly"""
|
||||
# Return nothing for the process calls
|
||||
mock_cfg.return_value = ""
|
||||
self.config.parser.sysconfig_filep = os.path.realpath(
|
||||
self.config.parser.sysconfig_filep = filesystem.realpath(
|
||||
os.path.join(self.config.parser.root, "../sysconfig/httpd"))
|
||||
self.config.parser.variables = {}
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
"""Tests for certbot_apache.parser."""
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
"""Test for certbot_apache.augeas_configurator."""
|
||||
import os
|
||||
"""Test for certbot_apache.configurator implementations of reverter"""
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
@@ -10,12 +9,12 @@ from certbot import errors
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
class AugeasConfiguratorTest(util.ApacheTest):
|
||||
"""Test for Augeas Configurator base class."""
|
||||
class ConfiguratorReverterTest(util.ApacheTest):
|
||||
"""Test for ApacheConfigurator reverter methods"""
|
||||
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(AugeasConfiguratorTest, self).setUp()
|
||||
super(ConfiguratorReverterTest, self).setUp()
|
||||
|
||||
self.config = util.get_apache_configurator(
|
||||
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
|
||||
@@ -28,20 +27,6 @@ class AugeasConfiguratorTest(util.ApacheTest):
|
||||
shutil.rmtree(self.work_dir)
|
||||
shutil.rmtree(self.temp_dir)
|
||||
|
||||
def test_bad_parse(self):
|
||||
# pylint: disable=protected-access
|
||||
self.config.parser.parse_file(os.path.join(
|
||||
self.config.parser.root, "conf-available", "bad_conf_file.conf"))
|
||||
self.assertRaises(
|
||||
errors.PluginError, self.config.check_parsing_errors, "httpd.aug")
|
||||
|
||||
def test_bad_save(self):
|
||||
mock_save = mock.Mock()
|
||||
mock_save.side_effect = IOError
|
||||
self.config.aug.save = mock_save
|
||||
|
||||
self.assertRaises(errors.PluginError, self.config.save)
|
||||
|
||||
def test_bad_save_checkpoint(self):
|
||||
self.config.reverter.add_to_checkpoint = mock.Mock(
|
||||
side_effect=errors.ReverterError)
|
||||
@@ -63,23 +48,9 @@ class AugeasConfiguratorTest(util.ApacheTest):
|
||||
|
||||
self.assertTrue(mock_finalize.is_called)
|
||||
|
||||
def test_recovery_routine(self):
|
||||
mock_load = mock.Mock()
|
||||
self.config.aug.load = mock_load
|
||||
|
||||
self.config.recovery_routine()
|
||||
self.assertEqual(mock_load.call_count, 1)
|
||||
|
||||
def test_recovery_routine_error(self):
|
||||
self.config.reverter.recovery_routine = mock.Mock(
|
||||
side_effect=errors.ReverterError)
|
||||
|
||||
self.assertRaises(
|
||||
errors.PluginError, self.config.recovery_routine)
|
||||
|
||||
def test_revert_challenge_config(self):
|
||||
mock_load = mock.Mock()
|
||||
self.config.aug.load = mock_load
|
||||
self.config.parser.aug.load = mock_load
|
||||
|
||||
self.config.revert_challenge_config()
|
||||
self.assertEqual(mock_load.call_count, 1)
|
||||
@@ -93,7 +64,7 @@ class AugeasConfiguratorTest(util.ApacheTest):
|
||||
|
||||
def test_rollback_checkpoints(self):
|
||||
mock_load = mock.Mock()
|
||||
self.config.aug.load = mock_load
|
||||
self.config.parser.aug.load = mock_load
|
||||
|
||||
self.config.rollback_checkpoints()
|
||||
self.assertEqual(mock_load.call_count, 1)
|
||||
@@ -103,13 +74,11 @@ class AugeasConfiguratorTest(util.ApacheTest):
|
||||
side_effect=errors.ReverterError)
|
||||
self.assertRaises(errors.PluginError, self.config.rollback_checkpoints)
|
||||
|
||||
def test_view_config_changes(self):
|
||||
self.config.view_config_changes()
|
||||
|
||||
def test_view_config_changes_error(self):
|
||||
self.config.reverter.view_config_changes = mock.Mock(
|
||||
side_effect=errors.ReverterError)
|
||||
self.assertRaises(errors.PluginError, self.config.view_config_changes)
|
||||
def test_recovery_routine_reload(self):
|
||||
mock_load = mock.Mock()
|
||||
self.config.parser.aug.load = mock_load
|
||||
self.config.recovery_routine()
|
||||
self.assertEqual(mock_load.call_count, 1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
@@ -1,6 +1,6 @@
|
||||
# pylint: disable=too-many-public-methods,too-many-lines
|
||||
# pylint: disable=too-many-lines
|
||||
"""Test for certbot_apache.configurator."""
|
||||
import os
|
||||
import copy
|
||||
import shutil
|
||||
import socket
|
||||
import tempfile
|
||||
@@ -15,22 +15,21 @@ from acme import challenges
|
||||
from certbot import achallenges
|
||||
from certbot import crypto_util
|
||||
from certbot import errors
|
||||
|
||||
from certbot.compat import os
|
||||
from certbot.compat import filesystem
|
||||
from certbot.tests import acme_util
|
||||
from certbot.tests import util as certbot_util
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import constants
|
||||
from certbot_apache import parser
|
||||
from certbot_apache import obj
|
||||
|
||||
from certbot_apache import parser
|
||||
from certbot_apache.tests import util
|
||||
|
||||
|
||||
class MultipleVhostsTest(util.ApacheTest):
|
||||
"""Test two standard well-configured HTTP vhosts."""
|
||||
|
||||
|
||||
def setUp(self): # pylint: disable=arguments-differ
|
||||
super(MultipleVhostsTest, self).setUp()
|
||||
|
||||
@@ -52,25 +51,14 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.deploy_cert = mocked_deploy_cert
|
||||
return self.config
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator.init_augeas")
|
||||
@mock.patch("certbot_apache.configurator.path_surgery")
|
||||
def test_prepare_no_install(self, mock_surgery, _init_augeas):
|
||||
def test_prepare_no_install(self, mock_surgery):
|
||||
silly_path = {"PATH": "/tmp/nothingness2342"}
|
||||
mock_surgery.return_value = False
|
||||
with mock.patch.dict('os.environ', silly_path):
|
||||
self.assertRaises(errors.NoInstallationError, self.config.prepare)
|
||||
self.assertEqual(mock_surgery.call_count, 1)
|
||||
|
||||
@mock.patch("certbot_apache.augeas_configurator.AugeasConfigurator.init_augeas")
|
||||
def test_prepare_no_augeas(self, mock_init_augeas):
|
||||
""" Test augeas initialization ImportError """
|
||||
def side_effect_error():
|
||||
""" Side effect error for the test """
|
||||
raise ImportError
|
||||
mock_init_augeas.side_effect = side_effect_error
|
||||
self.assertRaises(
|
||||
errors.NoInstallationError, self.config.prepare)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser")
|
||||
@mock.patch("certbot_apache.configurator.util.exe_exists")
|
||||
def test_prepare_version(self, mock_exe_exists, _):
|
||||
@@ -82,16 +70,6 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.assertRaises(
|
||||
errors.NotSupportedError, self.config.prepare)
|
||||
|
||||
@mock.patch("certbot_apache.parser.ApacheParser")
|
||||
@mock.patch("certbot_apache.configurator.util.exe_exists")
|
||||
def test_prepare_old_aug(self, mock_exe_exists, _):
|
||||
mock_exe_exists.return_value = True
|
||||
self.config.config_test = mock.Mock()
|
||||
# pylint: disable=protected-access
|
||||
self.config._check_aug_version = mock.Mock(return_value=False)
|
||||
self.assertRaises(
|
||||
errors.NotSupportedError, self.config.prepare)
|
||||
|
||||
def test_prepare_locked(self):
|
||||
server_root = self.config.conf("server-root")
|
||||
self.config.config_test = mock.Mock()
|
||||
@@ -130,13 +108,9 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
exp[k.replace("_", "-")] = ApacheConfigurator.OS_DEFAULTS[k]
|
||||
# Special cases
|
||||
exp["vhost-root"] = None
|
||||
exp["init-script"] = None
|
||||
|
||||
found = set()
|
||||
for call in mock_add.call_args_list:
|
||||
# init-script is a special case: deprecated argument
|
||||
if call[0][0] != "init-script":
|
||||
self.assertEqual(exp[call[0][0]], call[1]['default'])
|
||||
found.add(call[0][0])
|
||||
|
||||
# Make sure that all (and only) the expected values exist
|
||||
@@ -384,6 +358,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
"""Mock method for parser.find_dir"""
|
||||
if directive == "Include" and argument.endswith("options-ssl-apache.conf"):
|
||||
return ["/path/to/whatever"]
|
||||
return None # pragma: no cover
|
||||
|
||||
mock_add = mock.MagicMock()
|
||||
self.config.parser.add_dir = mock_add
|
||||
@@ -495,8 +470,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
but an SSLCertificateKeyFile directive is missing."""
|
||||
if "SSLCertificateFile" in args:
|
||||
return ["example/cert.pem"]
|
||||
else:
|
||||
return []
|
||||
return []
|
||||
|
||||
mock_find_dir = mock.MagicMock(return_value=[])
|
||||
mock_find_dir.side_effect = side_effect
|
||||
@@ -676,7 +650,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# span excludes the closing </VirtualHost> tag in older versions
|
||||
# of Augeas
|
||||
return_value = [self.vh_truth[0].filep, 1, 12, 0, 0, 0, 1142]
|
||||
with mock.patch.object(self.config.aug, 'span') as mock_span:
|
||||
with mock.patch.object(self.config.parser.aug, 'span') as mock_span:
|
||||
mock_span.return_value = return_value
|
||||
self.test_make_vhost_ssl()
|
||||
|
||||
@@ -684,7 +658,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# span includes the closing </VirtualHost> tag in newer versions
|
||||
# of Augeas
|
||||
return_value = [self.vh_truth[0].filep, 1, 12, 0, 0, 0, 1157]
|
||||
with mock.patch.object(self.config.aug, 'span') as mock_span:
|
||||
with mock.patch.object(self.config.parser.aug, 'span') as mock_span:
|
||||
mock_span.return_value = return_value
|
||||
self.test_make_vhost_ssl()
|
||||
|
||||
@@ -697,8 +671,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
def test_make_vhost_ssl_nonexistent_vhost_path(self):
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[1])
|
||||
self.assertEqual(os.path.dirname(ssl_vhost.filep),
|
||||
os.path.dirname(os.path.realpath(
|
||||
self.vh_truth[1].filep)))
|
||||
os.path.dirname(filesystem.realpath(self.vh_truth[1].filep)))
|
||||
|
||||
def test_make_vhost_ssl(self):
|
||||
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
|
||||
@@ -1044,7 +1017,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
# pylint: disable=protected-access
|
||||
http_vh = self.config._get_http_vhost(ssl_vh)
|
||||
self.assertTrue(http_vh.ssl == False)
|
||||
self.assertFalse(http_vh.ssl)
|
||||
|
||||
@mock.patch("certbot.util.run_script")
|
||||
@mock.patch("certbot.util.exe_exists")
|
||||
@@ -1233,7 +1206,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
except errors.PluginEnhancementAlreadyPresent:
|
||||
args_paths = self.config.parser.find_dir(
|
||||
"RewriteRule", None, http_vhost.path, False)
|
||||
arg_vals = [self.config.aug.get(x) for x in args_paths]
|
||||
arg_vals = [self.config.parser.aug.get(x) for x in args_paths]
|
||||
self.assertEqual(arg_vals, constants.REWRITE_HTTPS_ARGS)
|
||||
|
||||
|
||||
@@ -1319,13 +1292,13 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
account_key = self.rsa512jwk
|
||||
achall1 = achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.chall_to_challb(
|
||||
challenges.TLSSNI01(
|
||||
challenges.HTTP01(
|
||||
token=b"jIq_Xy1mXGN37tb4L6Xj_es58fW571ZNyXekdZzhh7Q"),
|
||||
"pending"),
|
||||
domain="encryption-example.demo", account_key=account_key)
|
||||
achall2 = achallenges.KeyAuthorizationAnnotatedChallenge(
|
||||
challb=acme_util.chall_to_challb(
|
||||
challenges.TLSSNI01(
|
||||
challenges.HTTP01(
|
||||
token=b"uqnaPzxtrndteOqtrXb0Asl5gOJfWAnnx6QJyvcmlDU"),
|
||||
"pending"),
|
||||
domain="certbot.demo", account_key=account_key)
|
||||
@@ -1336,15 +1309,6 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
|
||||
return account_key, (achall1, achall2, achall3)
|
||||
|
||||
def test_aug_version(self):
|
||||
mock_match = mock.Mock(return_value=["something"])
|
||||
self.config.aug.match = mock_match
|
||||
# pylint: disable=protected-access
|
||||
self.assertEqual(self.config._check_aug_version(),
|
||||
["something"])
|
||||
self.config.aug.match.side_effect = RuntimeError
|
||||
self.assertFalse(self.config._check_aug_version())
|
||||
|
||||
def test_enable_site_nondebian(self):
|
||||
inc_path = "/path/to/wherever"
|
||||
vhost = self.vh_truth[0]
|
||||
@@ -1367,8 +1331,8 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
self.config.parser.modules.add("ssl_module")
|
||||
self.config.parser.modules.add("mod_ssl.c")
|
||||
self.config.parser.modules.add("socache_shmcb_module")
|
||||
tmp_path = os.path.realpath(tempfile.mkdtemp("vhostroot"))
|
||||
os.chmod(tmp_path, 0o755)
|
||||
tmp_path = filesystem.realpath(tempfile.mkdtemp("vhostroot"))
|
||||
filesystem.chmod(tmp_path, 0o755)
|
||||
mock_p = "certbot_apache.configurator.ApacheConfigurator._get_ssl_vhost_path"
|
||||
mock_a = "certbot_apache.parser.ApacheParser.add_include"
|
||||
|
||||
@@ -1400,7 +1364,7 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
# pylint: disable=protected-access
|
||||
cases = {u"*.example.org": True, b"*.x.example.org": True,
|
||||
u"a.example.org": False, b"a.x.example.org": False}
|
||||
for key in cases.keys():
|
||||
for key in cases:
|
||||
self.assertEqual(self.config._wildcard_domain(key), cases[key])
|
||||
|
||||
def test_choose_vhosts_wildcard(self):
|
||||
@@ -1512,6 +1476,29 @@ class MultipleVhostsTest(util.ApacheTest):
|
||||
second_id = self.config.add_vhost_id(self.vh_truth[0])
|
||||
self.assertEqual(first_id, second_id)
|
||||
|
||||
def test_realpath_replaces_symlink(self):
|
||||
orig_match = self.config.parser.aug.match
|
||||
mock_vhost = copy.deepcopy(self.vh_truth[0])
|
||||
mock_vhost.filep = mock_vhost.filep.replace('sites-enabled', u'sites-available')
|
||||
mock_vhost.path = mock_vhost.path.replace('sites-enabled', 'sites-available')
|
||||
mock_vhost.enabled = False
|
||||
self.config.parser.parse_file(mock_vhost.filep)
|
||||
|
||||
def mock_match(aug_expr):
|
||||
"""Return a mocked match list of VirtualHosts"""
|
||||
if "/mocked/path" in aug_expr:
|
||||
return [self.vh_truth[1].path, self.vh_truth[0].path, mock_vhost.path]
|
||||
return orig_match(aug_expr)
|
||||
|
||||
self.config.parser.parser_paths = ["/mocked/path"]
|
||||
self.config.parser.aug.match = mock_match
|
||||
vhs = self.config.get_virtual_hosts()
|
||||
self.assertEqual(len(vhs), 2)
|
||||
self.assertTrue(vhs[0] == self.vh_truth[1])
|
||||
# mock_vhost should have replaced the vh_truth[0], because its filepath
|
||||
# isn't a symlink
|
||||
self.assertTrue(vhs[1] == mock_vhost)
|
||||
|
||||
|
||||
class AugeasVhostsTest(util.ApacheTest):
|
||||
"""Test vhosts with illegal names dependent on augeas version."""
|
||||
@@ -1530,8 +1517,8 @@ class AugeasVhostsTest(util.ApacheTest):
|
||||
self.work_dir)
|
||||
|
||||
def test_choosevhost_with_illegal_name(self):
|
||||
self.config.aug = mock.MagicMock()
|
||||
self.config.aug.match.side_effect = RuntimeError
|
||||
self.config.parser.aug = mock.MagicMock()
|
||||
self.config.parser.aug.match.side_effect = RuntimeError
|
||||
path = "debian_apache_2_4/augeas_vhosts/apache2/sites-available/old-and-default.conf"
|
||||
chosen_vhost = self.config._create_vhost(path)
|
||||
self.assertEqual(None, chosen_vhost)
|
||||
@@ -1539,7 +1526,7 @@ class AugeasVhostsTest(util.ApacheTest):
|
||||
def test_choosevhost_works(self):
|
||||
path = "debian_apache_2_4/augeas_vhosts/apache2/sites-available/old-and-default.conf"
|
||||
chosen_vhost = self.config._create_vhost(path)
|
||||
self.assertTrue(chosen_vhost == None or chosen_vhost.path == path)
|
||||
self.assertTrue(chosen_vhost is None or chosen_vhost.path == path)
|
||||
|
||||
@mock.patch("certbot_apache.configurator.ApacheConfigurator._create_vhost")
|
||||
def test_get_vhost_continue(self, mock_vhost):
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
"""Test for certbot_apache.configurator for Debian overrides"""
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from certbot import errors
|
||||
from certbot.compat import os
|
||||
|
||||
from certbot_apache import apache_util
|
||||
from certbot_apache import obj
|
||||
@@ -79,9 +79,9 @@ class MultipleVhostsTestDebian(util.ApacheTest):
|
||||
|
||||
def test_enable_site_failure(self):
|
||||
self.config.parser.root = "/tmp/nonexistent"
|
||||
with mock.patch("os.path.isdir") as mock_dir:
|
||||
with mock.patch("certbot.compat.os.path.isdir") as mock_dir:
|
||||
mock_dir.return_value = True
|
||||
with mock.patch("os.path.islink") as mock_link:
|
||||
with mock.patch("certbot.compat.os.path.islink") as mock_link:
|
||||
mock_link.return_value = False
|
||||
self.assertRaises(
|
||||
errors.NotSupportedError,
|
||||
|
||||
@@ -6,6 +6,7 @@ import mock
|
||||
from certbot_apache import configurator
|
||||
from certbot_apache import entrypoint
|
||||
|
||||
|
||||
class EntryPointTest(unittest.TestCase):
|
||||
"""Entrypoint tests"""
|
||||
|
||||
@@ -14,8 +15,13 @@ class EntryPointTest(unittest.TestCase):
|
||||
def test_get_configurator(self):
|
||||
|
||||
with mock.patch("certbot.util.get_os_info") as mock_info:
|
||||
for distro in entrypoint.OVERRIDE_CLASSES.keys():
|
||||
mock_info.return_value = (distro, "whatever")
|
||||
for distro in entrypoint.OVERRIDE_CLASSES:
|
||||
return_value = (distro, "whatever")
|
||||
if distro == 'fedora_old':
|
||||
return_value = ('fedora', '28')
|
||||
elif distro == 'fedora':
|
||||
return_value = ('fedora', '29')
|
||||
mock_info.return_value = return_value
|
||||
self.assertEqual(entrypoint.get_configurator(),
|
||||
entrypoint.OVERRIDE_CLASSES[distro])
|
||||
|
||||
@@ -23,7 +29,7 @@ class EntryPointTest(unittest.TestCase):
|
||||
with mock.patch("certbot.util.get_os_info") as mock_info:
|
||||
mock_info.return_value = ("nonexistent", "irrelevant")
|
||||
with mock.patch("certbot.util.get_systemd_os_like") as mock_like:
|
||||
for like in entrypoint.OVERRIDE_CLASSES.keys():
|
||||
for like in entrypoint.OVERRIDE_CLASSES:
|
||||
mock_like.return_value = [like]
|
||||
self.assertEqual(entrypoint.get_configurator(),
|
||||
entrypoint.OVERRIDE_CLASSES[like])
|
||||
|
||||
195
certbot-apache/certbot_apache/tests/fedora_test.py
Normal file
@@ -0,0 +1,195 @@
"""Test for certbot_apache.configurator for Fedora 29+ overrides"""
import unittest

import mock

from certbot import errors
from certbot.compat import filesystem
from certbot.compat import os

from certbot_apache import obj
from certbot_apache import override_fedora
from certbot_apache.tests import util


def get_vh_truth(temp_dir, config_name):
"""Return the ground truth for the specified directory."""
prefix = os.path.join(
temp_dir, config_name, "httpd/conf.d")

aug_pre = "/files" + prefix
# TODO: eventually, these tests should have a dedicated configuration instead
# of reusing the ones from centos_test
vh_truth = [
obj.VirtualHost(
os.path.join(prefix, "centos.example.com.conf"),
os.path.join(aug_pre, "centos.example.com.conf/VirtualHost"),
{obj.Addr.fromstring("*:80")},
False, True, "centos.example.com"),
obj.VirtualHost(
os.path.join(prefix, "ssl.conf"),
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
{obj.Addr.fromstring("_default_:443")},
True, True, None)
]
return vh_truth


class FedoraRestartTest(util.ApacheTest):
"""Tests for Fedora specific self-signed certificate override"""

# TODO: eventually, these tests should have a dedicated configuration instead
# of reusing the ones from centos_test
def setUp(self): # pylint: disable=arguments-differ
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="fedora")
self.vh_truth = get_vh_truth(
self.temp_dir, "centos7_apache/apache")

def _run_fedora_test(self):
self.assertIsInstance(self.config, override_fedora.FedoraConfigurator)
self.config.config_test()

def test_fedora_restart_error(self):
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
with mock.patch(c_test) as mock_test:
# First call raises error, second doesn't
mock_test.side_effect = [errors.MisconfigurationError, '']
with mock.patch("certbot.util.run_script") as mock_run:
mock_run.side_effect = errors.SubprocessError
self.assertRaises(errors.MisconfigurationError,
self._run_fedora_test)

def test_fedora_restart(self):
c_test = "certbot_apache.configurator.ApacheConfigurator.config_test"
with mock.patch(c_test) as mock_test:
with mock.patch("certbot.util.run_script") as mock_run:
# First call raises error, second doesn't
mock_test.side_effect = [errors.MisconfigurationError, '']
self._run_fedora_test()
self.assertEqual(mock_test.call_count, 2)
self.assertEqual(mock_run.call_args[0][0],
['systemctl', 'restart', 'httpd'])


class MultipleVhostsTestFedora(util.ApacheTest):
"""Multiple vhost tests for CentOS / RHEL family of distros"""

_multiprocess_can_split_ = True

def setUp(self): # pylint: disable=arguments-differ
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super(MultipleVhostsTestFedora, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)

self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="fedora")
self.vh_truth = get_vh_truth(
self.temp_dir, "centos7_apache/apache")

def test_get_parser(self):
self.assertIsInstance(self.config.parser, override_fedora.FedoraParser)

@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
def test_opportunistic_httpd_runtime_parsing(self, mock_get):
define_val = (
'Define: TEST1\n'
'Define: TEST2\n'
'Define: DUMP_RUN_CFG\n'
)
mod_val = (
'Loaded Modules:\n'
' mock_module (static)\n'
' another_module (static)\n'
)
def mock_get_cfg(command):
"""Mock httpd process stdout"""
if command == ['httpd', '-t', '-D', 'DUMP_RUN_CFG']:
return define_val
elif command == ['httpd', '-t', '-D', 'DUMP_MODULES']:
return mod_val
return ""
mock_get.side_effect = mock_get_cfg
self.config.parser.modules = set()
self.config.parser.variables = {}

with mock.patch("certbot.util.get_os_info") as mock_osi:
# Make sure we have the have the CentOS httpd constants
mock_osi.return_value = ("fedora", "29")
self.config.parser.update_runtime_variables()

self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertTrue("TEST2" in self.config.parser.variables.keys())
self.assertTrue("mod_another.c" in self.config.parser.modules)

@mock.patch("certbot_apache.configurator.util.run_script")
def test_get_version(self, mock_run_script):
mock_run_script.return_value = ('', None)
self.assertRaises(errors.PluginError, self.config.get_version)
self.assertEqual(mock_run_script.call_args[0][0][0], 'httpd')

def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found."""
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 2)
found = 0

for vhost in vhs:
for centos_truth in self.vh_truth:
if vhost == centos_truth:
found += 1
break
else:
raise Exception("Missed: %s" % vhost) # pragma: no cover
self.assertEqual(found, 2)

@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
def test_get_sysconfig_vars(self, mock_cfg):
"""Make sure we read the sysconfig OPTIONS variable correctly"""
# Return nothing for the process calls
mock_cfg.return_value = ""
self.config.parser.sysconfig_filep = filesystem.realpath(
os.path.join(self.config.parser.root, "../sysconfig/httpd"))
self.config.parser.variables = {}

with mock.patch("certbot.util.get_os_info") as mock_osi:
# Make sure we have the have the CentOS httpd constants
mock_osi.return_value = ("fedora", "29")
self.config.parser.update_runtime_variables()

self.assertTrue("mock_define" in self.config.parser.variables.keys())
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
self.assertTrue("mock_value" in self.config.parser.variables.keys())
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])

@mock.patch("certbot_apache.configurator.util.run_script")
def test_alt_restart_works(self, mock_run_script):
mock_run_script.side_effect = [None, errors.SubprocessError, None]
self.config.restart()
self.assertEqual(mock_run_script.call_count, 3)

@mock.patch("certbot_apache.configurator.util.run_script")
def test_alt_restart_errors(self, mock_run_script):
mock_run_script.side_effect = [None,
errors.SubprocessError,
errors.SubprocessError]
self.assertRaises(errors.MisconfigurationError, self.config.restart)


if __name__ == "__main__":
unittest.main() # pragma: no cover
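FedoraRestartTest above exercises a retry-after-restart pattern: when the first config test fails, the override restarts httpd ('systemctl restart httpd') and runs the test once more. A minimal standalone sketch of that control flow, assuming a hypothetical helper name rather than the plugin's actual API:

import subprocess

class MisconfigurationError(Exception):
    """Stand-in for certbot.errors.MisconfigurationError."""

def config_test_with_restart(run_config_test, restart_cmd=("systemctl", "restart", "httpd")):
    """Run a config test; if it fails, restart the service and try exactly once more."""
    try:
        run_config_test()
    except MisconfigurationError:
        # Mirrors the test's expectation: one restart call, then a second config test.
        subprocess.check_call(list(restart_cmd))
        run_config_test()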
@@ -1,15 +1,17 @@
"""Test for certbot_apache.configurator for Gentoo overrides"""
import os
import unittest

import mock

from certbot import errors
from certbot.compat import filesystem
from certbot.compat import os

from certbot_apache import override_gentoo
from certbot_apache import obj
from certbot_apache import override_gentoo
from certbot_apache.tests import util


def get_vh_truth(temp_dir, config_name):
"""Return the ground truth for the specified directory."""
prefix = os.path.join(

@@ -80,7 +82,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
"""Make sure we read the Gentoo APACHE2_OPTS variable correctly"""
defines = ['DEFAULT_VHOST', 'INFO',
'SSL', 'SSL_DEFAULT_VHOST', 'LANGUAGE']
self.config.parser.apacheconfig_filep = os.path.realpath(
self.config.parser.apacheconfig_filep = filesystem.realpath(
os.path.join(self.config.parser.root, "../conf.d/apache2"))
self.config.parser.variables = {}
with mock.patch("certbot_apache.override_gentoo.GentooParser.update_modules"):

@@ -113,6 +115,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
"""Mock httpd process stdout"""
if command == ['apache2ctl', 'modules']:
return mod_val
return None # pragma: no cover
mock_get.side_effect = mock_get_cfg
self.config.parser.modules = set()
@@ -1,15 +1,16 @@
"""Test for certbot_apache.http_01."""
import mock
import os
import unittest
import mock

from acme import challenges
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module

from certbot import achallenges
from certbot import errors

from certbot.compat import filesystem
from certbot.compat import os
from certbot.tests import acme_util

from certbot_apache.parser import get_aug_path
from certbot_apache.tests import util

@@ -20,7 +21,7 @@ NUM_ACHALLS = 3
class ApacheHttp01Test(util.ApacheTest):
"""Test for certbot_apache.http_01.ApacheHttp01."""

def setUp(self, *args, **kwargs):
def setUp(self, *args, **kwargs): # pylint: disable=arguments-differ
super(ApacheHttp01Test, self).setUp(*args, **kwargs)

self.account_key = self.rsa512jwk

@@ -78,7 +79,7 @@ class ApacheHttp01Test(util.ApacheTest):
calls = mock_enmod.call_args_list
other_calls = []
for call in calls:
if "rewrite" != call[0][0]:
if call[0][0] != "rewrite":
other_calls.append(call)

# If these lists are equal, we never enabled mod_rewrite

@@ -180,7 +181,7 @@ class ApacheHttp01Test(util.ApacheTest):
self.assertEqual(self.http.perform(), expected_response)

self.assertTrue(os.path.isdir(self.http.challenge_dir))
self._has_min_permissions(self.http.challenge_dir, 0o755)
self.assertTrue(filesystem.has_min_permissions(self.http.challenge_dir, 0o755))
self._test_challenge_conf()

for achall in achalls:

@@ -218,15 +219,10 @@ class ApacheHttp01Test(util.ApacheTest):
name = os.path.join(self.http.challenge_dir, achall.chall.encode("token"))
validation = achall.validation(self.account_key)

self._has_min_permissions(name, 0o644)
self.assertTrue(filesystem.has_min_permissions(name, 0o644))
with open(name, 'rb') as f:
self.assertEqual(f.read(), validation.encode())

def _has_min_permissions(self, path, min_mode):
"""Tests the given file has at least the permissions in mode."""
st_mode = os.stat(path).st_mode
self.assertEqual(st_mode, st_mode | min_mode)


if __name__ == "__main__":
unittest.main() # pragma: no cover
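The removed _has_min_permissions helper above simply checked that a path's mode already contains every bit in the requested minimum, which is what the new filesystem.has_min_permissions assertion replaces. A standalone sketch of that check (not certbot's implementation):

import os

def has_min_permissions(path, min_mode):
    """Return True if the file's mode includes at least the bits in min_mode."""
    st_mode = os.stat(path).st_mode
    return st_mode == (st_mode | min_mode)

# Example: a directory created with 0o755 satisfies a 0o755 minimum.
# has_min_permissions("/tmp", 0o755)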
@@ -1,12 +1,11 @@
"""Tests for certbot_apache.parser."""
import os
import shutil
import unittest

import augeas
import mock

from certbot import errors
from certbot.compat import os

from certbot_apache.tests import util

@@ -22,6 +21,27 @@ class BasicParserTest(util.ParserTest):
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)

def test_bad_parse(self):
self.parser.parse_file(os.path.join(self.parser.root,
"conf-available", "bad_conf_file.conf"))
self.assertRaises(
errors.PluginError, self.parser.check_parsing_errors, "httpd.aug")

def test_bad_save(self):
mock_save = mock.Mock()
mock_save.side_effect = IOError
self.parser.aug.save = mock_save
self.assertRaises(errors.PluginError, self.parser.unsaved_files)

def test_aug_version(self):
mock_match = mock.Mock(return_value=["something"])
self.parser.aug.match = mock_match
# pylint: disable=protected-access
self.assertEqual(self.parser.check_aug_version(),
["something"])
self.parser.aug.match.side_effect = RuntimeError
self.assertFalse(self.parser.check_aug_version())

def test_find_config_root_no_root(self):
# pylint: disable=protected-access
os.remove(self.parser.loc["root"])

@@ -234,6 +254,7 @@ class BasicParserTest(util.ParserTest):
return inc_val
elif cmd[-1] == "DUMP_MODULES":
return mod_val
return None # pragma: no cover

mock_cfg.side_effect = mock_get_vars

@@ -310,21 +331,38 @@ class BasicParserTest(util.ParserTest):
class ParserInitTest(util.ApacheTest):
def setUp(self): # pylint: disable=arguments-differ
super(ParserInitTest, self).setUp()
self.aug = augeas.Augeas(
flags=augeas.Augeas.NONE | augeas.Augeas.NO_MODL_AUTOLOAD)

def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)

@mock.patch("certbot_apache.parser.ApacheParser.init_augeas")
def test_prepare_no_augeas(self, mock_init_augeas):
from certbot_apache.parser import ApacheParser
mock_init_augeas.side_effect = errors.NoInstallationError
self.config.config_test = mock.Mock()
self.assertRaises(
errors.NoInstallationError, ApacheParser,
os.path.relpath(self.config_path), "/dummy/vhostpath",
version=(2, 4, 22), configurator=self.config)

def test_init_old_aug(self):
from certbot_apache.parser import ApacheParser
with mock.patch("certbot_apache.parser.ApacheParser.check_aug_version") as mock_c:
mock_c.return_value = False
self.assertRaises(
errors.NotSupportedError,
ApacheParser, os.path.relpath(self.config_path),
"/dummy/vhostpath", version=(2, 4, 22), configurator=self.config)

@mock.patch("certbot_apache.parser.ApacheParser._get_runtime_cfg")
def test_unparseable(self, mock_cfg):
from certbot_apache.parser import ApacheParser
mock_cfg.return_value = ('Define: TEST')
self.assertRaises(
errors.PluginError,
ApacheParser, self.aug, os.path.relpath(self.config_path),
ApacheParser, os.path.relpath(self.config_path),
"/dummy/vhostpath", version=(2, 2, 22), configurator=self.config)

def test_root_normalized(self):

@@ -336,8 +374,7 @@ class ParserInitTest(util.ApacheTest):
self.temp_dir,
"debian_apache_2_4/////multiple_vhosts/../multiple_vhosts/apache2")

parser = ApacheParser(self.aug, path,
"/dummy/vhostpath", configurator=self.config)
parser = ApacheParser(path, "/dummy/vhostpath", configurator=self.config)

self.assertEqual(parser.root, self.config_path)

@@ -346,7 +383,7 @@ class ParserInitTest(util.ApacheTest):
with mock.patch("certbot_apache.parser.ApacheParser."
"update_runtime_variables"):
parser = ApacheParser(
self.aug, os.path.relpath(self.config_path),
os.path.relpath(self.config_path),
"/dummy/vhostpath", configurator=self.config)

self.assertEqual(parser.root, self.config_path)

@@ -356,7 +393,7 @@ class ParserInitTest(util.ApacheTest):
with mock.patch("certbot_apache.parser.ApacheParser."
"update_runtime_variables"):
parser = ApacheParser(
self.aug, self.config_path + os.path.sep,
self.config_path + os.path.sep,
"/dummy/vhostpath", configurator=self.config)
self.assertEqual(parser.root, self.config_path)
9
certbot-apache/certbot_apache/tests/testdata/centos6_apache/apache/httpd/conf.d/README
vendored
Normal file
@@ -0,0 +1,9 @@

This directory holds Apache 2.0 module-specific configuration files;
any files in this directory which have the ".conf" extension will be
processed as Apache configuration files.

Files are processed in alphabetical order, so if using configuration
directives which depend on, say, mod_perl being loaded, ensure that
these are placed in a filename later in the sort order than "perl.conf".
222
certbot-apache/certbot_apache/tests/testdata/centos6_apache/apache/httpd/conf.d/ssl.conf
vendored
Normal file
@@ -0,0 +1,222 @@
|
||||
#
|
||||
# This is the Apache server configuration file providing SSL support.
|
||||
# It contains the configuration directives to instruct the server how to
|
||||
# serve pages over an https connection. For detailing information about these
|
||||
# directives see <URL:http://httpd.apache.org/docs/2.2/mod/mod_ssl.html>
|
||||
#
|
||||
# Do NOT simply read the instructions in here without understanding
|
||||
# what they do. They're here only as hints or reminders. If you are unsure
|
||||
# consult the online docs. You have been warned.
|
||||
#
|
||||
|
||||
LoadModule ssl_module modules/mod_ssl.so
|
||||
|
||||
#
|
||||
# When we also provide SSL we have to listen to the
|
||||
# the HTTPS port in addition.
|
||||
#
|
||||
Listen 443
|
||||
|
||||
##
|
||||
## SSL Global Context
|
||||
##
|
||||
## All SSL configuration in this context applies both to
|
||||
## the main server and all SSL-enabled virtual hosts.
|
||||
##
|
||||
|
||||
# Pass Phrase Dialog:
|
||||
# Configure the pass phrase gathering process.
|
||||
# The filtering dialog program (`builtin' is a internal
|
||||
# terminal dialog) has to provide the pass phrase on stdout.
|
||||
SSLPassPhraseDialog builtin
|
||||
|
||||
# Inter-Process Session Cache:
|
||||
# Configure the SSL Session Cache: First the mechanism
|
||||
# to use and second the expiring timeout (in seconds).
|
||||
SSLSessionCache shmcb:/var/cache/mod_ssl/scache(512000)
|
||||
SSLSessionCacheTimeout 300
|
||||
|
||||
# Semaphore:
|
||||
# Configure the path to the mutual exclusion semaphore the
|
||||
# SSL engine uses internally for inter-process synchronization.
|
||||
SSLMutex default
|
||||
|
||||
# Pseudo Random Number Generator (PRNG):
|
||||
# Configure one or more sources to seed the PRNG of the
|
||||
# SSL library. The seed data should be of good random quality.
|
||||
# WARNING! On some platforms /dev/random blocks if not enough entropy
|
||||
# is available. This means you then cannot use the /dev/random device
|
||||
# because it would lead to very long connection times (as long as
|
||||
# it requires to make more entropy available). But usually those
|
||||
# platforms additionally provide a /dev/urandom device which doesn't
|
||||
# block. So, if available, use this one instead. Read the mod_ssl User
|
||||
# Manual for more details.
|
||||
SSLRandomSeed startup file:/dev/urandom 256
|
||||
SSLRandomSeed connect builtin
|
||||
#SSLRandomSeed startup file:/dev/random 512
|
||||
#SSLRandomSeed connect file:/dev/random 512
|
||||
#SSLRandomSeed connect file:/dev/urandom 512
|
||||
|
||||
#
|
||||
# Use "SSLCryptoDevice" to enable any supported hardware
|
||||
# accelerators. Use "openssl engine -v" to list supported
|
||||
# engine names. NOTE: If you enable an accelerator and the
|
||||
# server does not start, consult the error logs and ensure
|
||||
# your accelerator is functioning properly.
|
||||
#
|
||||
SSLCryptoDevice builtin
|
||||
#SSLCryptoDevice ubsec
|
||||
|
||||
##
|
||||
## SSL Virtual Host Context
|
||||
##
|
||||
|
||||
<VirtualHost _default_:443>
|
||||
|
||||
# General setup for the virtual host, inherited from global configuration
|
||||
#DocumentRoot "/var/www/html"
|
||||
#ServerName www.example.com:443
|
||||
|
||||
# Use separate log files for the SSL virtual host; note that LogLevel
|
||||
# is not inherited from httpd.conf.
|
||||
ErrorLog logs/ssl_error_log
|
||||
TransferLog logs/ssl_access_log
|
||||
LogLevel warn
|
||||
|
||||
# SSL Engine Switch:
|
||||
# Enable/Disable SSL for this virtual host.
|
||||
SSLEngine on
|
||||
|
||||
# SSL Protocol support:
|
||||
# List the enable protocol levels with which clients will be able to
|
||||
# connect. Disable SSLv2 access by default:
|
||||
SSLProtocol all -SSLv2
|
||||
|
||||
# SSL Cipher Suite:
|
||||
# List the ciphers that the client is permitted to negotiate.
|
||||
# See the mod_ssl documentation for a complete list.
|
||||
SSLCipherSuite DEFAULT:!EXP:!SSLv2:!DES:!IDEA:!SEED:+3DES
|
||||
|
||||
# Server Certificate:
|
||||
# Point SSLCertificateFile at a PEM encoded certificate. If
|
||||
# the certificate is encrypted, then you will be prompted for a
|
||||
# pass phrase. Note that a kill -HUP will prompt again. A new
|
||||
# certificate can be generated using the genkey(1) command.
|
||||
SSLCertificateFile /etc/pki/tls/certs/localhost.crt
|
||||
|
||||
# Server Private Key:
|
||||
# If the key is not combined with the certificate, use this
|
||||
# directive to point at the key file. Keep in mind that if
|
||||
# you've both a RSA and a DSA private key you can configure
|
||||
# both in parallel (to also allow the use of DSA ciphers, etc.)
|
||||
SSLCertificateKeyFile /etc/pki/tls/private/localhost.key
|
||||
|
||||
# Server Certificate Chain:
|
||||
# Point SSLCertificateChainFile at a file containing the
|
||||
# concatenation of PEM encoded CA certificates which form the
|
||||
# certificate chain for the server certificate. Alternatively
|
||||
# the referenced file can be the same as SSLCertificateFile
|
||||
# when the CA certificates are directly appended to the server
|
||||
# certificate for convinience.
|
||||
#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt
|
||||
|
||||
# Certificate Authority (CA):
|
||||
# Set the CA certificate verification path where to find CA
|
||||
# certificates for client authentication or alternatively one
|
||||
# huge file containing all of them (file must be PEM encoded)
|
||||
#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt
|
||||
|
||||
# Client Authentication (Type):
|
||||
# Client certificate verification type and depth. Types are
|
||||
# none, optional, require and optional_no_ca. Depth is a
|
||||
# number which specifies how deeply to verify the certificate
|
||||
# issuer chain before deciding the certificate is not valid.
|
||||
#SSLVerifyClient require
|
||||
#SSLVerifyDepth 10
|
||||
|
||||
# Access Control:
|
||||
# With SSLRequire you can do per-directory access control based
|
||||
# on arbitrary complex boolean expressions containing server
|
||||
# variable checks and other lookup directives. The syntax is a
|
||||
# mixture between C and Perl. See the mod_ssl documentation
|
||||
# for more details.
|
||||
#<Location />
|
||||
#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \
|
||||
# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \
|
||||
# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \
|
||||
# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \
|
||||
# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \
|
||||
# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/
|
||||
#</Location>
|
||||
|
||||
# SSL Engine Options:
|
||||
# Set various options for the SSL engine.
|
||||
# o FakeBasicAuth:
|
||||
# Translate the client X.509 into a Basic Authorisation. This means that
|
||||
# the standard Auth/DBMAuth methods can be used for access control. The
|
||||
# user name is the `one line' version of the client's X.509 certificate.
|
||||
# Note that no password is obtained from the user. Every entry in the user
|
||||
# file needs this password: `xxj31ZMTZzkVA'.
|
||||
# o ExportCertData:
|
||||
# This exports two additional environment variables: SSL_CLIENT_CERT and
|
||||
# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the
|
||||
# server (always existing) and the client (only existing when client
|
||||
# authentication is used). This can be used to import the certificates
|
||||
# into CGI scripts.
|
||||
# o StdEnvVars:
|
||||
# This exports the standard SSL/TLS related `SSL_*' environment variables.
|
||||
# Per default this exportation is switched off for performance reasons,
|
||||
# because the extraction step is an expensive operation and is usually
|
||||
# useless for serving static content. So one usually enables the
|
||||
# exportation for CGI and SSI requests only.
|
||||
# o StrictRequire:
|
||||
# This denies access when "SSLRequireSSL" or "SSLRequire" applied even
|
||||
# under a "Satisfy any" situation, i.e. when it applies access is denied
|
||||
# and no other module can change it.
|
||||
# o OptRenegotiate:
|
||||
# This enables optimized SSL connection renegotiation handling when SSL
|
||||
# directives are used in per-directory context.
|
||||
#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
|
||||
<Files ~ "\.(cgi|shtml|phtml|php3?)$">
|
||||
SSLOptions +StdEnvVars
|
||||
</Files>
|
||||
<Directory "/var/www/cgi-bin">
|
||||
SSLOptions +StdEnvVars
|
||||
</Directory>
|
||||
|
||||
# SSL Protocol Adjustments:
|
||||
# The safe and default but still SSL/TLS standard compliant shutdown
|
||||
# approach is that mod_ssl sends the close notify alert but doesn't wait for
|
||||
# the close notify alert from client. When you need a different shutdown
|
||||
# approach you can use one of the following variables:
|
||||
# o ssl-unclean-shutdown:
|
||||
# This forces an unclean shutdown when the connection is closed, i.e. no
|
||||
# SSL close notify alert is send or allowed to received. This violates
|
||||
# the SSL/TLS standard but is needed for some brain-dead browsers. Use
|
||||
# this when you receive I/O errors because of the standard approach where
|
||||
# mod_ssl sends the close notify alert.
|
||||
# o ssl-accurate-shutdown:
|
||||
# This forces an accurate shutdown when the connection is closed, i.e. a
|
||||
# SSL close notify alert is send and mod_ssl waits for the close notify
|
||||
# alert of the client. This is 100% SSL/TLS standard compliant, but in
|
||||
# practice often causes hanging connections with brain-dead browsers. Use
|
||||
# this only for browsers where you know that their SSL implementation
|
||||
# works correctly.
|
||||
# Notice: Most problems of broken clients are also related to the HTTP
|
||||
# keep-alive facility, so you usually additionally want to disable
|
||||
# keep-alive for those clients, too. Use variable "nokeepalive" for this.
|
||||
# Similarly, one has to force some clients to use HTTP/1.0 to workaround
|
||||
# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and
|
||||
# "force-response-1.0" for this.
|
||||
SetEnvIf User-Agent ".*MSIE.*" \
|
||||
nokeepalive ssl-unclean-shutdown \
|
||||
downgrade-1.0 force-response-1.0
|
||||
|
||||
# Per-Server Logging:
|
||||
# The home of a custom SSL log file. Use this when you want a
|
||||
# compact non-error SSL logfile on a virtual host basis.
|
||||
CustomLog logs/ssl_request_log \
|
||||
"%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
|
||||
|
||||
</VirtualHost>
|
||||
|
||||
@@ -0,0 +1,7 @@
<VirtualHost *:80>
ServerName test.example.com
ServerAdmin webmaster@dummy-host.example.com
DocumentRoot /var/www/htdocs
ErrorLog logs/dummy-host.example.com-error_log
CustomLog logs/dummy-host.example.com-access_log common
</VirtualHost>
11
certbot-apache/certbot_apache/tests/testdata/centos6_apache/apache/httpd/conf.d/welcome.conf
vendored
Normal file
@@ -0,0 +1,11 @@
#
# This configuration file enables the default "Welcome"
# page if there is no default index page present for
# the root URL. To disable the Welcome page, comment
# out all the lines below.
#
<LocationMatch "^/+$">
Options -Indexes
ErrorDocument 403 /error/noindex.html
</LocationMatch>
1009
certbot-apache/certbot_apache/tests/testdata/centos6_apache/apache/httpd/conf/httpd.conf
vendored
Normal file
File diff suppressed because it is too large
@@ -1,5 +1,4 @@
"""Common utilities for certbot_apache."""
import os
import shutil
import sys
import unittest

@@ -9,10 +8,9 @@ import josepy as jose
import mock
import zope.component

from certbot.compat import os
from certbot.display import util as display_util

from certbot.plugins import common

from certbot.tests import util as test_util

from certbot_apache import configurator

@@ -20,7 +18,7 @@ from certbot_apache import entrypoint
from certbot_apache import obj


class ApacheTest(unittest.TestCase): # pylint: disable=too-few-public-methods
class ApacheTest(unittest.TestCase):

def setUp(self, test_dir="debian_apache_2_4/multiple_vhosts",
config_root="debian_apache_2_4/multiple_vhosts/apache2",

@@ -80,11 +78,10 @@ class ParserTest(ApacheTest):
with mock.patch("certbot_apache.parser.ApacheParser."
"update_runtime_variables"):
self.parser = ApacheParser(
self.aug, self.config_path, self.vhost_path,
configurator=self.config)
self.config_path, self.vhost_path, configurator=self.config)


def get_apache_configurator( # pylint: disable=too-many-arguments, too-many-locals
def get_apache_configurator(
config_path, vhost_path,
config_dir, work_dir, version=(2, 4, 7),
os_info="generic",
@@ -1,5 +0,0 @@
:mod:`certbot_apache.tls_sni_01`
------------------------------------

.. automodule:: certbot_apache.tls_sni_01
:members:
@@ -13,7 +13,7 @@
# serve to show the default.

import sys
import os
from certbot.compat import os
import shlex

import mock
@@ -1,2 +1,3 @@
acme[dev]==0.25.0
certbot[dev]==0.26.0
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==0.39.0
@@ -4,13 +4,13 @@ from setuptools.command.test import test as TestCommand
import sys


version = '0.33.0.dev0'
version = '1.0.0.dev0'

# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.25.0',
'certbot>=0.26.0',
'acme>=0.29.0',
'certbot>=0.39.0',
'mock',
'python-augeas',
'setuptools',

@@ -62,6 +62,7 @@ setup(
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
611
certbot-auto
@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
|
||||
fi
|
||||
VENV_BIN="$VENV_PATH/bin"
|
||||
BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
|
||||
LE_AUTO_VERSION="0.32.0"
|
||||
LE_AUTO_VERSION="0.40.1"
|
||||
BASENAME=$(basename $0)
|
||||
USAGE="Usage: $BASENAME [OPTIONS]
|
||||
A self-updating wrapper script for the Certbot ACME client. When run, updates
|
||||
@@ -45,6 +45,7 @@ Help for certbot itself cannot be provided until it is installed.
|
||||
-h, --help print this help
|
||||
-n, --non-interactive, --noninteractive run without asking for user input
|
||||
--no-bootstrap do not install OS dependencies
|
||||
--no-permissions-check do not warn about file system permissions
|
||||
--no-self-upgrade do not download updates
|
||||
--os-packages-only install OS dependencies and exit
|
||||
--install-only install certbot, upgrade if needed, and exit
|
||||
@@ -67,6 +68,8 @@ for arg in "$@" ; do
|
||||
# Do not upgrade this script (also prevents client upgrades, because each
|
||||
# copy of the script pins a hash of the python client)
|
||||
NO_SELF_UPGRADE=1;;
|
||||
--no-permissions-check)
|
||||
NO_PERMISSIONS_CHECK=1;;
|
||||
--no-bootstrap)
|
||||
NO_BOOTSTRAP=1;;
|
||||
--help)
|
||||
@@ -172,7 +175,11 @@ SetRootAuthMechanism() {
|
||||
sudo)
|
||||
SUDO="sudo -E"
|
||||
;;
|
||||
'') ;; # Nothing to do for plain root method.
|
||||
'')
|
||||
# If we're not running with root, don't check that this script can only
|
||||
# be modified by system users and groups.
|
||||
NO_PERMISSIONS_CHECK=1
|
||||
;;
|
||||
*)
|
||||
error "Error: unknown root authorization mechanism '$LE_AUTO_SUDO'."
|
||||
exit 1
|
||||
@@ -488,11 +495,18 @@ BOOTSTRAP_RPM_PYTHON3_VERSION=1
|
||||
BootstrapRpmPython3() {
|
||||
# Tested with:
|
||||
# - CentOS 6
|
||||
# - Fedora 29
|
||||
|
||||
InitializeRPMCommonBase
|
||||
|
||||
# Fedora 29 must use python3-virtualenv
|
||||
if $TOOL list python3-virtualenv >/dev/null 2>&1; then
|
||||
python_pkgs="python3
|
||||
python3-virtualenv
|
||||
python3-devel
|
||||
"
|
||||
# EPEL uses python34
|
||||
if $TOOL list python34 >/dev/null 2>&1; then
|
||||
elif $TOOL list python34 >/dev/null 2>&1; then
|
||||
python_pkgs="python34
|
||||
python34-devel
|
||||
python34-tools
|
||||
@@ -527,7 +541,7 @@ BootstrapSuseCommon() {
|
||||
# Since Leap 15.0 (and associated Tumbleweed version), python-virtualenv
|
||||
# is a source package, and python2-virtualenv must be used instead.
|
||||
# Also currently python2-setuptools is not a dependency of python2-virtualenv,
|
||||
# while it should be. Installing it explicitly until upstreqm fix.
|
||||
# while it should be. Installing it explicitly until upstream fix.
|
||||
OPENSUSE_VIRTUALENV_PACKAGES="python2-virtualenv python2-setuptools"
|
||||
fi
|
||||
|
||||
@@ -741,7 +755,33 @@ elif [ -f /etc/redhat-release ]; then
|
||||
prev_le_python="$LE_PYTHON"
|
||||
unset LE_PYTHON
|
||||
DeterminePythonVersion "NOCRASH"
|
||||
if [ "$PYVER" -eq 26 ]; then
|
||||
|
||||
RPM_DIST_NAME=`(. /etc/os-release 2> /dev/null && echo $ID) || echo "unknown"`
|
||||
|
||||
# Set RPM_DIST_VERSION to VERSION_ID from /etc/os-release after splitting on
|
||||
# '.' characters (e.g. "8.0" becomes "8"). If the command exits with an
|
||||
# error, RPM_DIST_VERSION is set to "unknown".
|
||||
RPM_DIST_VERSION=$( (. /etc/os-release 2> /dev/null && echo "$VERSION_ID") | cut -d '.' -f1 || echo "unknown")
|
||||
|
||||
# If RPM_DIST_VERSION is an empty string or it contains any nonnumeric
|
||||
# characters, the value is unexpected so we set RPM_DIST_VERSION to 0.
|
||||
if [ -z "$RPM_DIST_VERSION" ] || [ -n "$(echo "$RPM_DIST_VERSION" | tr -d '[0-9]')" ]; then
|
||||
RPM_DIST_VERSION=0
|
||||
fi
|
||||
|
||||
# Starting to Fedora 29, python2 is on a deprecation path. Let's move to python3 then.
|
||||
# RHEL 8 also uses python3 by default.
|
||||
if [ "$RPM_DIST_NAME" = "fedora" -a "$RPM_DIST_VERSION" -ge 29 -o "$PYVER" -eq 26 ]; then
|
||||
RPM_USE_PYTHON_3=1
|
||||
elif [ "$RPM_DIST_NAME" = "rhel" -a "$RPM_DIST_VERSION" -ge 8 ]; then
|
||||
RPM_USE_PYTHON_3=1
|
||||
elif [ "$RPM_DIST_NAME" = "centos" -a "$RPM_DIST_VERSION" -ge 8 ]; then
|
||||
RPM_USE_PYTHON_3=1
|
||||
else
|
||||
RPM_USE_PYTHON_3=0
|
||||
fi
|
||||
|
||||
if [ "$RPM_USE_PYTHON_3" = 1 ]; then
|
||||
Bootstrap() {
|
||||
BootstrapMessage "RedHat-based OSes that will use Python3"
|
||||
BootstrapRpmPython3
|
||||
@@ -755,6 +795,7 @@ elif [ -f /etc/redhat-release ]; then
|
||||
}
|
||||
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
|
||||
fi
|
||||
|
||||
LE_PYTHON="$prev_le_python"
|
||||
elif [ -f /etc/os-release ] && `grep -q openSUSE /etc/os-release` ; then
|
||||
Bootstrap() {
|
||||
@@ -898,6 +939,130 @@ else:
|
||||
UNLIKELY_EOF
|
||||
}
|
||||
|
||||
# Create a new virtual environment for Certbot. It will overwrite any existing one.
|
||||
# Parameters: LE_PYTHON, VENV_PATH, PYVER, VERBOSE
|
||||
CreateVenv() {
|
||||
"$1" - "$2" "$3" "$4" << "UNLIKELY_EOF"
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def create_venv(venv_path, pyver, verbose):
|
||||
if os.path.exists(venv_path):
|
||||
shutil.rmtree(venv_path)
|
||||
|
||||
stdout = sys.stdout if verbose == '1' else open(os.devnull, 'w')
|
||||
|
||||
if int(pyver) <= 27:
|
||||
# Use virtualenv binary
|
||||
environ = os.environ.copy()
|
||||
environ['VIRTUALENV_NO_DOWNLOAD'] = '1'
|
||||
command = ['virtualenv', '--no-site-packages', '--python', sys.executable, venv_path]
|
||||
subprocess.check_call(command, stdout=stdout, env=environ)
|
||||
else:
|
||||
# Use embedded venv module in Python 3
|
||||
command = [sys.executable, '-m', 'venv', venv_path]
|
||||
subprocess.check_call(command, stdout=stdout)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
create_venv(*sys.argv[1:])
|
||||
|
||||
UNLIKELY_EOF
|
||||
}
|
||||
|
||||
# Check that the given PATH_TO_CHECK has secured permissions.
|
||||
# Parameters: LE_PYTHON, PATH_TO_CHECK
|
||||
CheckPathPermissions() {
|
||||
"$1" - "$2" << "UNLIKELY_EOF"
|
||||
"""Verifies certbot-auto cannot be modified by unprivileged users.
|
||||
|
||||
This script takes the path to certbot-auto as its only command line
|
||||
argument. It then checks that the file can only be modified by uid/gid
|
||||
< 1000 and if other users can modify the file, it prints a warning with
|
||||
a suggestion on how to solve the problem.
|
||||
|
||||
Permissions on symlinks in the absolute path of certbot-auto are ignored
|
||||
and only the canonical path to certbot-auto is checked. There could be
|
||||
permissions problems due to the symlinks that are unreported by this
|
||||
script, however, issues like this were not caused by our documentation
|
||||
and are ignored for the sake of simplicity.
|
||||
|
||||
All warnings are printed to stdout rather than stderr so all stderr
|
||||
output from this script can be suppressed to avoid printing messages if
|
||||
this script fails for some reason.
|
||||
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import stat
|
||||
import sys
|
||||
|
||||
|
||||
FORUM_POST_URL = 'https://community.letsencrypt.org/t/certbot-auto-deployment-best-practices/91979/'
|
||||
|
||||
|
||||
def has_safe_permissions(path):
|
||||
"""Returns True if the given path has secure permissions.
|
||||
|
||||
The permissions are considered safe if the file is only writable by
|
||||
uid/gid < 1000.
|
||||
|
||||
The reason we allow more IDs than 0 is because on some systems such
|
||||
as Debian, system users/groups other than uid/gid 0 are used for the
|
||||
path we recommend in our instructions which is /usr/local/bin. 1000
|
||||
was chosen because on Debian 0-999 is reserved for system IDs[1] and
|
||||
on RHEL either 0-499 or 0-999 is reserved depending on the
|
||||
version[2][3]. Due to these differences across different OSes, this
|
||||
detection isn't perfect so we only determine permissions are
|
||||
insecure when we can be reasonably confident there is a problem
|
||||
regardless of the underlying OS.
|
||||
|
||||
[1] https://www.debian.org/doc/debian-policy/ch-opersys.html#uid-and-gid-classes
|
||||
[2] https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/deployment_guide/ch-managing_users_and_groups
|
||||
[3] https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/7/html/system_administrators_guide/ch-managing_users_and_groups
|
||||
|
||||
:param str path: filesystem path to check
|
||||
:returns: True if the path has secure permissions, otherwise, False
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
# os.stat follows symlinks before obtaining information about a file.
|
||||
stat_result = os.stat(path)
|
||||
if stat_result.st_mode & stat.S_IWOTH:
|
||||
return False
|
||||
if stat_result.st_mode & stat.S_IWGRP and stat_result.st_gid >= 1000:
|
||||
return False
|
||||
if stat_result.st_mode & stat.S_IWUSR and stat_result.st_uid >= 1000:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def main(certbot_auto_path):
|
||||
current_path = os.path.realpath(certbot_auto_path)
|
||||
last_path = None
|
||||
permissions_ok = True
|
||||
# This loop makes use of the fact that os.path.dirname('/') == '/'.
|
||||
while current_path != last_path and permissions_ok:
|
||||
permissions_ok = has_safe_permissions(current_path)
|
||||
last_path = current_path
|
||||
current_path = os.path.dirname(current_path)
|
||||
|
||||
if not permissions_ok:
|
||||
print('{0} has insecure permissions!'.format(certbot_auto_path))
|
||||
print('To learn how to fix them, visit {0}'.format(FORUM_POST_URL))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv[1])
|
||||
|
||||
UNLIKELY_EOF
|
||||
}
|
||||
|
||||
if [ "$1" = "--le-auto-phase2" ]; then
|
||||
# Phase 2: Create venv, install LE, and run.
|
||||
|
||||
@@ -953,22 +1118,7 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
if [ "$LE_AUTO_VERSION" != "$INSTALLED_VERSION" ]; then
|
||||
say "Creating virtual environment..."
|
||||
DeterminePythonVersion
|
||||
rm -rf "$VENV_PATH"
|
||||
if [ "$PYVER" -le 27 ]; then
|
||||
# Use an environment variable instead of a flag for compatibility with old versions
|
||||
if [ "$VERBOSE" = 1 ]; then
|
||||
VIRTUALENV_NO_DOWNLOAD=1 virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH"
|
||||
else
|
||||
VIRTUALENV_NO_DOWNLOAD=1 virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH" \
|
||||
> /dev/null
|
||||
fi
|
||||
else
|
||||
if [ "$VERBOSE" = 1 ]; then
|
||||
"$LE_PYTHON" -m venv "$VENV_PATH"
|
||||
else
|
||||
"$LE_PYTHON" -m venv "$VENV_PATH" > /dev/null
|
||||
fi
|
||||
fi
|
||||
CreateVenv "$LE_PYTHON" "$VENV_PATH" "$PYVER" "$VERBOSE"
|
||||
|
||||
if [ -n "$BOOTSTRAP_VERSION" ]; then
|
||||
echo "$BOOTSTRAP_VERSION" > "$BOOTSTRAP_VERSION_PATH"
|
||||
@@ -982,202 +1132,200 @@ if [ "$1" = "--le-auto-phase2" ]; then
|
||||
# There is no $ interpolation due to quotes on starting heredoc delimiter.
|
||||
# -------------------------------------------------------------------------
|
||||
cat << "UNLIKELY_EOF" > "$TEMP_DIR/letsencrypt-auto-requirements.txt"
|
||||
# This is the flattened list of packages certbot-auto installs. To generate
|
||||
# this, do
|
||||
# `pip install --no-cache-dir -e acme -e . -e certbot-apache -e certbot-nginx`,
|
||||
# and then use `hashin` or a more secure method to gather the hashes.
|
||||
|
||||
# Hashin example:
|
||||
# This is the flattened list of packages certbot-auto installs.
|
||||
# To generate this, do (with docker and package hashin installed):
|
||||
# ```
|
||||
# letsencrypt-auto-source/rebuild_dependencies.py \
|
||||
# letsencrypt-auto-source/pieces/dependency-requirements.txt
|
||||
# ```
|
||||
# If you want to update a single dependency, run commands similar to these:
|
||||
# ```
|
||||
# pip install hashin
|
||||
# hashin -r dependency-requirements.txt cryptography==1.5.2
|
||||
# sets the new certbot-auto pinned version of cryptography to 1.5.2
|
||||
|
||||
argparse==1.4.0 \
|
||||
--hash=sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314 \
|
||||
--hash=sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4
|
||||
|
||||
# This comes before cffi because cffi will otherwise install an unchecked
|
||||
# version via setup_requires.
|
||||
pycparser==2.14 \
|
||||
--hash=sha256:7959b4a74abdc27b312fed1c21e6caf9309ce0b29ea86b591fd2e99ecdf27f73 \
|
||||
--no-binary pycparser
|
||||
|
||||
asn1crypto==0.22.0 \
|
||||
--hash=sha256:d232509fefcfcdb9a331f37e9c9dc20441019ad927c7d2176cf18ed5da0ba097 \
|
||||
--hash=sha256:cbbadd640d3165ab24b06ef25d1dca09a3441611ac15f6a6b452474fdf0aed1a
|
||||
cffi==1.11.5 \
|
||||
--hash=sha256:1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50 \
|
||||
--hash=sha256:87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596 \
|
||||
--hash=sha256:1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef \
|
||||
--hash=sha256:151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743 \
|
||||
--hash=sha256:edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f \
|
||||
--hash=sha256:ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31 \
|
||||
--hash=sha256:79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04 \
|
||||
--hash=sha256:b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6 \
|
||||
--hash=sha256:4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3 \
|
||||
--hash=sha256:7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6 \
|
||||
--hash=sha256:95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b \
|
||||
--hash=sha256:b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca \
|
||||
--hash=sha256:ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e \
|
||||
--hash=sha256:fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb \
|
||||
--hash=sha256:9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd \
|
||||
--hash=sha256:be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1 \
|
||||
--hash=sha256:ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917 \
|
||||
--hash=sha256:857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359 \
|
||||
--hash=sha256:2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f \
|
||||
--hash=sha256:a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95 \
|
||||
--hash=sha256:e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801 \
|
||||
--hash=sha256:3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257 \
|
||||
--hash=sha256:ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184 \
|
||||
--hash=sha256:770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc \
|
||||
--hash=sha256:d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085 \
|
||||
--hash=sha256:3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93 \
|
||||
--hash=sha256:ca1bd81f40adc59011f58159e4aa6445fc585a32bb8ac9badf7a2c1aa23822f2 \
|
||||
--hash=sha256:3bb6bd7266598f318063e584378b8e27c67de998a43362e8fce664c54ee52d30 \
|
||||
--hash=sha256:a6a5cb8809091ec9ac03edde9304b3ad82ad4466333432b16d78ef40e0cce0d5 \
|
||||
--hash=sha256:57b2533356cb2d8fac1555815929f7f5f14d68ac77b085d2326b571310f34f6e \
|
||||
--hash=sha256:495c5c2d43bf6cebe0178eb3e88f9c4aa48d8934aa6e3cddb865c058da76756b \
|
||||
--hash=sha256:e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4
|
||||
ConfigArgParse==0.12.0 \
|
||||
--hash=sha256:28cd7d67669651f2a4518367838c49539457504584a139709b2b8f6c208ef339 \
|
||||
--no-binary ConfigArgParse
|
||||
# ```
|
||||
ConfigArgParse==0.14.0 \
|
||||
--hash=sha256:2e2efe2be3f90577aca9415e32cb629aa2ecd92078adbe27b53a03e53ff12e91
|
||||
asn1crypto==0.24.0 \
|
||||
--hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \
|
||||
--hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49
|
||||
certifi==2019.6.16 \
|
||||
--hash=sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939 \
|
||||
--hash=sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695
|
||||
cffi==1.12.3 \
|
||||
--hash=sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774 \
|
||||
--hash=sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d \
|
||||
--hash=sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90 \
|
||||
--hash=sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b \
|
||||
--hash=sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63 \
|
||||
--hash=sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45 \
|
||||
--hash=sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25 \
|
||||
--hash=sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3 \
|
||||
--hash=sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b \
|
||||
--hash=sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647 \
|
||||
--hash=sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016 \
|
||||
--hash=sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4 \
|
||||
--hash=sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb \
|
||||
--hash=sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753 \
|
||||
--hash=sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7 \
|
||||
--hash=sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9 \
|
||||
--hash=sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f \
|
||||
--hash=sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8 \
|
||||
--hash=sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f \
|
||||
--hash=sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc \
|
||||
--hash=sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42 \
|
||||
--hash=sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3 \
|
||||
--hash=sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909 \
|
||||
--hash=sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45 \
|
||||
--hash=sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d \
|
||||
--hash=sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512 \
|
||||
--hash=sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff \
|
||||
--hash=sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201
|
||||
chardet==3.0.4 \
|
||||
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
|
||||
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
|
||||
configobj==5.0.6 \
|
||||
--hash=sha256:a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902 \
|
||||
--no-binary configobj
|
||||
cryptography==2.5 \
|
||||
--hash=sha256:9e29af877c29338f0cab5f049ccc8bd3ead289a557f144376c4fbc7d1b98914f \
|
||||
--hash=sha256:b13c80b877e73bcb6f012813c6f4a9334fcf4b0e96681c5a15dac578f2eedfa0 \
|
||||
--hash=sha256:8504661ffe324837f5c4607347eeee4cf0fcad689163c6e9c8d3b18cf1f4a4ad \
|
||||
--hash=sha256:e091bd424567efa4b9d94287a952597c05d22155a13716bf5f9f746b9dc906d3 \
|
||||
--hash=sha256:42fad67d7072216a49e34f923d8cbda9edacbf6633b19a79655e88a1b4857063 \
|
||||
--hash=sha256:9a30384cc402eac099210ab9b8801b2ae21e591831253883decdb4513b77a3cd \
|
||||
--hash=sha256:08b753df3672b7066e74376f42ce8fc4683e4fd1358d34c80f502e939ee944d2 \
|
||||
--hash=sha256:6f841c7272645dd7c65b07b7108adfa8af0aaea57f27b7f59e01d41f75444c85 \
|
||||
--hash=sha256:bfe66b577a7118e05b04141f0f1ed0959552d45672aa7ecb3d91e319d846001e \
|
||||
--hash=sha256:522fdb2809603ee97a4d0ef2f8d617bc791eb483313ba307cb9c0a773e5e5695 \
|
||||
--hash=sha256:05b3ded5e88747d28ee3ef493f2b92cbb947c1e45cf98cfef22e6d38bb67d4af \
|
||||
--hash=sha256:fa2b38c8519c5a3aa6e2b4e1cf1a549b54acda6adb25397ff542068e73d1ed00 \
|
||||
--hash=sha256:ab50da871bc109b2d9389259aac269dd1b7c7413ee02d06fe4e486ed26882159 \
|
||||
--hash=sha256:9260b201ce584d7825d900c88700aa0bd6b40d4ebac7b213857bd2babee9dbca \
|
||||
--hash=sha256:06826e7f72d1770e186e9c90e76b4f84d90cdb917b47ff88d8dc59a7b10e2b1e \
|
||||
--hash=sha256:2cd29bd1911782baaee890544c653bb03ec7d95ebeb144d714b0f5c33deb55c7 \
|
||||
--hash=sha256:7d335e35306af5b9bc0560ca39f740dfc8def72749645e193dd35be11fb323b3 \
|
||||
--hash=sha256:31e5637e9036d966824edaa91bf0aa39dc6f525a1c599f39fd5c50340264e079 \
|
||||
--hash=sha256:4946b67235b9d2ea7d31307be9d5ad5959d6c4a8f98f900157b47abddf698401
|
||||
enum34==1.1.2 ; python_version < '3.4' \
|
||||
--hash=sha256:2475d7fcddf5951e92ff546972758802de5260bf409319a9f1934e6bbc8b1dc7 \
|
||||
--hash=sha256:35907defb0f992b75ab7788f65fedc1cf20ffa22688e0e6f6f12afc06b3ea501
|
||||
--hash=sha256:a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902
|
||||
cryptography==2.7 \
|
||||
--hash=sha256:24b61e5fcb506424d3ec4e18bca995833839bf13c59fc43e530e488f28d46b8c \
|
||||
--hash=sha256:25dd1581a183e9e7a806fe0543f485103232f940fcfc301db65e630512cce643 \
|
||||
--hash=sha256:3452bba7c21c69f2df772762be0066c7ed5dc65df494a1d53a58b683a83e1216 \
|
||||
--hash=sha256:41a0be220dd1ed9e998f5891948306eb8c812b512dc398e5a01846d855050799 \
|
||||
--hash=sha256:5751d8a11b956fbfa314f6553d186b94aa70fdb03d8a4d4f1c82dcacf0cbe28a \
|
||||
--hash=sha256:5f61c7d749048fa6e3322258b4263463bfccefecb0dd731b6561cb617a1d9bb9 \
|
||||
--hash=sha256:72e24c521fa2106f19623a3851e9f89ddfdeb9ac63871c7643790f872a305dfc \
|
||||
--hash=sha256:7b97ae6ef5cba2e3bb14256625423413d5ce8d1abb91d4f29b6d1a081da765f8 \
|
||||
--hash=sha256:961e886d8a3590fd2c723cf07be14e2a91cf53c25f02435c04d39e90780e3b53 \
|
||||
--hash=sha256:96d8473848e984184b6728e2c9d391482008646276c3ff084a1bd89e15ff53a1 \
|
||||
--hash=sha256:ae536da50c7ad1e002c3eee101871d93abdc90d9c5f651818450a0d3af718609 \
|
||||
--hash=sha256:b0db0cecf396033abb4a93c95d1602f268b3a68bb0a9cc06a7cff587bb9a7292 \
|
||||
--hash=sha256:cfee9164954c186b191b91d4193989ca994703b2fff406f71cf454a2d3c7327e \
|
||||
--hash=sha256:e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6 \
|
||||
--hash=sha256:f27d93f0139a3c056172ebb5d4f9056e770fdf0206c2f422ff2ebbad142e09ed \
|
||||
--hash=sha256:f57b76e46a58b63d1c6375017f4564a28f19a5ca912691fd2e4261b3414b618d
|
||||
distro==1.4.0 \
|
||||
--hash=sha256:362dde65d846d23baee4b5c058c8586f219b5a54be1cf5fc6ff55c4578392f57 \
|
||||
--hash=sha256:eedf82a470ebe7d010f1872c17237c79ab04097948800029994fa458e52fb4b4
|
||||
enum34==1.1.6 \
|
||||
--hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
|
||||
--hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
|
||||
--hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
|
||||
--hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1
|
||||
funcsigs==1.0.2 \
|
||||
--hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
|
||||
--hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50
|
||||
idna==2.5 \
|
||||
--hash=sha256:cc19709fd6d0cbfed39ea875d29ba6d4e22c0cebc510a76d6302a28385e8bb70 \
|
||||
--hash=sha256:3cb5ce08046c4e3a560fc02f138d0ac63e00f8ce5901a56b32ec8b7994082aab
|
||||
ipaddress==1.0.16 \
|
||||
--hash=sha256:935712800ce4760701d89ad677666cd52691fd2f6f0b340c8b4239a3c17988a5 \
|
||||
--hash=sha256:5a3182b322a706525c46282ca6f064d27a02cffbd449f9f47416f1dc96aa71b0
|
||||
josepy==1.1.0 \
|
||||
--hash=sha256:1309a25aac3caeff5239729c58ff9b583f7d022ffdb1553406ddfc8e5b52b76e \
|
||||
--hash=sha256:fb5c62c77d26e04df29cb5ecd01b9ce69b6fcc9e521eb1ca193b7faa2afa7086
|
||||
linecache2==1.0.0 \
|
||||
--hash=sha256:e78be9c0a0dfcbac712fe04fbf92b96cddae80b1b842f24248214c8496f006ef \
|
||||
--hash=sha256:4b26ff4e7110db76eeb6f5a7b64a82623839d595c2038eeda662f2a2db78e97c
|
||||
# Using an older version of mock here prevents regressions of #5276.
|
||||
future==0.17.1 \
|
||||
--hash=sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8
|
||||
idna==2.8 \
|
||||
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
|
||||
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
|
||||
ipaddress==1.0.22 \
|
||||
--hash=sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794 \
|
||||
--hash=sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c
|
||||
josepy==1.2.0 \
|
||||
--hash=sha256:8ea15573203f28653c00f4ac0142520777b1c59d9eddd8da3f256c6ba3cac916 \
|
||||
--hash=sha256:9cec9a839fe9520f0420e4f38e7219525daccce4813296627436fe444cd002d3
|
||||
mock==1.3.0 \
|
||||
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb \
|
||||
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6
|
||||
ordereddict==1.1 \
|
||||
--hash=sha256:1c35b4ac206cef2d24816c89f89cf289dd3d38cf7c449bb3fab7bf6d43f01b1f \
|
||||
--no-binary ordereddict
|
||||
packaging==16.8 \
|
||||
--hash=sha256:99276dc6e3a7851f32027a68f1095cd3f77c148091b092ea867a351811cfe388 \
|
||||
--hash=sha256:5d50835fdf0a7edf0b55e311b7c887786504efea1177abd7e69329a8e5ea619e
|
||||
parsedatetime==2.1 \
|
||||
--hash=sha256:ce9d422165cf6e963905cd5f74f274ebf7cc98c941916169178ef93f0e557838 \
|
||||
--hash=sha256:17c578775520c99131634e09cfca5a05ea9e1bd2a05cd06967ebece10df7af2d
|
||||
pbr==1.8.1 \
|
||||
--hash=sha256:46c8db75ae75a056bd1cc07fa21734fe2e603d11a07833ecc1eeb74c35c72e0c \
|
||||
--hash=sha256:e2127626a91e6c885db89668976db31020f0af2da728924b56480fc7ccf09649
|
||||
pyOpenSSL==18.0.0 \
|
||||
--hash=sha256:26ff56a6b5ecaf3a2a59f132681e2a80afcc76b4f902f612f518f92c2a1bf854 \
|
||||
--hash=sha256:6488f1423b00f73b7ad5167885312bb0ce410d3312eb212393795b53c8caa580
|
||||
pyparsing==2.1.8 \
|
||||
--hash=sha256:2f0f5ceb14eccd5aef809d6382e87df22ca1da583c79f6db01675ce7d7f49c18 \
|
||||
--hash=sha256:03a4869b9f3493807ee1f1cb405e6d576a1a2ca4d81a982677c0c1ad6177c56b \
|
||||
--hash=sha256:ab09aee814c0241ff0c503cff30018219fe1fc14501d89f406f4664a0ec9fbcd \
|
||||
--hash=sha256:6e9a7f052f8e26bcf749e4033e3115b6dc7e3c85aafcb794b9a88c9d9ef13c97 \
|
||||
--hash=sha256:9f463a6bcc4eeb6c08f1ed84439b17818e2085937c0dee0d7674ac127c67c12b \
|
||||
--hash=sha256:3626b4d81cfb300dad57f52f2f791caaf7b06c09b368c0aa7b868e53a5775424 \
|
||||
--hash=sha256:367b90cc877b46af56d4580cd0ae278062903f02b8204ab631f5a2c0f50adfd0 \
|
||||
--hash=sha256:9f1ea360086cd68681e7f4ca8f1f38df47bf81942a0d76a9673c2d23eff35b13
|
||||
pyRFC3339==1.0 \
|
||||
--hash=sha256:eea31835c56e2096af4363a5745a784878a61d043e247d3a6d6a0a32a9741f56 \
|
||||
--hash=sha256:8dfbc6c458b8daba1c0f3620a8c78008b323a268b27b7359e92a4ae41325f535
|
||||
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6 \
|
||||
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb
|
||||
parsedatetime==2.4 \
|
||||
--hash=sha256:3d817c58fb9570d1eec1dd46fa9448cd644eeed4fb612684b02dfda3a79cb84b \
|
||||
--hash=sha256:9ee3529454bf35c40a77115f5a596771e59e1aee8c53306f346c461b8e913094
|
||||
pbr==5.4.2 \
|
||||
--hash=sha256:56e52299170b9492513c64be44736d27a512fa7e606f21942160b68ce510b4bc \
|
||||
--hash=sha256:9b321c204a88d8ab5082699469f52cc94c5da45c51f114113d01b3d993c24cdf
|
||||
pyOpenSSL==19.0.0 \
|
||||
--hash=sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200 \
|
||||
--hash=sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6
|
||||
pyRFC3339==1.1 \
|
||||
--hash=sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4 \
|
||||
--hash=sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a
|
||||
pycparser==2.19 \
|
||||
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3
|
||||
pyparsing==2.4.2 \
|
||||
--hash=sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80 \
|
||||
--hash=sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4
|
||||
python-augeas==0.5.0 \
|
||||
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2 \
|
||||
--no-binary python-augeas
|
||||
pytz==2015.7 \
|
||||
--hash=sha256:3abe6a6d3fc2fbbe4c60144211f45da2edbe3182a6f6511af6bbba0598b1f992 \
|
||||
--hash=sha256:939ef9c1e1224d980405689a97ffcf7828c56d1517b31d73464356c1f2b7769e \
|
||||
--hash=sha256:ead4aefa7007249e05e51b01095719d5a8dd95760089f5730aac5698b1932918 \
|
||||
--hash=sha256:3cca0df08bd0ed98432390494ce3ded003f5e661aa460be7a734bffe35983605 \
|
||||
--hash=sha256:3ede470d3d17ba3c07638dfa0d10452bc1b6e5ad326127a65ba77e6aaeb11bec \
|
||||
--hash=sha256:68c47964f7186eec306b13629627722b9079cd4447ed9e5ecaecd4eac84ca734 \
|
||||
--hash=sha256:dd5d3991950aae40a6c81de1578942e73d629808cefc51d12cd157980e6cfc18 \
|
||||
--hash=sha256:a77c52062c07eb7c7b30545dbc73e32995b7e117eea750317b5cb5c7a4618f14 \
|
||||
--hash=sha256:81af9aec4bc960a9a0127c488f18772dae4634689233f06f65443e7b11ebeb51 \
|
||||
--hash=sha256:e079b1dadc5c06246cc1bb6fe1b23a50b1d1173f2edd5104efd40bb73a28f406 \
|
||||
--hash=sha256:fbd26746772c24cb93c8b97cbdad5cb9e46c86bbdb1b9d8a743ee00e2fb1fc5d \
|
||||
--hash=sha256:99266ef30a37e43932deec2b7ca73e83c8dbc3b9ff703ec73eca6b1dae6befea \
|
||||
--hash=sha256:8b6ce1c993909783bc96e0b4f34ea223bff7a4df2c90bdb9c4e0f1ac928689e3
|
||||
requests==2.20.0 \
|
||||
--hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \
|
||||
--hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279
|
||||
six==1.10.0 \
|
||||
--hash=sha256:0ff78c403d9bccf5a425a6d31a12aa6b47f1c21ca4dc2573a7e2f32a97335eb1 \
|
||||
--hash=sha256:105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a
|
||||
traceback2==1.4.0 \
|
||||
--hash=sha256:8253cebec4b19094d67cc5ed5af99bf1dba1285292226e98a31929f87a5d6b23 \
|
||||
--hash=sha256:05acc67a09980c2ecfedd3423f7ae0104839eccb55fc645773e1caa0951c3030
|
||||
unittest2==1.1.0 \
|
||||
--hash=sha256:13f77d0875db6d9b435e1d4f41e74ad4cc2eb6e1d5c824996092b3430f088bb8 \
|
||||
--hash=sha256:22882a0e418c284e1f718a822b3b022944d53d2d908e1690b319a9d3eb2c0579
|
||||
zope.component==4.2.2 \
|
||||
--hash=sha256:282c112b55dd8e3c869a3571f86767c150ab1284a9ace2bdec226c592acaf81a \
|
||||
--no-binary zope.component
|
||||
zope.event==4.1.0 \
|
||||
--hash=sha256:dc7a59a2fd91730d3793131a5d261b29e93ec4e2a97f1bc487ce8defee2fe786 \
|
||||
--no-binary zope.event
|
||||
zope.interface==4.1.3 \
|
||||
--hash=sha256:f07b631f7a601cd8cbd3332d54f43142c7088a83299f859356f08d1d4d4259b3 \
|
||||
--hash=sha256:de5cca083b9439d8002fb76bbe6b4998c5a5a721fab25b84298967f002df4c94 \
|
||||
--hash=sha256:6788416f7ea7f5b8a97be94825377aa25e8bdc73463e07baaf9858b29e737077 \
|
||||
--hash=sha256:6f3230f7254518201e5a3708cbb2de98c848304f06e3ded8bfb39e5825cba2e1 \
|
||||
--hash=sha256:5fa575a5240f04200c3088427d0d4b7b737f6e9018818a51d8d0f927a6a2517a \
|
||||
--hash=sha256:522194ad6a545735edd75c8a83f48d65d1af064e432a7d320d64f56bafc12e99 \
|
||||
--hash=sha256:e8c7b2d40943f71c99148c97f66caa7f5134147f57423f8db5b4825099ce9a09 \
|
||||
--hash=sha256:279024f0208601c3caa907c53876e37ad88625f7eaf1cb3842dbe360b2287017 \
|
||||
--hash=sha256:2e221a9eec7ccc58889a278ea13dcfed5ef939d80b07819a9a8b3cb1c681484f \
|
||||
--hash=sha256:69118965410ec86d44dc6b9017ee3ddbd582e0c0abeef62b3a19dbf6c8ad132b \
|
||||
--hash=sha256:d04df8686ec864d0cade8cf199f7f83aecd416109a20834d568f8310ded12dea \
|
||||
--hash=sha256:e75a947e15ee97e7e71e02ea302feb2fc62d3a2bb4668bf9dfbed43a506ac7e7 \
|
||||
--hash=sha256:4e45d22fb883222a5ab9f282a116fec5ee2e8d1a568ccff6a2d75bbd0eb6bcfc \
|
||||
--hash=sha256:bce9339bb3c7a55e0803b63d21c5839e8e479bc85c4adf42ae415b72f94facb2 \
|
||||
--hash=sha256:928138365245a0e8869a5999fbcc2a45475a0a6ed52a494d60dbdc540335fedd \
|
||||
--hash=sha256:0d841ba1bb840eea0e6489dc5ecafa6125554971f53b5acb87764441e61bceba \
|
||||
--hash=sha256:b09c8c1d47b3531c400e0195697f1414a63221de6ef478598a4f1460f7d9a392
|
||||
requests-toolbelt==0.8.0 \
|
||||
--hash=sha256:42c9c170abc2cacb78b8ab23ac957945c7716249206f90874651971a4acff237 \
|
||||
--hash=sha256:f6a531936c6fa4c6cfce1b9c10d5c4f498d16528d2a54a22ca00011205a187b5
|
||||
chardet==3.0.2 \
|
||||
--hash=sha256:4f7832e7c583348a9eddd927ee8514b3bf717c061f57b21dbe7697211454d9bb \
|
||||
--hash=sha256:6ebf56457934fdce01fb5ada5582762a84eed94cad43ed877964aebbdd8174c0
|
||||
urllib3==1.24.1 \
|
||||
--hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \
|
||||
--hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22
|
||||
certifi==2017.4.17 \
|
||||
--hash=sha256:f4318671072f030a33c7ca6acaef720ddd50ff124d1388e50c1bda4cbd6d7010 \
|
||||
--hash=sha256:f7527ebf7461582ce95f7a9e03dd141ce810d40590834f4ec20cddd54234c10a
|
||||
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2
|
||||
pytz==2019.2 \
|
||||
--hash=sha256:26c0b32e437e54a18161324a2fca3c4b9846b74a8dccddd843113109e1116b32 \
|
||||
--hash=sha256:c894d57500a4cd2d5c71114aaab77dbab5eabd9022308ce5ac9bb93a60a6f0c7
|
||||
requests==2.21.0 \
|
||||
--hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
|
||||
--hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b
|
||||
requests-toolbelt==0.9.1 \
|
||||
--hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
|
||||
--hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0
|
||||
six==1.12.0 \
|
||||
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
|
||||
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73
|
||||
urllib3==1.24.3 \
|
||||
--hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \
|
||||
--hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb
|
||||
zope.component==4.5 \
|
||||
--hash=sha256:6edfd626c3b593b72895a8cfcf79bff41f4619194ce996a85bce31ac02b94e55 \
|
||||
--hash=sha256:984a06ba3def0b02b1117fa4c45b56e772e8c29c0340820fbf367e440a93a3a4
|
||||
zope.deferredimport==4.3.1 \
|
||||
--hash=sha256:57b2345e7b5eef47efcd4f634ff16c93e4265de3dcf325afc7315ade48d909e1 \
|
||||
--hash=sha256:9a0c211df44aa95f1c4e6d2626f90b400f56989180d3ef96032d708da3d23e0a
|
||||
zope.deprecation==4.4.0 \
|
||||
--hash=sha256:0d453338f04bacf91bbfba545d8bcdf529aa829e67b705eac8c1a7fdce66e2df \
|
||||
--hash=sha256:f1480b74995958b24ce37b0ef04d3663d2683e5d6debc96726eff18acf4ea113
|
||||
zope.event==4.4 \
|
||||
--hash=sha256:69c27debad9bdacd9ce9b735dad382142281ac770c4a432b533d6d65c4614bcf \
|
||||
--hash=sha256:d8e97d165fd5a0997b45f5303ae11ea3338becfe68c401dd88ffd2113fe5cae7
|
||||
zope.hookable==4.2.0 \
|
||||
--hash=sha256:22886e421234e7e8cedc21202e1d0ab59960e40a47dd7240e9659a2d82c51370 \
|
||||
--hash=sha256:39912f446e45b4e1f1951b5ffa2d5c8b074d25727ec51855ae9eab5408f105ab \
|
||||
--hash=sha256:3adb7ea0871dbc56b78f62c4f5c024851fc74299f4f2a95f913025b076cde220 \
|
||||
--hash=sha256:3d7c4b96341c02553d8b8d71065a9366ef67e6c6feca714f269894646bb8268b \
|
||||
--hash=sha256:4e826a11a529ed0464ffcecf34b0b7bd1b4928dd5848c5c61bedd7833e8f4801 \
|
||||
--hash=sha256:700d68cc30728de1c4c62088a981c6daeaefdf20a0d81995d2c0b7f442c5f88c \
|
||||
--hash=sha256:77c82a430cedfbf508d1aa406b2f437363c24fa90c73f577ead0fb5295749b83 \
|
||||
--hash=sha256:c1df3929a3666fc5a0c80d60a0c1e6f6ef97c7f6ed2f1b7cf49f3e6f3d4dde15 \
|
||||
--hash=sha256:dba8b2dd2cd41cb5f37bfa3f3d82721b8ae10e492944e48ddd90a439227f2893 \
|
||||
--hash=sha256:f492540305b15b5591bd7195d61f28946bb071de071cee5d68b6b8414da90fd2
|
||||
zope.interface==4.6.0 \
|
||||
--hash=sha256:086707e0f413ff8800d9c4bc26e174f7ee4c9c8b0302fbad68d083071822316c \
|
||||
--hash=sha256:1157b1ec2a1f5bf45668421e3955c60c610e31913cc695b407a574efdbae1f7b \
|
||||
--hash=sha256:11ebddf765bff3bbe8dbce10c86884d87f90ed66ee410a7e6c392086e2c63d02 \
|
||||
--hash=sha256:14b242d53f6f35c2d07aa2c0e13ccb710392bcd203e1b82a1828d216f6f6b11f \
|
||||
--hash=sha256:1b3d0dcabc7c90b470e59e38a9acaa361be43b3a6ea644c0063951964717f0e5 \
|
||||
--hash=sha256:20a12ab46a7e72b89ce0671e7d7a6c3c1ca2c2766ac98112f78c5bddaa6e4375 \
|
||||
--hash=sha256:298f82c0ab1b182bd1f34f347ea97dde0fffb9ecf850ecf7f8904b8442a07487 \
|
||||
--hash=sha256:2f6175722da6f23dbfc76c26c241b67b020e1e83ec7fe93c9e5d3dd18667ada2 \
|
||||
--hash=sha256:3b877de633a0f6d81b600624ff9137312d8b1d0f517064dfc39999352ab659f0 \
|
||||
--hash=sha256:4265681e77f5ac5bac0905812b828c9fe1ce80c6f3e3f8574acfb5643aeabc5b \
|
||||
--hash=sha256:550695c4e7313555549aa1cdb978dc9413d61307531f123558e438871a883d63 \
|
||||
--hash=sha256:5f4d42baed3a14c290a078e2696c5f565501abde1b2f3f1a1c0a94fbf6fbcc39 \
|
||||
--hash=sha256:62dd71dbed8cc6a18379700701d959307823b3b2451bdc018594c48956ace745 \
|
||||
--hash=sha256:7040547e5b882349c0a2cc9b50674b1745db551f330746af434aad4f09fba2cc \
|
||||
--hash=sha256:7e099fde2cce8b29434684f82977db4e24f0efa8b0508179fce1602d103296a2 \
|
||||
--hash=sha256:7e5c9a5012b2b33e87980cee7d1c82412b2ebabcb5862d53413ba1a2cfde23aa \
|
||||
--hash=sha256:81295629128f929e73be4ccfdd943a0906e5fe3cdb0d43ff1e5144d16fbb52b1 \
|
||||
--hash=sha256:95cc574b0b83b85be9917d37cd2fad0ce5a0d21b024e1a5804d044aabea636fc \
|
||||
--hash=sha256:968d5c5702da15c5bf8e4a6e4b67a4d92164e334e9c0b6acf080106678230b98 \
|
||||
--hash=sha256:9e998ba87df77a85c7bed53240a7257afe51a07ee6bc3445a0bf841886da0b97 \
|
||||
--hash=sha256:a0c39e2535a7e9c195af956610dba5a1073071d2d85e9d2e5d789463f63e52ab \
|
||||
--hash=sha256:a15e75d284178afe529a536b0e8b28b7e107ef39626a7809b4ee64ff3abc9127 \
|
||||
--hash=sha256:a6a6ff82f5f9b9702478035d8f6fb6903885653bff7ec3a1e011edc9b1a7168d \
|
||||
--hash=sha256:b639f72b95389620c1f881d94739c614d385406ab1d6926a9ffe1c8abbea23fe \
|
||||
--hash=sha256:bad44274b151d46619a7567010f7cde23a908c6faa84b97598fd2f474a0c6891 \
|
||||
--hash=sha256:bbcef00d09a30948756c5968863316c949d9cedbc7aabac5e8f0ffbdb632e5f1 \
|
||||
--hash=sha256:d788a3999014ddf416f2dc454efa4a5dbeda657c6aba031cf363741273804c6b \
|
||||
--hash=sha256:eed88ae03e1ef3a75a0e96a55a99d7937ed03e53d0cffc2451c208db445a2966 \
|
||||
--hash=sha256:f99451f3a579e73b5dd58b1b08d1179791d49084371d9a47baad3b22417f0317
|
||||
zope.proxy==4.3.2 \
|
||||
--hash=sha256:320a7619992e42142549ebf61e14ce27683b4d14b0cbc45f7c037ba64edb560c \
|
||||
--hash=sha256:824d4dbabbb7deb84f25fdb96ea1eeca436a1802c3c8d323b3eb4ac9d527d41c \
|
||||
--hash=sha256:8a32eb9c94908f3544da2dae3f4a9e6961d78819b88ac6b6f4a51cee2d65f4a0 \
|
||||
--hash=sha256:96265fd3bc3ea646f98482e16307a69de21402eeaaaaf4b841c1161ac2f71bb0 \
|
||||
--hash=sha256:ab6d6975d9c51c13cac828ff03168de21fb562b0664c59bcdc4a4b10f39a5b17 \
|
||||
--hash=sha256:af10cb772391772463f65a58348e2de5ecc06693c16d2078be276dc068bcbb54 \
|
||||
--hash=sha256:b8fd3a3de3f7b6452775e92af22af5977b17b69ac86a38a3ddfe870e40a0d05f \
|
||||
--hash=sha256:bb7088f1bed3b8214284a5e425dc23da56f2f28e8815b7580bfed9e245b6c0b6 \
|
||||
--hash=sha256:bc29b3665eac34f14c4aef5224bef045efcfb1a7d12d78c8685858de5fbf21c0 \
|
||||
--hash=sha256:c39fa6a159affeae5fe31b49d9f5b12bd674fe77271a9a324408b271440c50a7 \
|
||||
--hash=sha256:e946a036ac5b9f897e986ac9dc950a34cffc857d88eae6727b8434fbc4752366
|
||||
|
||||
# Contains the requirements for the letsencrypt package.
#
@@ -1190,18 +1338,18 @@ letsencrypt==0.7.0 \
--hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
--hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9

certbot==0.32.0 \
--hash=sha256:75fd986ae42cd90bde6400c5f5a0dd936a7f4a42a416146b1e8bb0f92028b443 \
--hash=sha256:c0b94e25a07d83809d98029f09e9b501f86ec97624f45ce86800a7002488c3c8
acme==0.32.0 \
--hash=sha256:88b2d2741e5ea028c590a33b16fb647cb74af6b2db6c7909c738a48f879efdec \
--hash=sha256:0eefce8b7880eb7eccc049a6b8ba262fc624bc34b3a8581d05b82f2bb39f1aec
certbot-apache==0.32.0 \
--hash=sha256:b2c82b7a1c44799ba3a150970513ed4fa9afeee40e326440800b1243f917ddb6 \
--hash=sha256:68072775f1bb4bc9fc64cabe051a761f6dbf296012512eff7819144ac8b9ec97
certbot-nginx==0.32.0 \
--hash=sha256:3fc3664231586565d886ddcb679c95a2fb2494a2ce3e028149f1496dca5b47cf \
--hash=sha256:82c43cd26aacc2eb0ae890be6a2f74d726b6dcb4ee7b63c0e55ec33e576f3e84
certbot==0.40.1 \
--hash=sha256:afe4d7edc61d4cab8b6f7ab7611d66aaba67d9f0404fa2760bd1cc430b2ec9ec \
--hash=sha256:8dc81b3044cf401c55fa36c30893887fcb92657df1d76a8848059683df3b10d1
acme==0.40.1 \
--hash=sha256:a85387c26fb4fc24511b2579b8c61177b3175fd25e130c5be95a840d9f67d54f \
--hash=sha256:33bf8686408d5b6b79886a9a43aee691ca754408deaec4fb2bb17b9af48a5ffc
certbot-apache==0.40.1 \
--hash=sha256:6c4a4e21a17bd8120056595e5670c9a0f86756da0ad269196e8d56fc1b9fec82 \
--hash=sha256:16404e9e404f0b98c18bd752fad812a45ef7f9b0efa9bbc8589f24a94e67de7c
certbot-nginx==0.40.1 \
--hash=sha256:acdbfc4ab75ebc810264ee1248332f8e857c5e7776717b7cd53c4ceceb2b4d34 \
--hash=sha256:014fdda80647ad9e67019b16c7cdaee5d21a750b393bcb98e1300615cc930f4f

UNLIKELY_EOF
# -------------------------------------------------------------------------
@@ -1364,7 +1512,7 @@ def main():
temp,
digest)
for path, digest in PACKAGES]
# On Windows, pip self-upgrade is not possible, it must be done through python interpreter.
# Calling pip as a module is the preferred way to avoid problems about pip self-upgrade.
command = [python, '-m', 'pip', 'install', '--no-index', '--no-deps', '-U']
# Disable cache since it is not used and it otherwise sometimes throws permission warnings:
command.extend(['--no-cache-dir'] if has_pip_cache else [])
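The hunk above drives pip through the target interpreter rather than a pip executable. A minimal sketch of that pattern follows; the function name, the example wheel path, and the has_pip_cache flag are illustrative only and are not taken from the certbot sources.

import subprocess
import sys


def install_local_packages(python, package_paths, has_pip_cache=True):
    """Install already-downloaded archives with the target interpreter's pip.

    Invoking pip as 'python -m pip' (instead of a pip executable) lets pip
    upgrade itself on Windows, where a running pip.exe cannot replace its
    own files.
    """
    command = [python, '-m', 'pip', 'install', '--no-index', '--no-deps', '-U']
    # The cache is unused here and can trigger permission warnings, so skip
    # it whenever the pip in use supports a cache directory at all.
    command.extend(['--no-cache-dir'] if has_pip_cache else [])
    command.extend(package_paths)
    subprocess.check_call(command)


if __name__ == '__main__':
    # Hypothetical local archive, for illustration only.
    install_local_packages(sys.executable, ['./pip-19.1-py2.py3-none-any.whl'])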
@@ -1466,6 +1614,24 @@ else
  exit 0
fi

DeterminePythonVersion "NOCRASH"
# Don't warn about file permissions if the user disabled the check or we
# can't find an up-to-date Python.
if [ "$PYVER" -ge "$MIN_PYVER" -a "$NO_PERMISSIONS_CHECK" != 1 ]; then
  # If the script fails for some reason, don't break certbot-auto.
  set +e
  # Suppress unexpected error output.
  CHECK_PERM_OUT=$(CheckPathPermissions "$LE_PYTHON" "$0" 2>/dev/null)
  CHECK_PERM_STATUS="$?"
  set -e
  # Only print output if the script ran successfully and it actually produced
  # output. The latter check resolves
  # https://github.com/certbot/certbot/issues/7012.
  if [ "$CHECK_PERM_STATUS" = 0 -a -n "$CHECK_PERM_OUT" ]; then
    error "$CHECK_PERM_OUT"
  fi
fi

if [ "$NO_SELF_UPGRADE" != 1 ]; then
  TEMP_DIR=$(TempDir)
  trap 'rm -rf "$TEMP_DIR"' EXIT
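The shell above runs an optional permission check, deliberately ignores failures of the helper itself, and only reports output that a successful run actually produced. A rough Python sketch of that same pattern, assuming a hypothetical checker command:

import subprocess


def run_optional_check(command):
    """Run a best-effort check; return its output only if the helper both
    succeeded and printed something, otherwise stay silent."""
    try:
        proc = subprocess.run(command, capture_output=True, text=True)
    except OSError:
        # A missing or broken helper must not break the caller.
        return None
    if proc.returncode == 0 and proc.stdout.strip():
        return proc.stdout
    return None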
@@ -1622,7 +1788,6 @@ if __name__ == '__main__':

UNLIKELY_EOF
# ---------------------------------------------------------------------------
DeterminePythonVersion "NOCRASH"
if [ "$PYVER" -lt "$MIN_PYVER" ]; then
  error "WARNING: couldn't find Python $MIN_PYTHON_VERSION+ to check for updates."
elif ! REMOTE_VERSION=`"$LE_PYTHON" "$TEMP_DIR/fetch.py" --latest-version` ; then

1
certbot-ci/MANIFEST.in
Normal file
@@ -0,0 +1 @@
recursive-include certbot_integration_tests/assets *
@@ -2,7 +2,8 @@
# Avoid false warnings because certbot packages are not installed in the thread that executes
# the coverage: indeed, certbot is launched as a CLI from a subprocess.
disable_warnings = module-not-imported,no-data-collected
omit = **/*_test.py,**/tests/*,**/dns_common*,**/certbot_nginx/parser_obj.py

[report]
# Exclude unit tests in coverage during integration tests.
omit = **/*_test.py,**/tests/*,**/certbot_nginx/parser_obj.py
omit = **/*_test.py,**/tests/*,**/dns_common*,**/certbot_nginx/parser_obj.py

32
certbot-ci/certbot_integration_tests/assets/cert.pem
Normal file
@@ -0,0 +1,32 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFlTCCA32gAwIBAgIUR3wbM8qFE68f8NxfciHhUjR1GeUwDQYJKoZIhvcNAQEL
|
||||
BQAwWTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
|
||||
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDESMBAGA1UEAwwJbmdpbngud3RmMCAX
|
||||
DTE5MDQxODIwMDUwM1oYDzIyOTMwMTMwMjAwNTAzWjBZMQswCQYDVQQGEwJBVTET
|
||||
MBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQ
|
||||
dHkgTHRkMRIwEAYDVQQDDAluZ2lueC53dGYwggIiMA0GCSqGSIb3DQEBAQUAA4IC
|
||||
DwAwggIKAoICAQC/W+yxYE0PWJOS4df71Yx596fDjW03I9JZuu9kfP7mneMgy+OC
|
||||
HyRm0TEhl6FPUp9tD9YeEHloUZNjHEOg/qrnbEOspv3Ha3RFinzrzkMwbzEPR3Xf
|
||||
0go+aVsWelDhapFl8fccw4tWwijVZQquhBsWOUnPenS3Txe96kEv2NNJlJ0qFUa+
|
||||
rOTruzRzOzlbgKv5WRb4+BxxWonHLkAQ5IT87GBlsCerVIyPD+BnZveZGl6e9oMH
|
||||
ZlZvUT6aWRnzFWjAnQGiJpVIw7l9r4EW0jq1z7wqb37FrqrFbtWrOfUZVE7AlqXH
|
||||
aKIR82/xwkcZfFk3sCAM0IcZc8B2SDLi4zNZtDivW6qQgTC/3z5yf1hnJ+j00dtE
|
||||
X5qYlgXRaM2raOn31lxcerk5pjgagQ7Zj+v3YZS0QnenrgyXJcdnXLDj+cIARzx4
|
||||
QHtoO0nyP0RJqxvwX/H98513JTkeqFBc/Bx11UWYsUv20Qoo9IAuz0VDARu6rquu
|
||||
k9anv56yvxo77qZ8r80l3z8eMyDA+UjuSD2p1Za09RAHfva7o8rMUqULHNQ4pfFH
|
||||
JIUozHoinAg/9lBC/W80fcbILks+Sdi6E9WQ0n8PLl7oFLx9prEDCycKuC0z76J/
|
||||
Shb6R6sWr1YtzUFUc5EH2g9pMriaqT8uGO4CMOeRemXahrdT/H+Xg5m4TQIDAQAB
|
||||
o1MwUTAdBgNVHQ4EFgQU46gJeu9ZOfTQ6c4vfbWbSLUpEMowHwYDVR0jBBgwFoAU
|
||||
46gJeu9ZOfTQ6c4vfbWbSLUpEMowDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0B
|
||||
AQsFAAOCAgEAcnfkXDUTsEGs0MleegkGbRCVy72a3U7tv1KVTLB8qLPc3tpPJJoT
|
||||
D4PbOuw9+yIE+HetZTZooOpaZoorLQdiwAEjlQ44RVuXSHSARQ8KW9ZZeiWN/Qvl
|
||||
Ip4xJ/cHxcKTFKSc/99o8M+kmPKEXF9SUMfKPc5jXarNxCsnA3VriYqJ+CnYEox2
|
||||
duNUEe3A9Y2d8ZxjmscBqlcXpk1kFwsCRT5UYVoUYwyjYznLkO5A+GJ0ZnMyRMQp
|
||||
obUiB34hUrNgyOaBvizk+pNh9EV4rEBPRQwhy4vDMco4AjQcwLWQAQ9G4GSt/E+Q
|
||||
62XdVDa6CAuOvBCudDPki7kEqNLbj1tMY1K/gsbgb6TYA/xTOVulAnqm4OEZ2svJ
|
||||
0Jqw3BzMfRTaxsNU6jxm8WehVL15GjoJUzfs7Te+l7Vm/QNc1Dv2pmEhVfBibwMa
|
||||
YxUZ8ClQtQ1lsOpne97Og0p/Cm93kKELNBLTjzXtpXGGPPYisAyNwe0Hadq8SiOd
|
||||
pXeNwXa5vHOXHv8xBENzBvFJ3TRN2GmMlHBp/eOfVUx/huNSpcnh2gO3fn5EbMj7
|
||||
43IaR133JW5yWbneYAMJOEAMdEB5EthRmEDtLVA7kLqLc/ywFTQ4VbS2b+PsOr5O
|
||||
501nzt0OTMMEz+UafvGXj5OPJBhe26RtnYXzVwwLfto/F5udM5zglWo=
|
||||
-----END CERTIFICATE-----
|
||||
11
certbot-ci/certbot_integration_tests/assets/hook.py
Executable file
@@ -0,0 +1,11 @@
#!/usr/bin/env python
import sys
import os

hook_script_type = os.path.basename(os.path.dirname(sys.argv[1]))
if hook_script_type == 'deploy' and ('RENEWED_DOMAINS' not in os.environ or 'RENEWED_LINEAGE' not in os.environ):
    sys.stderr.write('Environment variables not properly set!\n')
    sys.exit(1)

with open(sys.argv[2], 'a') as file_h:
    file_h.write(hook_script_type + '\n')
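For orientation: the hook asset above infers its type from the parent directory of its first argument, refuses to act as a deploy hook unless certbot's RENEWED_DOMAINS and RENEWED_LINEAGE variables are present, and appends the inferred type to the log file named by its second argument. A hypothetical manual invocation, with all paths made up purely for illustration:

import os
import subprocess

env = dict(os.environ,
           RENEWED_DOMAINS='example.org',
           RENEWED_LINEAGE='/tmp/lineage/example.org')
# argv[1]: a path whose parent directory names the hook type ('deploy' here);
# argv[2]: the log file the hook appends that type to.
subprocess.check_call(
    ['python', 'hook.py', '/tmp/hooks/deploy/hook.py', '/tmp/hook.log'],
    env=env)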
52
certbot-ci/certbot_integration_tests/assets/key.pem
Normal file
@@ -0,0 +1,52 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIIJQQIBADANBgkqhkiG9w0BAQEFAASCCSswggknAgEAAoICAQC/W+yxYE0PWJOS
|
||||
4df71Yx596fDjW03I9JZuu9kfP7mneMgy+OCHyRm0TEhl6FPUp9tD9YeEHloUZNj
|
||||
HEOg/qrnbEOspv3Ha3RFinzrzkMwbzEPR3Xf0go+aVsWelDhapFl8fccw4tWwijV
|
||||
ZQquhBsWOUnPenS3Txe96kEv2NNJlJ0qFUa+rOTruzRzOzlbgKv5WRb4+BxxWonH
|
||||
LkAQ5IT87GBlsCerVIyPD+BnZveZGl6e9oMHZlZvUT6aWRnzFWjAnQGiJpVIw7l9
|
||||
r4EW0jq1z7wqb37FrqrFbtWrOfUZVE7AlqXHaKIR82/xwkcZfFk3sCAM0IcZc8B2
|
||||
SDLi4zNZtDivW6qQgTC/3z5yf1hnJ+j00dtEX5qYlgXRaM2raOn31lxcerk5pjga
|
||||
gQ7Zj+v3YZS0QnenrgyXJcdnXLDj+cIARzx4QHtoO0nyP0RJqxvwX/H98513JTke
|
||||
qFBc/Bx11UWYsUv20Qoo9IAuz0VDARu6rquuk9anv56yvxo77qZ8r80l3z8eMyDA
|
||||
+UjuSD2p1Za09RAHfva7o8rMUqULHNQ4pfFHJIUozHoinAg/9lBC/W80fcbILks+
|
||||
Sdi6E9WQ0n8PLl7oFLx9prEDCycKuC0z76J/Shb6R6sWr1YtzUFUc5EH2g9pMria
|
||||
qT8uGO4CMOeRemXahrdT/H+Xg5m4TQIDAQABAoICAAGGL+pxw+tdXz+KQPgmiUnn
|
||||
aRSrqbUIugIw9Pst67HWjBqUxSkiKl4PSH7mAEjrdY2e1KvEodLs42mkrf04ShAx
|
||||
0pArfFX8Sx7KrZgLOonGOPPQM+YmfCJnIGybaM2C1cmkFb3K6O81+LFKbr1ZHAYf
|
||||
SrE2XnufS6cdmItTBMvPPTk6lieqpOAjy5UnYZuS+Muxo/czsrZMbFCD08rOpyiE
|
||||
kXf94TMCJ2R0UetA7LPxe9N0TzLd485bLU55azV+dCkklwC9oe7EcFPJ9BNEdWdB
|
||||
UlRcMvxMGdwct+L3QTaEb2QlTwi5kqDl+XxJeduAHA3Pf1Haz1iqjVvj01PvT1di
|
||||
Cs0+ZeFBsa+BfiGDe9ONwuSQljda1CuI+vDv5bGUExulOSG1dHJ7RK9PBaXFaR/b
|
||||
/9tRBwAw1Erm7s1JIkjda5Oc46jFb3HzDaZYB1n5hUmEIrYM8HhUOGITyVT3hxDO
|
||||
AWlaV3aveQ0MmMXLptVXDgbjPGbWDGMLD9d5vUE9R7IyOLeXOmjthYlCH2rj378M
|
||||
r2PkgX2tD0A/yoEZ8XCFdtBWSVajLdL0/gkm7sKosMABBy3yrSCxbHeq5TFuTAXA
|
||||
hOdypX4NOZkA6WJU+hn3GkQyIScLqSrvGRA9kzHGoEWVZDKkB9DXg+dmTARZDWXD
|
||||
mCnHkJo6+FcbhUpXniuZAoIBAQDmE94vvdstB+HEtXxN1uNDY7H8gPc/BUonU6a9
|
||||
G5YOIbjByCfEDcXF8AUWekc6lc8DNG3ydx0dnb2ZAkxmdlsaD8GLqHGILzlSsOwR
|
||||
sez8nR4+4n9vYMfx9Qal8Ren5xEP9Z9sJcNqbKVGta1WFtQzrgYbpVXXf/Luv0xS
|
||||
YoVK8KaEACciD6XX4wmajrAXPPQgThvqQtXuTn/AxWsUDg1DK0tw1VRUuOJuJwpw
|
||||
f6qocM9AyqUNvdeVyjFx8Slag34ZI7fmxPtHX/e6opTg3zVXab1Ow8AMICOHMRL6
|
||||
m5/+wnWa9xMoKI4kYfk/QFqeTccnLDlwi6kQM8WRfbwr9AyPAoIBAQDU60wrX6Lm
|
||||
0vIfngv1/4j/w+AGAwjvxiuJ7Q7LwQ2fGsZGOIfMK/lpBxCn543kGbQT+KQKNOjO
|
||||
+EywObftnJ6Y2+om2NoLkCnCiptsfr5WlN8pxtIPQu2iu5xXA67WpQv4Nc4769PM
|
||||
wJGVW3pmPKi6H0QjjqYAZd1NAXdN9Au14zZVh3KBWoz82kTHWKSL6Ld1UClG728W
|
||||
k/moyCFFMMGTXX/LVliQzDVLM6L5jbAOaG317qAuxZIqFJ9NLwHFW9uH/i1S6Qfp
|
||||
+lOmOfVYKu1O/qh1DUBQfuJkR1XIn2ifZEjxOsxeTmWu1LXpyoZy526JRu49pk8Z
|
||||
DdEu+w7hsdNjAoIBAD1YWsub8Y6GJXpPcX9HpnzXXiOXN1VEUcs+kJyneFD4SMzS
|
||||
U1gA3BS0tIaTv94tB28xUYdunwLAhkb/x+Mh95RxUwert+m5va0Ao1DsgeWw9tmJ
|
||||
hrTptyYaUNV5/Pa1s2Tv9rvdLcd4hHDgDAGCQL4uzk4cvVCiOuHRe8YTorqig6N6
|
||||
bvSz+2IelPbyyJzJkcXzTZoei+/oWkPJ340PWhXou0qwdrXIPgdkvXHVeGlE+t2p
|
||||
qmyJi6vSp3Bb/sy1dq+5SFVtfBpBykmnA88ZdJ2EAge4RcJ150MqoIbVa8l/i9/v
|
||||
tNnmRlAJF233+LFwx4L4VbBebIt3YlwyjDOj9J0CggEAIknKOGnsV/O8ni7bikAe
|
||||
leG7X/x5IfPt6wZMDbAHO4oaSBCufcjPH4TNv9xgU014XIb8E9C1dS8zWmXRIujH
|
||||
+aHgsWTWqGoM75FWukAm8taCob2s8lw63KwN301uiI6HwO8ZSTkPILgaOc1DhtdZ
|
||||
7K9AT+GXBhVhcBc+WUVl5WKzy05GuGIWtlmIHfo+dXGCqdfA7fV9FEu8NtwTz4qs
|
||||
gcja3aoIFTltk7C7HCkfIxLaMnK9RQr4IOK1TL63MEs8rUfXkLSKW7m+YtSOmCZB
|
||||
lSkZg9AgfVYRq0h5nhddx91kicSISN+jLGaA7Sd6Q2LVwDG2CCOSNVyuRTyVBu+W
|
||||
NQKCAQAWN6vB6oToNIoBLdOThm0HD07cNHcrnBjtaKsYsQDgqbr2m8LRCRzNRML4
|
||||
jG0IAOWpuCiEGsgUPxywiI1Ufvyq7ZSNT1QQNzCR47NM3Ve6S2abrQkMIk9VJ+za
|
||||
CB9c1BH92GokoRxqswb/BiMttG2EIP8L8/pSRYEcVnaaxAkf9QOhEwj4LJPGX0mS
|
||||
t7kWIUVHPdFJ67F25dYr3mUHgyV+QJupQICkkkgY3nBOU1fS42vAugaxqH0wAP3T
|
||||
53FlpY3NuE7+kYC3FjfcBer99F1pOac3X9jxhk26w9dr2/QNA33xhDXHKYvoLUCG
|
||||
RPQylahJByU7IrtQzSCf/RE7q4v0
|
||||
-----END PRIVATE KEY-----
Some files were not shown because too many files have changed in this diff.