Compare commits

..

6 Commits

Author  SHA1  Message  Date
m0namon  f9749f64d8  temp testfixes  2020-01-21 17:17:16 -08:00
sydneyli  c0633ec3ac  Implement ApacheBlockNode.delete_child  2020-01-21 16:20:57 -08:00
sydneyli  c9edb49267  Implement ApacheBlockNode.add_child_*  2020-01-21 16:20:32 -08:00
sydneyli  9cc15da90f  Implement ApacheBlockNode constructor  2020-01-21 16:19:58 -08:00
m0namon  69bb92d698  Bump apacheconfig dependency to latest version and install dev version of apache for coverage tests  2020-01-21 16:02:17 -08:00
m0namon  bafa9e2eb0  Load apacheconfig dependency, gate behind flag  2020-01-15 10:29:36 -08:00
163 changed files with 1311 additions and 3753 deletions

View File

@@ -69,12 +69,12 @@ Access can be defined for all or only selected repositories, which is nice.
```
- Redirected to Azure DevOps, select the account created in _Having an Azure DevOps account_ section.
- Select the organization, and click "Create a new project" (let's name it the same than the targeted github repo)
- Select the organization, and click "Create a new project" (let's name it the same than the targetted github repo)
- The Visibility is public, to profit from 10 parallel jobs
```
!!! ACCESS !!!
Azure Pipelines needs access to the GitHub account (in term of being able to check it is valid), and the Resources shared between the GitHub account and Azure Pipelines.
Azure Pipelines needs access to the GitHub account (in term of beeing able to check it is valid), and the Resources shared between the GitHub account and Azure Pipelines.
```
_Done. We can move to pipelines configuration._

View File

@@ -1,8 +1,5 @@
# Advanced pipeline for isolated checks and release purpose
trigger:
# When changing these triggers, please ensure the documentation under
# "Running tests in CI" is still correct.
- azure-test-*
- test-*
- '*.x'
pr:

View File

@@ -33,24 +33,22 @@ jobs:
pool:
vmImage: $(imageName)
steps:
- powershell: Invoke-WebRequest https://www.python.org/ftp/python/3.8.1/python-3.8.1-amd64-webinstall.exe -OutFile C:\py3-setup.exe
displayName: Get Python
- script: C:\py3-setup.exe /quiet PrependPath=1 InstallAllUsers=1 Include_launcher=1 InstallLauncherAllUsers=1 Include_test=0 Include_doc=0 Include_dev=1 Include_debug=0 Include_tcltk=0 TargetDir=C:\py3
displayName: Install Python
- task: DownloadPipelineArtifact@2
inputs:
artifact: windows-installer
path: $(Build.SourcesDirectory)/bin
displayName: Retrieve Windows installer
- script: $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe /S
displayName: Install Certbot
- powershell: Invoke-WebRequest https://www.python.org/ftp/python/3.8.0/python-3.8.0-amd64-webinstall.exe -OutFile C:\py3-setup.exe
displayName: Get Python
- script: C:\py3-setup.exe /quiet PrependPath=1 InstallAllUsers=1 Include_launcher=1 InstallLauncherAllUsers=1 Include_test=0 Include_doc=0 Include_dev=1 Include_debug=0 Include_tcltk=0 TargetDir=C:\py3
displayName: Install Python
- script: |
py -3 -m venv venv
venv\Scripts\python tools\pip_install.py -e certbot-ci
displayName: Prepare Certbot-CI
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
displayName: Run windows installer integration tests
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
displayName: Run certbot integration tests
displayName: Run integration tests

.gitignore vendored (1 change)
View File

@@ -26,7 +26,6 @@ tags
\#*#
.idea
.ropeproject
.vscode
# auth --cert-path --chain-path
/*.pem

View File

@@ -14,19 +14,17 @@ before_script:
- export TOX_TESTENV_PASSENV=TRAVIS
# Only build pushes to the master branch, PRs, and branches beginning with
# `test-`, `travis-test-`, or of the form `digit(s).digit(s).x`. This reduces
# the number of simultaneous Travis runs, which speeds turnaround time on
# review since there is a cap on the number of simultaneous runs.
# `test-` or of the form `digit(s).digit(s).x`. This reduces the number of
# simultaneous Travis runs, which speeds turnaround time on review since there
# is a cap on the number of simultaneous runs.
branches:
# When changing these branches, please ensure the documentation under
# "Running tests in CI" is still correct.
only:
# apache-parser-v2 is a temporary branch for doing work related to
# rewriting the parser in the Apache plugin.
- apache-parser-v2
- master
- /^\d+\.\d+\.x$/
- /^(travis-)?test-.*$/
- /^test-.*$/
# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
not-on-master: &not-on-master
@@ -61,8 +59,11 @@ matrix:
dist: trusty
env: TOXENV='py27-{acme,apache,apache-v2,certbot,dns,nginx}-oldest'
<<: *not-on-master
- python: "3.5"
env: TOXENV=py35
- python: "3.4"
env: TOXENV=py34
<<: *not-on-master
- python: "3.7"
env: TOXENV=py37
<<: *not-on-master
- python: "3.8"
env: TOXENV=py38
@@ -162,12 +163,31 @@ matrix:
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.4"
env: TOXENV=py34
<<: *extended-test-suite
- python: "3.5"
env: TOXENV=py35
<<: *extended-test-suite
- python: "3.6"
env: TOXENV=py36
<<: *extended-test-suite
- python: "3.7"
env: TOXENV=py37
<<: *extended-test-suite
- python: "3.8"
env: TOXENV=py38
<<: *extended-test-suite
- python: "3.4"
env: ACME_SERVER=boulder-v1 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.4"
env: ACME_SERVER=boulder-v2 TOXENV=integration
sudo: required
services: docker
<<: *extended-test-suite
- python: "3.5"
env: ACME_SERVER=boulder-v1 TOXENV=integration
sudo: required
@@ -212,10 +232,6 @@ matrix:
env: TOXENV=le_auto_centos6
services: docker
<<: *extended-test-suite
- sudo: required
env: TOXENV=le_auto_oraclelinux6
services: docker
<<: *extended-test-suite
- sudo: required
env: TOXENV=docker_dev
services: docker
@@ -227,6 +243,9 @@ matrix:
- language: generic
env: TOXENV=py27
os: osx
# Using this osx_image is a workaround for
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
osx_image: xcode10.2
addons:
homebrew:
packages:
@@ -236,6 +255,9 @@ matrix:
- language: generic
env: TOXENV=py3
os: osx
# Using this osx_image is a workaround for
# https://travis-ci.community/t/xcode-8-3-homebrew-outdated-error/3798.
osx_image: xcode10.2
addons:
homebrew:
packages:
@@ -263,10 +285,8 @@ addons:
# except in tests where the environment variable CERTBOT_NO_PIN is set.
# virtualenv is listed here explicitly to make sure it is upgraded when
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
# set, pip updates dependencies it thinks are already satisfied to avoid some
# problems with its lack of real dependency resolution.
install: 'tools/pip_install.py -I codecov tox virtualenv'
# version of virtualenv.
install: 'tools/pip_install.py -U codecov tox virtualenv'
# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
# script command. It is set only to `travis_retry` during farm tests, in
# order to trigger the Travis retry feature, and compensate the inherent
@@ -278,7 +298,6 @@ after_success: '[ "$TOXENV" == "py27-cover" ] && codecov -F linux'
notifications:
email: false
irc:
if: NOT branch =~ ^(travis-)?test-.*$
channels:
# This is set to a secure variable to prevent forks from sending
# notifications. This value was created by installing

View File

@@ -36,7 +36,6 @@ Authors
* [Brad Warren](https://github.com/bmw)
* [Brandon Kraft](https://github.com/kraftbj)
* [Brandon Kreisel](https://github.com/kraftbj)
* [Cameron Steel](https://github.com/Tugzrida)
* [Ceesjan Luiten](https://github.com/quinox)
* [Chad Whitacre](https://github.com/whit537)
* [Chhatoi Pritam Baral](https://github.com/pritambaral)
@@ -101,7 +100,6 @@ Authors
* [Harlan Lieberman-Berg](https://github.com/hlieberman)
* [Henri Salo](https://github.com/fgeek)
* [Henry Chen](https://github.com/henrychen95)
* [Hugo van Kemenade](https://github.com/hugovk)
* [Ingolf Becker](https://github.com/watercrossing)
* [Jaap Eldering](https://github.com/eldering)
* [Jacob Hoffman-Andrews](https://github.com/jsha)
@@ -126,7 +124,6 @@ Authors
* [Jonathan Herlin](https://github.com/Jonher937)
* [Jon Walsh](https://github.com/code-tree)
* [Joona Hoikkala](https://github.com/joohoi)
* [Josh McCullough](https://github.com/JoshMcCullough)
* [Josh Soref](https://github.com/jsoref)
* [Joubin Jabbari](https://github.com/joubin)
* [Juho Juopperi](https://github.com/jkjuopperi)

View File

@@ -303,7 +303,7 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
uri = chall.uri(domain)
logger.debug("Verifying %s at %s...", chall.typ, uri)
try:
http_response = requests.get(uri, verify=False)
http_response = requests.get(uri)
except requests.exceptions.RequestException as error:
logger.error("Unable to reach %s: %s", uri, error)
return False
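
For context, the self-check above fetches the challenge URI and compares the body to the expected key authorization; the hunk only toggles whether TLS verification is disabled on that request. A minimal standalone sketch of the flow follows. The function name and URL construction are illustrative, not the acme library's own API; the .well-known/acme-challenge/<token> layout comes from RFC 8555.

```
# Sketch of an HTTP-01 self-check: fetch the challenge URI over plain HTTP and
# compare the response body to the expected key authorization.
import requests

def http01_self_check(domain, token, key_authorization):
    uri = "http://{0}/.well-known/acme-challenge/{1}".format(domain, token)
    try:
        # One variant shown above passes verify=False so that certificate
        # errors are ignored if the server redirects this request to HTTPS.
        response = requests.get(uri, verify=False)
    except requests.exceptions.RequestException as error:
        print("Unable to reach %s: %s" % (uri, error))
        return False
    return response.text.rstrip() == key_authorization
```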

View File

@@ -942,7 +942,7 @@ class ClientNetwork(object):
:param messages.RegistrationResource account: Account object. Required if you are
planning to use .post() with acme_version=2 for anything other than
creating a new account; may be set later after registering.
:param josepy.JWASignature alg: Algorithm to use in signing JWS.
:param josepy.JWASignature alg: Algoritm to use in signing JWS.
:param bool verify_ssl: Whether to verify certificates on SSL connections.
:param str user_agent: String to send as User-Agent header.
:param float timeout: Timeout for requests.

View File

@@ -36,7 +36,7 @@ ERROR_CODES = {
' domain'),
'dns': 'There was a problem with a DNS query during identifier validation',
'dnssec': 'The server could not validate a DNSSEC signed domain',
'incorrectResponse': 'Response received didn\'t match the challenge\'s requirements',
'incorrectResponse': 'Response recieved didn\'t match the challenge\'s requirements',
# deprecate invalidEmail
'invalidEmail': 'The provided email for a registration was invalid',
'invalidContact': 'The provided contact URI was invalid',
@@ -245,13 +245,13 @@ class Directory(jose.JSONDeSerializable):
try:
return self[name.replace('_', '-')]
except KeyError as error:
raise AttributeError(str(error))
raise AttributeError(str(error) + ': ' + name)
def __getitem__(self, name):
try:
return self._jobj[self._canon_key(name)]
except KeyError:
raise KeyError('Directory field "' + self._canon_key(name) + '" not found')
raise KeyError('Directory field not found')
def to_partial_json(self):
return self._jobj
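
The hunk above only changes how lookup failures are reported. As a toy illustration of the underscore-to-hyphen attribute lookup pattern shown in __getattr__/__getitem__, here is a standalone stand-in (not the real acme.messages.Directory):

```
# Toy stand-in: attribute names map underscores to hyphens, and a failed
# lookup names the missing directory field in the error message.
class DirectoryLike(object):
    def __init__(self, jobj):
        self._jobj = jobj

    def __getitem__(self, name):
        try:
            return self._jobj[name]
        except KeyError:
            raise KeyError('Directory field "%s" not found' % name)

    def __getattr__(self, name):
        try:
            return self[name.replace('_', '-')]
        except KeyError as error:
            raise AttributeError(str(error))

d = DirectoryLike({'new-authz': 'https://example.com/acme/new-authz'})
print(d.new_authz)   # https://example.com/acme/new-authz
# d.new_cert raises AttributeError naming the missing 'new-cert' field
```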

View File

@@ -41,7 +41,7 @@ extensions = [
]
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -113,7 +113,7 @@ pygments_style = 'sphinx'
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -4,7 +4,7 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Please update tox.ini when modifying dependency version requirements
install_requires = [
@@ -61,7 +61,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
@@ -70,6 +70,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -181,7 +181,7 @@ class HTTP01ResponseTest(unittest.TestCase):
mock_get.return_value = mock.MagicMock(text=validation)
self.assertTrue(self.response.simple_verify(
self.chall, "local", KEY.public_key()))
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)
mock_get.assert_called_once_with(self.chall.uri("local"))
@mock.patch("acme.challenges.requests.get")
def test_simple_verify_bad_validation(self, mock_get):
@@ -197,7 +197,7 @@ class HTTP01ResponseTest(unittest.TestCase):
HTTP01Response.WHITESPACE_CUTSET))
self.assertTrue(self.response.simple_verify(
self.chall, "local", KEY.public_key()))
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)
mock_get.assert_called_once_with(self.chall.uri("local"))
@mock.patch("acme.challenges.requests.get")
def test_simple_verify_connection_error(self, mock_get):

View File

@@ -1,6 +1,7 @@
include LICENSE.txt
include README.rst
recursive-include tests *
include certbot_apache/_internal/centos-options-ssl-apache.conf
include certbot_apache/_internal/options-ssl-apache.conf
recursive-include certbot_apache/_internal/augeas_lens *.aug
global-exclude __pycache__

View File

@@ -1,15 +1,10 @@
""" Utility functions for certbot-apache plugin """
import binascii
import fnmatch
import hashlib
import logging
import re
import shutil
import struct
import subprocess
import time
from certbot import crypto_util
from certbot import errors
from certbot import util
@@ -18,95 +13,6 @@ from certbot.compat import os
logger = logging.getLogger(__name__)
def get_apache_ocsp_struct(ttl, ocsp_response):
"""Create Apache OCSP response structure to be used in response cache
:param int ttl: Time-To-Live in seconds
:param str ocsp_response: OCSP response data
:returns: Apache OCSP structure
:rtype: `str`
"""
ttl = time.time() + ttl
# As microseconds
ttl_struct = struct.pack('l', int(ttl*1000000))
return b'\x01'.join([ttl_struct, ocsp_response])
def certid_sha1_hex(cert_path):
"""Hex representation of certificate SHA1 fingerprint
:param str cert_path: File path to certificate
:returns: Hex representation SHA1 fingerprint of certificate
:rtype: `str`
"""
sha1_hex = binascii.hexlify(certid_sha1(cert_path))
return sha1_hex.decode('utf-8')
def certid_sha1(cert_path):
"""SHA1 fingerprint of certificate
:param str cert_path: File path to certificate
:returns: SHA1 fingerprint bytestring
:rtype: `str`
"""
return crypto_util.cert_sha1_fingerprint(cert_path)
def safe_copy(source, target):
"""Copies a file, while verifying the target integrity
with the source. Retries twice if the initial
copy fails.
:param str source: File path of the source file
:param str target: File path of the target file
:raises: .errors.PluginError: If file cannot be
copied or the target file hash does not match
with the source file.
"""
for _ in range(3):
try:
shutil.copy2(source, target)
except IOError as e:
emsg = "Could not copy {} to {}: {}".format(
source, target, e
)
raise errors.PluginError(emsg)
time.sleep(1)
try:
source_hash = _file_hash(source)
target_hash = _file_hash(target)
except IOError:
continue
if source_hash == target_hash:
return
raise errors.PluginError(
"Safe copy failed. The file integrity does not match"
)
def _file_hash(filepath):
"""Helper function for safe_copy that calculates a
sha-256 hash of file.
:param str filepath: Path of file to calculate hash for
:returns: File sha-256 hash
:rtype: str
"""
fhash = hashlib.sha256()
with open(filepath, 'rb') as fh:
fhash.update(fh.read())
return fhash.hexdigest()
def get_mod_deps(mod_name):
"""Get known module dependencies.

View File

@@ -1,5 +1,9 @@
""" apacheconfig implementation of the ParserNode interfaces """
from functools import partial
from certbot import errors
from certbot_apache._internal import assertions
from certbot_apache._internal import interfaces
from certbot_apache._internal import parsernode_util as util
@@ -78,12 +82,60 @@ class ApacheDirectiveNode(ApacheParserNode):
return
def _parameters_from_string(text):
text = text.strip()
words = []
word = ""
quote = None
escape = False
for c in text:
if c.isspace() and not quote:
if word:
words.append(word)
word = ""
else:
word += c
if not escape:
if not quote and c in "\"\'":
quote = c
elif c == quote:
words.append(word[1:-1])
word = ""
quote = None
escape = c == "\\"
if word:
words.append(word)
return tuple(words)
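
For illustration, two hypothetical inputs and the tuples the splitter above produces (plain arguments split on whitespace; quoted arguments keep embedded spaces and lose their quotes):

```
# Hypothetical examples exercising _parameters_from_string() defined above.
print(_parameters_from_string('proxy:fcgi://localhost:9000 timeout=60'))
# -> ('proxy:fcgi://localhost:9000', 'timeout=60')
print(_parameters_from_string('"DOS mode" retry=3'))
# -> ('DOS mode', 'retry=3')
```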
class ApacheBlockNode(ApacheDirectiveNode):
""" apacheconfig implementation of BlockNode interface """
def __init__(self, **kwargs):
super(ApacheBlockNode, self).__init__(**kwargs)
self.children = ()
self._raw_children = self._raw
children = []
for raw_node in self._raw_children:
metadata = self.metadata.copy()
metadata['ac_ast'] = raw_node
if raw_node.typestring == "comment":
node = ApacheCommentNode(comment=raw_node.name[2:],
metadata=metadata, ancestor=self,
filepath=self.filepath)
elif raw_node.typestring == "block":
parameters = _parameters_from_string(raw_node.arguments)
node = ApacheBlockNode(name=raw_node.tag, parameters=parameters,
metadata=metadata, ancestor=self,
filepath=self.filepath, enabled=self.enabled)
else:
parameters = ()
if raw_node.value:
parameters = _parameters_from_string(raw_node.value)
node = ApacheDirectiveNode(name=raw_node.name, parameters=parameters,
metadata=metadata, ancestor=self,
filepath=self.filepath, enabled=self.enabled)
children.append(node)
self.children = tuple(children)
def __eq__(self, other): # pragma: no cover
if isinstance(other, self.__class__):
@@ -97,36 +149,44 @@ class ApacheBlockNode(ApacheDirectiveNode):
self.metadata == other.metadata)
return False
def add_child_block(self, name, parameters=None, position=None): # pylint: disable=unused-argument
def _add_child_thing(self, raw_string, partial_node, position):
position = len(self._raw_children) if not position else position
# Cap position to length to mimic AugeasNode behavior. TODO: document that this happens
position = min(len(self._raw_children), position)
raw_ast = self._raw_children.add(position, raw_string)
metadata = self.metadata.copy()
metadata['ac_ast'] = raw_ast
new_node = partial_node(ancestor=self, metadata=metadata, filepath=self.filepath)
# Update metadata
children = list(self.children)
children.insert(position, new_node)
self.children = tuple(children)
return new_node
def add_child_block(self, name, parameters=None, position=None):
"""Adds a new BlockNode to the sequence of children"""
new_block = ApacheBlockNode(name=assertions.PASS,
parameters=assertions.PASS,
ancestor=self,
filepath=assertions.PASS,
metadata=self.metadata)
self.children += (new_block,)
return new_block
parameters_str = " " + " ".join(parameters) if parameters else ""
if not parameters:
parameters = []
partial_block = partial(ApacheBlockNode, name=name,
parameters=tuple(parameters), enabled=self.enabled)
return self._add_child_thing("\n<%s%s>\n</%s>" % (name, parameters_str, name),
partial_block, position)
def add_child_directive(self, name, parameters=None, position=None): # pylint: disable=unused-argument
def add_child_directive(self, name, parameters=None, position=None):
"""Adds a new DirectiveNode to the sequence of children"""
new_dir = ApacheDirectiveNode(name=assertions.PASS,
parameters=assertions.PASS,
ancestor=self,
filepath=assertions.PASS,
metadata=self.metadata)
self.children += (new_dir,)
return new_dir
# pylint: disable=unused-argument
def add_child_comment(self, comment="", position=None): # pragma: no cover
parameters_str = " " + " ".join(parameters) if parameters else ""
if not parameters: # TODO (mona): test
parameters = [] # pragma: no cover
partial_block = partial(ApacheDirectiveNode, name=name,
parameters=tuple(parameters), enabled=self.enabled)
return self._add_child_thing("\n%s%s" % (name, parameters_str), partial_block, position)
def add_child_comment(self, comment="", position=None):
"""Adds a new CommentNode to the sequence of children"""
new_comment = ApacheCommentNode(comment=assertions.PASS,
ancestor=self,
filepath=assertions.PASS,
metadata=self.metadata)
self.children += (new_comment,)
return new_comment
partial_comment = partial(ApacheCommentNode, comment=comment)
return self._add_child_thing(comment, partial_comment, position)
def find_blocks(self, name, exclude=True): # pylint: disable=unused-argument
"""Recursive search of BlockNodes from the sequence of children"""
@@ -151,9 +211,22 @@ class ApacheBlockNode(ApacheDirectiveNode):
filepath=assertions.PASS,
metadata=self.metadata)]
# TODO (mona): test
def delete_child(self, child): # pragma: no cover
"""Deletes a ParserNode from the sequence of children"""
return
index = -1
i = None
for i, elem in enumerate(self.children):
if elem == child:
index = i
break
if index < 0:
raise errors.PluginError("Could not find child node to delete")
children_list = list(self.children)
thing = children_list.pop(i)
self.children = tuple(children_list)
self._raw_children.remove(i)
return thing
def unsaved_files(self): # pragma: no cover
"""Returns a list of unsaved filepaths"""

View File

@@ -0,0 +1,25 @@
# This file contains important security parameters. If you modify this file
# manually, Certbot will be unable to automatically provide future security
# updates. Instead, Certbot will print and log an error message with a path to
# the up-to-date file that you will need to refer to when manually updating
# this file.
SSLEngine on
# Intermediate configuration, tweak to your needs
SSLProtocol all -SSLv2 -SSLv3
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
SSLHonorCipherOrder on
SSLOptions +StrictRequire
# Add vhost name to log entries:
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common
#CustomLog /var/log/apache2/access.log vhost_combined
#LogLevel warn
#ErrorLog /var/log/apache2/error.log
# Always ensure Cookies have "Secure" set (JAH 2012/1)
#Header edit Set-Cookie (?i)^(.*)(;\s*secure)??((\s*;)?(.*)) "$1; Secure$3$4"

View File

@@ -12,6 +12,11 @@ import pkg_resources
import six
import zope.component
import zope.interface
try:
import apacheconfig
HAS_APACHECONFIG = True
except ImportError: # pragma: no cover
HAS_APACHECONFIG = False
from acme import challenges
from acme.magic_typing import DefaultDict # pylint: disable=unused-import, no-name-in-module
@@ -22,7 +27,6 @@ from acme.magic_typing import Union # pylint: disable=unused-import, no-name-in
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge # pylint: disable=unused-import
from certbot.compat import filesystem
from certbot.compat import os
@@ -262,7 +266,7 @@ class ApacheConfigurator(common.Installer):
pn_meta = {"augeasparser": self.parser,
"augeaspath": self.parser.get_root_augpath(),
"ac_ast": None}
if self.USE_PARSERNODE:
if self.USE_PARSERNODE and HAS_APACHECONFIG:
self.parser_root = self.get_parsernode_root(pn_meta)
self.parsed_paths = self.parser_root.parsed_paths()
@@ -370,6 +374,11 @@ class ApacheConfigurator(common.Installer):
apache_vars["modules"] = apache_util.parse_modules(self.option("ctl"))
metadata["apache_vars"] = apache_vars
with open(self.parser.loc["root"]) as f:
with apacheconfig.make_loader(writable=True,
**apacheconfig.flavors.NATIVE_APACHE) as loader:
metadata["ac_ast"] = loader.loads(f.read())
return dualparser.DualBlockNode(
name=assertions.PASS,
ancestor=None,
@@ -908,7 +917,7 @@ class ApacheConfigurator(common.Installer):
"""
v1_vhosts = self.get_virtual_hosts_v1()
if self.USE_PARSERNODE:
if self.USE_PARSERNODE and HAS_APACHECONFIG:
v2_vhosts = self.get_virtual_hosts_v2()
for v1_vh in v1_vhosts:
@@ -1844,7 +1853,7 @@ class ApacheConfigurator(common.Installer):
self.parser.find_dir("SSLCertificateKeyFile",
lineage.key_path, vhost.path))
def _enable_ocsp_stapling(self, ssl_vhost, unused_options, prefetch=False):
def _enable_ocsp_stapling(self, ssl_vhost, unused_options):
"""Enables OCSP Stapling
In OCSP, each client (e.g. browser) would have to query the
@@ -1865,9 +1874,6 @@ class ApacheConfigurator(common.Installer):
:param unused_options: Not currently used
:type unused_options: Not Available
:param prefetch: Use OCSP prefetching
:type prefetch: bool
:returns: Success, general_vhost (HTTP vhost)
:rtype: (bool, :class:`~certbot_apache._internal.obj.VirtualHost`)
@@ -1878,15 +1884,8 @@ class ApacheConfigurator(common.Installer):
"Unable to set OCSP directives.\n"
"Apache version is below 2.3.3.")
if prefetch:
if "socache_dbm_module" not in self.parser.modules:
self.enable_mod("socache_dbm")
cache_path = os.path.join(self.config.work_dir, "ocsp", "ocsp_cache.db")
cache_dir = ["dbm:"+cache_path]
else:
if "socache_shmcb_module" not in self.parser.modules:
self.enable_mod("socache_shmcb")
cache_dir = ["shmcb:/var/run/apache2/stapling_cache(128000)"]
if "socache_shmcb_module" not in self.parser.modules:
self.enable_mod("socache_shmcb")
# Check if there's an existing SSLUseStapling directive on.
use_stapling_aug_path = self.parser.find_dir("SSLUseStapling",
@@ -1906,7 +1905,9 @@ class ApacheConfigurator(common.Installer):
self.parser.aug.remove(
re.sub(r"/\w*$", "", stapling_cache_aug_path[0]))
self.parser.add_dir_to_ifmodssl(ssl_vhost_aug_path, "SSLStaplingCache", cache_dir)
self.parser.add_dir_to_ifmodssl(ssl_vhost_aug_path,
"SSLStaplingCache",
["shmcb:/var/run/apache2/stapling_cache(128000)"])
msg = "OCSP Stapling was enabled on SSL Vhost: %s.\n"%(
ssl_vhost.filep)
@@ -1955,7 +1956,7 @@ class ApacheConfigurator(common.Installer):
ssl_vhost.filep)
def _verify_no_matching_http_header(self, ssl_vhost, header_substring):
"""Checks to see if there is an existing Header directive that
"""Checks to see if an there is an existing Header directive that
contains the string header_substring.
:param ssl_vhost: vhost to check

View File

@@ -24,8 +24,6 @@ ALL_SSL_OPTIONS_HASHES = [
'0fcdc81280cd179a07ec4d29d3595068b9326b455c488de4b09f585d5dafc137',
'86cc09ad5415cd6d5f09a947fe2501a9344328b1e8a8b458107ea903e80baa6c',
'06675349e457eae856120cdebb564efe546f0b87399f2264baeb41e442c724c7',
'5cc003edd93fb9cd03d40c7686495f8f058f485f75b5e764b789245a386e6daf',
'007cd497a56a3bb8b6a2c1aeb4997789e7e38992f74e44cc5d13a625a738ac73',
]
"""SHA256 hashes of the contents of previous versions of all versions of MOD_SSL_CONF_SRC"""
@@ -69,9 +67,3 @@ AUTOHSTS_FREQ = 172800
MANAGED_COMMENT = "DO NOT REMOVE - Managed by Certbot"
MANAGED_COMMENT_ID = MANAGED_COMMENT+", VirtualHost id: {0}"
"""Managed by Certbot comments and the VirtualHost identification template"""
OCSP_APACHE_TTL = 432000
"""Apache TTL for OCSP response in seconds: 5 days"""
OCSP_INTERNAL_TTL = 86400
"""Internal TTL for OCSP response in seconds: 1 day"""

View File

@@ -7,12 +7,20 @@
SSLEngine on
# Intermediate configuration, tweak to your needs
SSLProtocol all -SSLv2 -SSLv3 -TLSv1 -TLSv1.1
SSLCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
SSLHonorCipherOrder off
SSLProtocol all -SSLv2 -SSLv3
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
SSLHonorCipherOrder on
SSLCompression off
SSLOptions +StrictRequire
# Add vhost name to log entries:
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common
#CustomLog /var/log/apache2/access.log vhost_combined
#LogLevel warn
#ErrorLog /var/log/apache2/error.log
# Always ensure Cookies have "Secure" set (JAH 2012/1)
#Header edit Set-Cookie (?i)^(.*)(;\s*secure)??((\s*;)?(.*)) "$1; Secure$3$4"

View File

@@ -38,7 +38,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
handle_sites=False,
challenge_location="/etc/httpd/conf.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
"certbot_apache", os.path.join("_internal", "centos-options-ssl-apache.conf"))
)
def config_test(self):

View File

@@ -9,16 +9,14 @@ from certbot import interfaces
from certbot import util
from certbot.compat import filesystem
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import prefetch_ocsp
logger = logging.getLogger(__name__)
@zope.interface.provider(interfaces.IPluginFactory)
class DebianConfigurator(prefetch_ocsp.OCSPPrefetchMixin, configurator.ApacheConfigurator):
class DebianConfigurator(configurator.ApacheConfigurator):
"""Debian specific ApacheConfigurator override class"""
OS_DEFAULTS = dict(

View File

@@ -33,7 +33,7 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
challenge_location="/etc/httpd/conf.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
# TODO: eventually newest version of Fedora will need their own config
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
"certbot_apache", os.path.join("_internal", "centos-options-ssl-apache.conf"))
)
def config_test(self):

View File

@@ -705,7 +705,7 @@ class ApacheParser(object):
split_arg = arg.split("/")
for idx, split in enumerate(split_arg):
if any(char in ApacheParser.fnmatch_chars for char in split):
# Turn it into an augeas regex
# Turn it into a augeas regex
# TODO: Can this instead be an augeas glob instead of regex
split_arg[idx] = ("* [label()=~regexp('%s')]" %
self.fnmatch_to_re(split))

View File

@@ -1,389 +0,0 @@
"""A mixin class for OCSP response prefetching for Apache plugin.
The OCSP prefetching functionality solves multiple issues in Apache httpd
that make using OCSP must-staple error prone.
The prefetching functionality works by storing a value to PluginStorage,
noting certificates that Certbot should keep OCSP staples (OCSP responses)
updated for, alongside information about when the response was last
updated by Certbot.
When Certbot is invoked, typically by a scheduled "certbot renew", and the
TTL from the "lastupdate" value in the PluginStorage entry has expired,
Certbot then proceeds to fetch a new OCSP response from the OCSP servers
pointed to by the certificate.
The OCSP response is validated and if valid, stored to Apache DBM
cache. A high internal cache expiry value is set for Apache in order
to keep it from discarding the stored response and trying to renew
the staple itself, letting Certbot renew it on its subsequent run
instead.
The DBM cache file used by Apache is a lightweight key-value storage.
For OCSP response caching, the sha1 hash of certificate fingerprint
is used as a key. The value consists of expiry time as timestamp
in microseconds, \x01 delimiter and the raw OCSP response.
When restarting Apache, Certbot backs up the current OCSP response
cache and restores it after the restart has happened. This is
done because Apache deletes and then recreates the file upon
restart.
"""
from datetime import datetime
import logging
import shutil
import time
from acme.magic_typing import Dict # pylint: disable=unused-import, no-name-in-module
from certbot import errors
from certbot import ocsp
from certbot.plugins.enhancements import OCSPPrefetchEnhancement
from certbot.compat import filesystem
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import constants
logger = logging.getLogger(__name__)
class DBMHandler(object):
"""Context manager to handle DBM file reads and writes"""
def __init__(self, filename, mode):
self.filename = filename
self.filemode = mode
self.bsddb = False
self.database = None
def __enter__(self):
"""Open the DBM file and return the filehandle"""
try:
import bsddb
self.bsddb = True
try:
self.database = bsddb.hashopen(self.filename, self.filemode)
except Exception:
raise errors.PluginError("Unable to open dbm database file.")
except ImportError:
# Python3 doesn't have bsddb module, so we use dbm.ndbm instead
import dbm
if self.filename.endswith(".db"):
self.filename = self.filename[:-3]
try:
self.database = dbm.ndbm.open(self.filename, self.filemode) # pylint: disable=no-member
except Exception:
# This is raised if a file cannot be found
raise errors.PluginError("Unable to open dbm database file.")
return self.database
def __exit__(self, *args):
"""Close the DBM file"""
if self.bsddb:
self.database.sync()
self.database.close()
class OCSPPrefetchMixin(object):
"""OCSPPrefetchMixin implements OCSP response prefetching"""
def __init__(self, *args, **kwargs):
self._ocsp_prefetch = {} # type: Dict[str, str]
# This is required because of python super() call chain.
# Additionally, mypy isn't able to figure the chain out and needs to be
# disabled for this line. See https://github.com/python/mypy/issues/5887
super(OCSPPrefetchMixin, self).__init__(*args, **kwargs) # type: ignore
def _ensure_ocsp_dirs(self):
"""Makes sure that the OCSP directory paths exist."""
ocsp_work = os.path.join(self.config.work_dir, "ocsp_work")
ocsp_save = os.path.join(self.config.work_dir, "ocsp")
for path in [ocsp_work, ocsp_save]:
if not os.path.isdir(path):
filesystem.makedirs(path, 0o755)
def _ensure_ocsp_prefetch_compatibility(self):
"""Make sure that the operating system supports the required libraries
to manage Apache DBM files.
:raises: errors.NotSupportedError
"""
try:
import bsddb # pylint: disable=unused-variable
except ImportError:
import dbm
if not hasattr(dbm, 'ndbm') or dbm.ndbm.library != 'Berkeley DB': # pylint: disable=no-member
msg = ("Unfortunately your operating system does not have a "
"compatible database module available for managing "
"Apache OCSP stapling cache database.")
raise errors.NotSupportedError(msg)
def _ocsp_refresh_needed(self, pf_obj):
"""Refreshes OCSP response for a certiifcate if it's due
:param dict pf_obj: OCSP prefetch object from pluginstorage
:returns: Whether the OCSP response needs to be refreshed
:rtype: bool
"""
ttl = pf_obj["lastupdate"] + constants.OCSP_INTERNAL_TTL
if ttl < time.time():
return True
return False
def _ocsp_refresh(self, cert_path, chain_path):
"""Refresh the OCSP response for a certificate
:param str cert_path: Filesystem path to certificate file
:param str chain_path: Filesystem path to certificate chain file
"""
self._ensure_ocsp_dirs()
ocsp_workfile = os.path.join(
self.config.work_dir, "ocsp_work",
apache_util.certid_sha1_hex(cert_path))
handler = ocsp.RevocationChecker()
if not handler.ocsp_revoked_by_paths(cert_path, chain_path, ocsp_workfile):
# Guaranteed good response
cache_path = os.path.join(self.config.work_dir, "ocsp", "ocsp_cache.db")
cert_sha = apache_util.certid_sha1(cert_path)
# dbm.open automatically adds the file extension
self._write_to_dbm(cache_path, cert_sha, self._ocsp_response_dbm(ocsp_workfile))
else:
logger.warning("Encountered an issue while trying to prefetch OCSP "
"response for certificate: %s", cert_path)
# Clean up
try:
os.remove(ocsp_workfile)
except OSError:
# The OCSP workfile did not exist because of an OCSP response fetching error
return
def _write_to_dbm(self, filename, key, value):
"""Helper method to write an OCSP response cache value to DBM.
:param filename: DBM database filename
:param bytes key: Database key name
:param bytes value: Database entry value
"""
tmp_file = os.path.join(
self.config.work_dir,
"ocsp_work",
"tmp_" + os.path.basename(filename)
)
apache_util.safe_copy(filename, tmp_file)
with DBMHandler(tmp_file, 'w') as db:
db[key] = value
shutil.copy2(tmp_file, filename)
os.remove(tmp_file)
def _ocsp_ttl(self, next_update):
"""Calculates Apache internal TTL for the next OCSP staple
update.
The resulting TTL is half of the time between now
and the time noted by nextUpdate field in OCSP response.
If nextUpdate value is None, a default value will be
used instead.
:param next_update: datetime value for nextUpdate or None
:returns: TTL in seconds.
:rtype: int
"""
if next_update is not None:
now = datetime.utcnow()
res_ttl = int((next_update - now).total_seconds())
if res_ttl > 0:
return res_ttl/2
return constants.OCSP_APACHE_TTL
def _ocsp_response_dbm(self, workfile):
"""Creates a dbm entry for OCSP response data
:param str workfile: File path for raw OCSP response
:returns: OCSP response cache data that Apache can use
:rtype: string
"""
handler = ocsp.RevocationChecker()
_, _, next_update = handler.ocsp_times(workfile)
ttl = self._ocsp_ttl(next_update)
with open(workfile, 'rb') as fh:
response = fh.read()
return apache_util.get_apache_ocsp_struct(ttl, response)
def _ocsp_prefetch_save(self, cert_path, chain_path):
"""Saves status of current OCSP prefetch, including the last update
time to determine if an update is needed on later run.
:param str cert_path: Filesystem path to certificate
:param str chain_path: Filesystem path to certificate chain file
"""
status = {
"lastupdate": time.time(),
"cert_path": cert_path,
"chain_path": chain_path
}
cert_id = apache_util.certid_sha1_hex(cert_path)
self._ocsp_prefetch[cert_id] = status
self.storage.put("ocsp_prefetch", self._ocsp_prefetch)
self.storage.save()
def _ocsp_prefetch_fetch_state(self):
"""
Populates the OCSP prefetch state from the pluginstorage.
"""
try:
self._ocsp_prefetch = self.storage.fetch("ocsp_prefetch")
except KeyError:
self._ocsp_prefetch = dict()
def _ocsp_prefetch_backup_db(self):
"""
Copies the active dbm file to work directory. Logs a debug error
message if unable to copy, but does not error out as it would
prevent other critical functions that need to be carried out for
Apache httpd.
Erroring out here would prevent any restarts done by Apache plugin.
"""
self._ensure_ocsp_dirs()
cache_path = os.path.join(self.config.work_dir, "ocsp", "ocsp_cache.db")
try:
apache_util.safe_copy(
cache_path,
os.path.join(self.config.work_dir, "ocsp_work", "ocsp_cache.db"))
except errors.PluginError:
logger.debug("Encountered an issue while trying to backup OCSP dbm file")
def _ocsp_prefetch_restore_db(self):
"""
Restores the active dbm file from work directory. Logs a debug error
message if unable to restore, but does not error out as it would
prevent other critical functions that need to be carried out for
Apache httpd.
"""
self._ensure_ocsp_dirs()
cache_path = os.path.join(self.config.work_dir, "ocsp", "ocsp_cache.db")
work_file_path = os.path.join(self.config.work_dir, "ocsp_work", "ocsp_cache.db")
try:
shutil.copy2(work_file_path, cache_path)
except IOError:
logger.debug("Encountered an issue when trying to restore OCSP dbm file")
def enable_ocsp_prefetch(self, lineage, domains):
"""Enable OCSP Stapling and prefetching of the responses.
In OCSP, each client (e.g. browser) would have to query the
OCSP Responder to validate that the site certificate was not revoked.
Enabling OCSP Stapling would allow the web-server to query the OCSP
Responder, and staple its response to the offered certificate during
TLS. i.e. clients would not have to query the OCSP responder.
OCSP prefetching functionality addresses some of the pain points in
the implementation that's currently present in Apache httpd. The
mitigations provided by Certbot are:
* OCSP staples get backed up before, and restored after httpd restart
* Valid OCSP staples do not get overwritten with errors in case of
network connectivity or OCSP responder issues
* The staples get updated asynchronously in the background instead
of blocking an incoming request.
"""
# Fail early if we are not able to support this
self._ensure_ocsp_prefetch_compatibility()
prefetch_vhosts = set()
for domain in domains:
matched_vhosts = self.choose_vhosts(domain, create_if_no_ssl=False)
# We should be handling only SSL vhosts
for vh in matched_vhosts:
if vh.ssl:
prefetch_vhosts.add(vh)
if not prefetch_vhosts:
raise errors.MisconfigurationError(
"Could not find VirtualHost to enable OCSP prefetching on."
)
try:
# The try - block is huge, but required for handling rollback properly.
for vh in prefetch_vhosts:
self._enable_ocsp_stapling(vh, None, prefetch=True)
self._ensure_ocsp_dirs()
self.restart()
# Ensure Apache has enough time to properly restart and create the file
time.sleep(2)
self._ocsp_refresh(lineage.cert_path, lineage.chain_path)
self._ocsp_prefetch_save(lineage.cert_path, lineage.chain_path)
self.save("Enabled OCSP prefetching")
except errors.PluginError as err:
# Revert the OCSP prefetch configuration
self.recovery_routine()
self.restart()
msg = ("Encountered an error while trying to enable OCSP prefetch "
"enhancement: %s\nOCSP prefetch was not enabled.")
raise errors.PluginError(msg % str(err))
def update_ocsp_prefetch(self, _unused_lineage):
"""Checks all certificates that are managed by OCSP prefetch, and
refreshes OCSP responses for them if required."""
self._ocsp_prefetch_fetch_state()
if not self._ocsp_prefetch:
# No OCSP prefetching enabled for any certificate
return
for pf in self._ocsp_prefetch.values():
if self._ocsp_refresh_needed(pf):
self._ocsp_refresh(pf["cert_path"], pf["chain_path"])
# Save the status to pluginstorage
self._ocsp_prefetch_save(pf["cert_path"], pf["chain_path"])
def restart(self):
"""Reloads the Apache server. When restarting, Apache deletes
the DBM cache file used to store OCSP staples. In this override
function, Certbot checks the pluginstorage to see whether it is supposed to
manage OCSP prefetching. If needed, Certbot will back up the DBM
file, restoring it after calling restart.
:raises .errors.MisconfigurationError: If either the config test
or reload fails.
"""
if not self._ocsp_prefetch:
# Try to populate OCSP prefetch structure from pluginstorage
self._ocsp_prefetch_fetch_state()
if self._ocsp_prefetch:
# OCSP prefetching is enabled, so back up the db
self._ocsp_prefetch_backup_db()
try:
# Ignored because mypy doesn't know that this class is used as
# a mixin and fails because object has no restart method.
super(OCSPPrefetchMixin, self).restart() # type: ignore
finally:
if self._ocsp_prefetch:
# Restore the backed up dbm database
self._ocsp_prefetch_restore_db()
OCSPPrefetchEnhancement.register(OCSPPrefetchMixin) # pylint: disable=no-member
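
For concreteness, the TTL rule implemented by _ocsp_ttl above is simply half the remaining time until the response's nextUpdate, falling back to constants.OCSP_APACHE_TTL when nextUpdate is missing. A standalone sketch of that rule:

```
from datetime import datetime, timedelta

OCSP_APACHE_TTL = 432000  # 5 days, mirroring the removed constants.OCSP_APACHE_TTL

def ocsp_ttl(next_update, now=None):
    # Half of the remaining validity window, or the default when unknown.
    if next_update is not None:
        now = now or datetime.utcnow()
        remaining = int((next_update - now).total_seconds())
        if remaining > 0:
            return remaining // 2
    return OCSP_APACHE_TTL

print(ocsp_ttl(datetime.utcnow() + timedelta(days=6)))  # about 3 days, in seconds
```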

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.3.0.dev0',
'certbot>=1.0.0.dev0',
'mock',
'python-augeas',
'setuptools',
@@ -19,7 +19,7 @@ install_requires = [
]
dev_extras = [
'apacheconfig>=0.3.1',
'apacheconfig>=0.3.2',
]
class PyTest(TestCommand):
@@ -45,7 +45,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -56,6 +56,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -1,8 +1,15 @@
"""Tests for AugeasParserNode classes"""
import mock
import unittest
import util
try:
import apacheconfig # pylint: disable=import-error,unused-import
HAS_APACHECONFIG = True
except ImportError: # pragma: no cover
HAS_APACHECONFIG = False
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
from certbot import errors
@@ -10,6 +17,7 @@ from certbot_apache._internal import assertions
@unittest.skipIf(not HAS_APACHECONFIG, reason='Tests require apacheconfig dependency')
class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-methods
"""Test AugeasParserNode using available test configurations"""
@@ -146,7 +154,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
self.assertTrue(found)
def test_add_child_comment(self):
newc = self.config.parser_root.primary.add_child_comment("The content")
newc = self.config.parser_root.add_child_comment("The content")
comments = self.config.parser_root.find_comments("The content")
self.assertEqual(len(comments), 1)
self.assertEqual(

View File

@@ -15,7 +15,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
parser_mock = mock.MagicMock()
parser_mock.aug.match.return_value = []
parser_mock.get_arg.return_value = []
self.metadata = {"augeasparser": parser_mock, "augeaspath": "/invalid", "ac_ast": None}
ast_mock = mock.MagicMock()
self.metadata = {"augeasparser": parser_mock, "augeaspath": "/invalid", "ac_ast": ast_mock}
self.block = dualparser.DualBlockNode(name="block",
ancestor=None,
filepath="/tmp/something",

View File

@@ -1,435 +0,0 @@
"""Test for certbot_apache._internal.configurator OCSP Prefetching functionality"""
import base64
from datetime import datetime
from datetime import timedelta
import json
import sys
import unittest
import mock
# six is used in mock.patch()
import six # pylint: disable=unused-import
from acme.magic_typing import Dict, List, Set, Union # pylint: disable=unused-import, no-name-in-module
from certbot import errors
from certbot.compat import os
import util
from certbot_apache._internal.prefetch_ocsp import DBMHandler
class MockDBM(object):
# pylint: disable=missing-docstring
"""Main mock DBM class for Py3 dbm module"""
def __init__(self, library='Berkeley DB'):
self.ndbm = Mockdbm_impl(library)
class Mockdbm_impl(object):
"""Mock dbm implementation that satisfies both bsddb and dbm interfaces"""
# pylint: disable=missing-docstring
def __init__(self, library='Berkeley DB'):
self.library = library
self.name = 'ndbm'
def open(self, path, mode):
return Mockdb(path, mode)
def hashopen(self, path, mode):
return Mockdb(path, mode)
class Mockdb(object):
"""Mock dbm.db for both bsddb and dbm databases"""
# pylint: disable=missing-docstring
def __init__(self, path, mode):
self._data = dict() # type: Dict[str, str]
if mode == "r" or mode == "w":
if not path.endswith(".db"):
path = path+".db"
with open(path, 'r') as fh:
try:
self._data = json.loads(fh.read())
except Exception: # pylint: disable=broad-except
self._data = dict()
self.path = path
self.mode = mode
def __setitem__(self, key, item):
bkey = base64.b64encode(key)
bitem = base64.b64encode(item)
self._data[bkey.decode()] = bitem.decode()
def __getitem__(self, key):
bkey = base64.b64encode(key)
return base64.b64decode(self._data[bkey.decode()])
def keys(self):
return [base64.b64decode(k) for k in self._data.keys()]
def sync(self):
return
def close(self):
with open(self.path, 'w') as fh:
fh.write(json.dumps(self._data))
class OCSPPrefetchTest(util.ApacheTest):
"""Tests for OCSP Prefetch feature"""
# pylint: disable=protected-access
def setUp(self): # pylint: disable=arguments-differ
super(OCSPPrefetchTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="debian")
self.lineage = mock.MagicMock(cert_path="cert", chain_path="chain")
self.config.parser.modules.add("headers_module")
self.config.parser.modules.add("mod_headers.c")
self.config.parser.modules.add("ssl_module")
self.config.parser.modules.add("mod_ssl.c")
self.config.parser.modules.add("socache_dbm_module")
self.config.parser.modules.add("mod_socache_dbm.c")
self.vh_truth = util.get_vh_truth(
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
self.config._ensure_ocsp_dirs()
self.db_path = os.path.join(self.work_dir, "ocsp", "ocsp_cache") + ".db"
def _call_mocked(self, func, *args, **kwargs):
"""Helper method to call functins with mock stack"""
def mock_restart():
"""Mock ApacheConfigurator.restart that creates the dbm file"""
# Mock the Apache dbm file creation
open(self.db_path, 'a').close()
ver_path = "certbot_apache._internal.configurator.ApacheConfigurator.get_version"
res_path = "certbot_apache._internal.prefetch_ocsp.OCSPPrefetchMixin.restart"
cry_path = "certbot.crypto_util.cert_sha1_fingerprint"
with mock.patch(ver_path) as mock_ver:
mock_ver.return_value = (2, 4, 10)
with mock.patch(cry_path) as mock_cry:
mock_cry.return_value = b'j\x056\x1f\xfa\x08B\xe8D\xa1Bn\xeb*A\xebWx\xdd\xfe'
with mock.patch(res_path, side_effect=mock_restart):
return func(*args, **kwargs)
def call_mocked_py2(self, func, *args, **kwargs):
"""Calls methods with imports mocked to suit Py2 environment"""
if 'dbm' in sys.modules.keys(): # pragma: no cover
sys.modules['dbm'] = None
sys.modules['bsddb'] = Mockdbm_impl()
return self._call_mocked(func, *args, **kwargs)
def call_mocked_py3(self, func, *args, **kwargs):
"""Calls methods with imports mocked to suit Py3 environment"""
real_import = six.moves.builtins.__import__
def mock_import(*args, **kwargs):
"""Mock import to raise ImportError for Py2 specific module to make
ApacheConfigurator pick the correct one for Python3 regardless of the
python version the tests are running under."""
if args[0] == "bsddb":
raise ImportError
return real_import(*args, **kwargs)
with mock.patch('six.moves.builtins.__import__', side_effect=mock_import):
sys.modules['dbm'] = MockDBM()
return self._call_mocked(func, *args, **kwargs)
@mock.patch("certbot_apache._internal.override_debian.DebianConfigurator.enable_mod")
def test_ocsp_prefetch_enable_mods(self, mock_enable):
self.config.parser.modules.discard("socache_dbm_module")
self.config.parser.modules.discard("mod_socache_dbm.c")
self.config.parser.modules.discard("headers_module")
self.config.parser.modules.discard("mod_header.c")
ref_path = "certbot_apache._internal.override_debian.DebianConfigurator._ocsp_refresh"
with mock.patch(ref_path):
self.call_mocked_py2(self.config.enable_ocsp_prefetch,
self.lineage, ["ocspvhost.com"])
self.assertTrue(mock_enable.called)
self.assertEqual(len(self.config._ocsp_prefetch), 1)
@mock.patch("certbot_apache._internal.override_debian.DebianConfigurator.enable_mod")
def test_ocsp_prefetch_enable_error(self, _mock_enable):
ref_path = "certbot_apache._internal.override_debian.DebianConfigurator._ocsp_refresh"
self.config.recovery_routine = mock.MagicMock()
with mock.patch(ref_path, side_effect=errors.PluginError("failed")):
self.assertRaises(errors.PluginError,
self.call_mocked_py2,
self.config.enable_ocsp_prefetch,
self.lineage,
["ocspvhost.com"])
self.assertTrue(self.config.recovery_routine.called)
def test_ocsp_prefetch_vhost_not_found_error(self):
choose_path = "certbot_apache._internal.override_debian.DebianConfigurator.choose_vhosts"
with mock.patch(choose_path) as mock_choose:
mock_choose.return_value = []
self.assertRaises(errors.MisconfigurationError,
self.call_mocked_py2,
self.config.enable_ocsp_prefetch,
self.lineage,
["domainnotfound"])
@mock.patch("certbot_apache._internal.constants.OCSP_INTERNAL_TTL", 0)
def test_ocsp_prefetch_refresh(self):
def ocsp_req_mock(_cert, _chain, workfile):
"""Method to mock the OCSP request and write response to file"""
with open(workfile, 'w') as fh:
fh.write("MOCKRESPONSE")
return False
ocsp_path = "certbot.ocsp.RevocationChecker.ocsp_revoked_by_paths"
with mock.patch(ocsp_path, side_effect=ocsp_req_mock):
with mock.patch('certbot.ocsp.RevocationChecker.ocsp_times') as mock_times:
produced_at = datetime.today() - timedelta(days=1)
this_update = datetime.today() - timedelta(days=2)
next_update = datetime.today() + timedelta(days=2)
mock_times.return_value = produced_at, this_update, next_update
self.call_mocked_py2(self.config.enable_ocsp_prefetch,
self.lineage, ["ocspvhost.com"])
odbm = _read_dbm(self.db_path)
self.assertEqual(len(odbm.keys()), 1)
# The actual response data is prepended by Apache timestamp
self.assertTrue(odbm[list(odbm.keys())[0]].endswith(b'MOCKRESPONSE'))
with mock.patch(ocsp_path, side_effect=ocsp_req_mock) as mock_ocsp:
with mock.patch('certbot.ocsp.RevocationChecker.ocsp_times') as mock_times:
produced_at = datetime.today() - timedelta(days=1)
this_update = datetime.today() - timedelta(days=2)
next_update = datetime.today() + timedelta(days=2)
mock_times.return_value = produced_at, this_update, next_update
self.call_mocked_py2(self.config.update_ocsp_prefetch, None)
self.assertTrue(mock_ocsp.called)
def test_ocsp_prefetch_refresh_noop(self):
def ocsp_req_mock(_cert, _chain, workfile):
"""Method to mock the OCSP request and write response to file"""
with open(workfile, 'w') as fh:
fh.write("MOCKRESPONSE")
return True
ocsp_path = "certbot.ocsp.RevocationChecker.ocsp_revoked_by_paths"
with mock.patch(ocsp_path, side_effect=ocsp_req_mock):
self.call_mocked_py2(self.config.enable_ocsp_prefetch,
self.lineage, ["ocspvhost.com"])
self.assertEqual(len(self.config._ocsp_prefetch), 1)
refresh_path = "certbot_apache._internal.override_debian.DebianConfigurator._ocsp_refresh"
with mock.patch(refresh_path) as mock_refresh:
self.call_mocked_py2(self.config.update_ocsp_prefetch, None)
self.assertFalse(mock_refresh.called)
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.config_test")
def test_ocsp_prefetch_backup_db(self, _mock_test):
def ocsp_del_db():
"""Side effect of _reload() that deletes the DBM file, like Apache
does when restarting"""
os.remove(self.db_path)
self.assertFalse(os.path.isfile(self.db_path))
# Make sure that the db file exists
open(self.db_path, 'a').close()
self.call_mocked_py2(self.config._write_to_dbm, self.db_path, b'mock_key', b'mock_value')
# Mock OCSP prefetch dict to signify that there should be a db
self.config._ocsp_prefetch = {"mock": "value"}
rel_path = "certbot_apache._internal.configurator.ApacheConfigurator._reload"
with mock.patch(rel_path, side_effect=ocsp_del_db):
self.config.restart()
odbm = _read_dbm(self.db_path)
self.assertEqual(odbm[b'mock_key'], b'mock_value')
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.config_test")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._reload")
def test_ocsp_prefetch_backup_db_error(self, _mock_reload, _mock_test):
log_path = "certbot_apache._internal.prefetch_ocsp.logger.debug"
log_string = "Encountered an issue while trying to backup OCSP dbm file"
log_string2 = "Encountered an issue when trying to restore OCSP dbm file"
self.config._ocsp_prefetch = {"mock": "value"}
with mock.patch("shutil.copy2", side_effect=IOError):
with mock.patch(log_path) as mock_log:
self.config.restart()
self.assertTrue(mock_log.called)
self.assertEqual(mock_log.call_count, 2)
self.assertTrue(log_string in mock_log.call_args_list[0][0][0])
self.assertTrue(log_string2 in mock_log.call_args_list[1][0][0])
@mock.patch("certbot_apache._internal.prefetch_ocsp.OCSPPrefetchMixin.restart")
def test_ocsp_prefetch_refresh_fail(self, _mock_restart):
ocsp_path = "certbot.ocsp.RevocationChecker.ocsp_revoked_by_paths"
log_path = "certbot_apache._internal.prefetch_ocsp.logger.warning"
with mock.patch(ocsp_path) as mock_ocsp:
mock_ocsp.return_value = True
with mock.patch(log_path) as mock_log:
self.call_mocked_py2(self.config.enable_ocsp_prefetch,
self.lineage, ["ocspvhost.com"])
self.assertTrue(mock_log.called)
self.assertTrue(
"trying to prefetch OCSP" in mock_log.call_args[0][0])
@mock.patch("certbot_apache._internal.override_debian.DebianConfigurator._ocsp_refresh_needed")
def test_ocsp_prefetch_update_noop(self, mock_refresh):
self.config.update_ocsp_prefetch(None)
self.assertFalse(mock_refresh.called)
def test_ocsp_prefetch_preflight_check_noerror(self):
self.call_mocked_py2(self.config._ensure_ocsp_prefetch_compatibility)
self.call_mocked_py3(self.config._ensure_ocsp_prefetch_compatibility)
real_import = six.moves.builtins.__import__
def mock_import(*args, **kwargs):
if args[0] == "bsddb":
raise ImportError
return real_import(*args, **kwargs)
with mock.patch('six.moves.builtins.__import__', side_effect=mock_import):
sys.modules['dbm'] = MockDBM(library='Not Berkeley DB')
self.assertRaises(errors.NotSupportedError,
self._call_mocked,
self.config._ensure_ocsp_prefetch_compatibility)
def test_ocsp_prefetch_open_dbm_no_file(self):
open(self.db_path, 'a').close()
db_not_exists = self.db_path+"nonsense"
self.call_mocked_py2(self.config._write_to_dbm, self.db_path, b'k', b'v')
self.assertRaises(errors.PluginError,
self.call_mocked_py2,
self.config._write_to_dbm,
db_not_exists,
b'k', b'v')
def test_ocsp_prefetch_py2_open_file_error(self):
open(self.db_path, 'a').close()
mock_db = mock.MagicMock()
mock_db.hashopen.side_effect = Exception("error")
sys.modules["bsddb"] = mock_db
self.assertRaises(errors.PluginError,
self.config._write_to_dbm,
self.db_path,
b'k', b'v')
def test_ocsp_prefetch_py3_open_file_error(self):
open(self.db_path, 'a').close()
mock_db = mock.MagicMock()
mock_db.ndbm.open.side_effect = Exception("error")
sys.modules["dbm"] = mock_db
sys.modules["bsddb"] = None
self.assertRaises(errors.PluginError,
self.config._write_to_dbm,
self.db_path,
b'k', b'v')
def test_ocsp_prefetch_open_close_py2_noerror(self):
expected_val = b'whatever_value'
open(self.db_path, 'a').close()
self.call_mocked_py2(
self.config._write_to_dbm, self.db_path,
b'key', expected_val
)
db2 = self.call_mocked_py2(_read_dbm, self.db_path)
self.assertEqual(db2[b'key'], expected_val)
def test_ocsp_prefetch_open_close_py3_noerror(self):
expected_val = b'whatever_value'
open(self.db_path, 'a').close()
self.call_mocked_py3(
self.config._write_to_dbm, self.db_path,
b'key', expected_val
)
db2 = self.call_mocked_py3(_read_dbm, self.db_path)
self.assertEqual(db2[b'key'], expected_val)
def test_ocsp_prefetch_safe_open_hash_mismatch(self):
import random
def mock_hash(_filepath):
# should not be the same value twice
return str(random.getrandbits(1024))
open(self.db_path, 'a').close()
with mock.patch("certbot_apache._internal.apache_util._file_hash", side_effect=mock_hash):
self.assertRaises(
errors.PluginError,
self.call_mocked_py3,
self.config._write_to_dbm, self.db_path,
b'anything', b'irrelevant'
)
def test_ocsp_prefetch_safe_open_hash_fail_open(self):
open(self.db_path, 'a').close()
with mock.patch("certbot_apache._internal.apache_util._file_hash", side_effect=IOError):
self.assertRaises(
errors.PluginError,
self.call_mocked_py3,
self.config._write_to_dbm, self.db_path,
b'anything', b'irrelevant'
)
@mock.patch("certbot_apache._internal.constants.OCSP_APACHE_TTL", 1234)
def test_ttl(self):
self.assertEqual(self.config._ocsp_ttl(None), 1234)
next_update = datetime.today() + timedelta(days=6)
ttl = self.config._ocsp_ttl(next_update)
# ttl should be roughly 3 days
self.assertTrue(ttl > 86400*2)
self.assertTrue(ttl < 86400*4)
@mock.patch("certbot_apache._internal.prefetch_ocsp.OCSPPrefetchMixin._ocsp_prefetch_fetch_state")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.config_test")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._reload")
def test_restart_load_state_call(self, _rl, _ct, mock_fch):
self.assertFalse(self.config._ocsp_prefetch)
self.config.restart()
self.assertTrue(mock_fch.called)
@mock.patch("certbot_apache._internal.prefetch_ocsp.OCSPPrefetchMixin._ocsp_prefetch_backup_db")
@mock.patch("certbot_apache._internal.prefetch_ocsp.OCSPPrefetchMixin._ocsp_prefetch_restore_db")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.config_test")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._reload")
def test_restart_backupdb_call(self, _rl, _ctest, mock_rest, mock_bck):
self.config._ocsp_prefetch = True
self.config.restart()
self.assertTrue(mock_rest.called)
self.assertTrue(mock_bck.called)
@mock.patch("certbot_apache._internal.prefetch_ocsp.OCSPPrefetchMixin._ocsp_prefetch_backup_db")
@mock.patch("certbot_apache._internal.prefetch_ocsp.OCSPPrefetchMixin._ocsp_prefetch_restore_db")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.config_test")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._reload")
def test_restart_recover_error(self, mock_reload, _ctest, mock_rest, mock_bck):
self.config._ocsp_prefetch = True
mock_reload.side_effect = errors.MisconfigurationError
self.assertRaises(errors.MisconfigurationError, self.config.restart)
self.assertTrue(mock_bck.called)
self.assertTrue(mock_rest.called)
def _read_dbm(filename):
"""Helper method for reading the dbm using context manager.
Used for tests.
:param str filename: DBM database filename
:returns: Dictionary of database keys and values
:rtype: dict
"""
ret = dict()
with DBMHandler(filename, 'r') as db:
for k in db.keys():
ret[k] = db[k]
return ret
if __name__ == "__main__":
unittest.main() # pragma: no cover
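
Aside, not part of the test file above: the `_write_to_dbm`/`_read_dbm` round trip these tests exercise is, at its core, plain DBM key/value storage where keys and values must be bytes. A minimal standalone sketch using only Python 3's standard-library `dbm` module (independent of certbot's `DBMHandler`, with an arbitrary temporary path) might look like this:

```python
import dbm
import os
import tempfile

# Arbitrary writable location, for illustration only.
path = os.path.join(tempfile.mkdtemp(), 'ocsp_db')

# Create the database and store one entry; DBM keys and values are bytes.
with dbm.open(path, 'c') as db:
    db[b'mock_key'] = b'mock_value'

# Reopen it read-only and read the entry back, which is essentially what the
# _read_dbm helper above does through the DBMHandler context manager.
with dbm.open(path, 'r') as db:
    assert db[b'mock_key'] == b'mock_value'
```

The backup/restore tests above exist because, as the `ocsp_del_db` side effect notes, Apache can remove the DBM file when it restarts, so the database is copied aside before a restart and restored afterwards.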

View File

@@ -5,7 +5,14 @@ import mock
import util
try:
import apacheconfig
HAS_APACHECONFIG = True
except ImportError: # pragma: no cover
HAS_APACHECONFIG = False
@unittest.skipIf(not HAS_APACHECONFIG, reason='Tests require apacheconfig dependency')
class ConfiguratorParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-methods
"""Test AugeasParserNode using available test configurations"""

View File

@@ -26,7 +26,7 @@ Listen 443
# Pass Phrase Dialog:
# Configure the pass phrase gathering process.
# The filtering dialog program (`builtin' is an internal
# The filtering dialog program (`builtin' is a internal
# terminal dialog) has to provide the pass phrase on stdout.
SSLPassPhraseDialog builtin

View File

@@ -702,7 +702,7 @@ IndexIgnore .??* *~ *# HEADER* README* RCS CVS *,v *,t
# English (en) - Esperanto (eo) - Estonian (et) - French (fr) - German (de)
# Greek-Modern (el) - Hebrew (he) - Italian (it) - Japanese (ja)
# Korean (ko) - Luxembourgeois* (ltz) - Norwegian Nynorsk (nn)
# Norwegian (no) - Polish (pl) - Portuguese (pt)
# Norwegian (no) - Polish (pl) - Portugese (pt)
# Brazilian Portuguese (pt-BR) - Russian (ru) - Swedish (sv)
# Simplified Chinese (zh-CN) - Spanish (es) - Traditional Chinese (zh-TW)
#

View File

@@ -13,7 +13,7 @@ Listen 443 https
# Pass Phrase Dialog:
# Configure the pass phrase gathering process.
# The filtering dialog program (`builtin' is an internal
# The filtering dialog program (`builtin' is a internal
# terminal dialog) has to provide the pass phrase on stdout.
SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog

View File

@@ -31,7 +31,7 @@
# Pass Phrase Dialog:
# Configure the pass phrase gathering process.
# The filtering dialog program (`builtin' is an internal
# The filtering dialog program (`builtin' is a internal
# terminal dialog) has to provide the pass phrase on stdout.
SSLPassPhraseDialog exec:/usr/share/apache2/ask-for-passphrase

View File

@@ -31,7 +31,7 @@
# Pass Phrase Dialog:
# Configure the pass phrase gathering process.
# The filtering dialog program (`builtin' is an internal
# The filtering dialog program (`builtin' is a internal
# terminal dialog) has to provide the pass phrase on stdout.
SSLPassPhraseDialog exec:/usr/share/apache2/ask-for-passphrase

View File

@@ -31,7 +31,7 @@
# Pass Phrase Dialog:
# Configure the pass phrase gathering process.
# The filtering dialog program (`builtin' is an internal
# The filtering dialog program (`builtin' is a internal
# terminal dialog) has to provide the pass phrase on stdout.
SSLPassPhraseDialog exec:/usr/share/apache2/ask-for-passphrase

View File

@@ -33,7 +33,7 @@
# English (en) - Esperanto (eo) - Estonian (et) - French (fr) - German (de)
# Greek-Modern (el) - Hebrew (he) - Italian (it) - Japanese (ja)
# Korean (ko) - Luxembourgeois* (ltz) - Norwegian Nynorsk (nn)
# Norwegian (no) - Polish (pl) - Portuguese (pt)
# Norwegian (no) - Polish (pl) - Portugese (pt)
# Brazilian Portuguese (pt-BR) - Russian (ru) - Swedish (sv)
# Simplified Chinese (zh-CN) - Spanish (es) - Traditional Chinese (zh-TW)
AddLanguage ca .ca

View File

@@ -43,7 +43,7 @@ SSLRandomSeed connect builtin
## Pass Phrase Dialog:
# Configure the pass phrase gathering process. The filtering dialog program
# (`builtin' is an internal terminal dialog) has to provide the pass phrase on
# (`builtin' is a internal terminal dialog) has to provide the pass phrase on
# stdout.
SSLPassPhraseDialog builtin

View File

@@ -31,7 +31,7 @@ if [ -z "$VENV_PATH" ]; then
fi
VENV_BIN="$VENV_PATH/bin"
BOOTSTRAP_VERSION_PATH="$VENV_PATH/certbot-auto-bootstrap-version.txt"
LE_AUTO_VERSION="1.2.0"
LE_AUTO_VERSION="1.0.0"
BASENAME=$(basename $0)
USAGE="Usage: $BASENAME [OPTIONS]
A self-updating wrapper script for the Certbot ACME client. When run, updates
@@ -256,28 +256,20 @@ DeprecationBootstrap() {
fi
}
MIN_PYTHON_2_VERSION="2.7"
MIN_PYVER2=$(echo "$MIN_PYTHON_2_VERSION" | sed 's/\.//')
MIN_PYTHON_3_VERSION="3.5"
MIN_PYVER3=$(echo "$MIN_PYTHON_3_VERSION" | sed 's/\.//')
MIN_PYTHON_VERSION="2.7"
MIN_PYVER=$(echo "$MIN_PYTHON_VERSION" | sed 's/\.//')
# Sets LE_PYTHON to Python version string and PYVER to the first two
# digits of the python version.
# MIN_PYVER and MIN_PYTHON_VERSION are also set by this function, and their
# values depend on if we try to use Python 3 or Python 2.
# digits of the python version
DeterminePythonVersion() {
# Arguments: "NOCRASH" if we shouldn't crash if we don't find a good python
#
# If no Python is found, PYVER is set to 0.
if [ "$USE_PYTHON_3" = 1 ]; then
MIN_PYVER=$MIN_PYVER3
MIN_PYTHON_VERSION=$MIN_PYTHON_3_VERSION
for LE_PYTHON in "$LE_PYTHON" python3; do
# Break (while keeping the LE_PYTHON value) if found.
$EXISTS "$LE_PYTHON" > /dev/null && break
done
else
MIN_PYVER=$MIN_PYVER2
MIN_PYTHON_VERSION=$MIN_PYTHON_2_VERSION
for LE_PYTHON in "$LE_PYTHON" python2.7 python27 python2 python; do
# Break (while keeping the LE_PYTHON value) if found.
$EXISTS "$LE_PYTHON" > /dev/null && break
@@ -293,7 +285,7 @@ DeterminePythonVersion() {
fi
fi
PYVER=$("$LE_PYTHON" -V 2>&1 | cut -d" " -f 2 | cut -d. -f1,2 | sed 's/\.//')
PYVER=`"$LE_PYTHON" -V 2>&1 | cut -d" " -f 2 | cut -d. -f1,2 | sed 's/\.//'`
if [ "$PYVER" -lt "$MIN_PYVER" ]; then
if [ "$1" != "NOCRASH" ]; then
error "You have an ancient version of Python entombed in your operating system..."
@@ -376,9 +368,7 @@ BootstrapDebCommon() {
# Sets TOOL to the name of the package manager
# Sets appropriate values for YES_FLAG and QUIET_FLAG based on $ASSUME_YES and $QUIET_FLAG.
# Note: this function is called both while selecting the bootstrap scripts and
# during the actual bootstrap. Some things, like prompting the user, can be done in the latter
# case, but not in the former one.
# Enables EPEL if applicable and possible.
InitializeRPMCommonBase() {
if type dnf 2>/dev/null
then
@@ -398,6 +388,26 @@ InitializeRPMCommonBase() {
if [ "$QUIET" = 1 ]; then
QUIET_FLAG='--quiet'
fi
if ! $TOOL list *virtualenv >/dev/null 2>&1; then
echo "To use Certbot, packages from the EPEL repository need to be installed."
if ! $TOOL list epel-release >/dev/null 2>&1; then
error "Enable the EPEL repository and try running Certbot again."
exit 1
fi
if [ "$ASSUME_YES" = 1 ]; then
/bin/echo -n "Enabling the EPEL repository in 3 seconds..."
sleep 1s
/bin/echo -ne "\e[0K\rEnabling the EPEL repository in 2 seconds..."
sleep 1s
/bin/echo -e "\e[0K\rEnabling the EPEL repository in 1 second..."
sleep 1s
fi
if ! $TOOL install $YES_FLAG $QUIET_FLAG epel-release; then
error "Could not enable EPEL. Aborting bootstrap!"
exit 1
fi
fi
}
BootstrapRpmCommonBase() {
@@ -478,91 +488,13 @@ BootstrapRpmCommon() {
BootstrapRpmCommonBase "$python_pkgs"
}
# If new packages are installed by BootstrapRpmPython3 below, this version
# number must be increased.
BOOTSTRAP_RPM_PYTHON3_LEGACY_VERSION=1
# Checks if rh-python36 can be installed.
Python36SclIsAvailable() {
InitializeRPMCommonBase >/dev/null 2>&1;
if "${TOOL}" list rh-python36 >/dev/null 2>&1; then
return 0
fi
if "${TOOL}" list centos-release-scl >/dev/null 2>&1; then
return 0
fi
return 1
}
# Try to enable rh-python36 from SCL if it is necessary and possible.
EnablePython36SCL() {
if "$EXISTS" python3.6 > /dev/null 2> /dev/null; then
return 0
fi
if [ ! -f /opt/rh/rh-python36/enable ]; then
return 0
fi
set +e
if ! . /opt/rh/rh-python36/enable; then
error 'Unable to enable rh-python36!'
exit 1
fi
set -e
}
# This bootstrap concerns old RedHat-based distributions that do not ship by default
# with Python 2.7, but only Python 2.6. We bootstrap them by enabling SCL and installing
# Python 3.6. Some of these distributions are: CentOS/RHEL/OL/SL 6.
BootstrapRpmPython3Legacy() {
# Tested with:
# - CentOS 6
InitializeRPMCommonBase
if ! "${TOOL}" list rh-python36 >/dev/null 2>&1; then
echo "To use Certbot on this operating system, packages from the SCL repository need to be installed."
if ! "${TOOL}" list centos-release-scl >/dev/null 2>&1; then
error "Enable the SCL repository and try running Certbot again."
exit 1
fi
if [ "${ASSUME_YES}" = 1 ]; then
/bin/echo -n "Enabling the SCL repository in 3 seconds... (Press Ctrl-C to cancel)"
sleep 1s
/bin/echo -ne "\e[0K\rEnabling the SCL repository in 2 seconds... (Press Ctrl-C to cancel)"
sleep 1s
/bin/echo -e "\e[0K\rEnabling the SCL repository in 1 second... (Press Ctrl-C to cancel)"
sleep 1s
fi
if ! "${TOOL}" install "${YES_FLAG}" "${QUIET_FLAG}" centos-release-scl; then
error "Could not enable SCL. Aborting bootstrap!"
exit 1
fi
fi
# CentOS 6 must use rh-python36 from SCL
if "${TOOL}" list rh-python36 >/dev/null 2>&1; then
python_pkgs="rh-python36-python
rh-python36-python-virtualenv
rh-python36-python-devel
"
else
error "No supported Python package available to install. Aborting bootstrap!"
exit 1
fi
BootstrapRpmCommonBase "${python_pkgs}"
# Enable SCL rh-python36 after bootstrapping.
EnablePython36SCL
}
# If new packages are installed by BootstrapRpmPython3 below, this version
# number must be increased.
BOOTSTRAP_RPM_PYTHON3_VERSION=1
BootstrapRpmPython3() {
# Tested with:
# - CentOS 6
# - Fedora 29
InitializeRPMCommonBase
@@ -573,6 +505,12 @@ BootstrapRpmPython3() {
python3-virtualenv
python3-devel
"
# EPEL uses python34
elif $TOOL list python34 >/dev/null 2>&1; then
python_pkgs="python34
python34-devel
python34-tools
"
else
error "No supported Python package available to install. Aborting bootstrap!"
exit 1
@@ -820,11 +758,6 @@ elif [ -f /etc/redhat-release ]; then
RPM_DIST_NAME=`(. /etc/os-release 2> /dev/null && echo $ID) || echo "unknown"`
if [ "$PYVER" -eq 26 -a $(uname -m) != 'x86_64' ]; then
# 32 bits CentOS 6 and affiliates are not supported anymore by certbot-auto.
DEPRECATED_OS=1
fi
# Set RPM_DIST_VERSION to VERSION_ID from /etc/os-release after splitting on
# '.' characters (e.g. "8.0" becomes "8"). If the command exits with an
# error, RPM_DIST_VERSION is set to "unknown".
@@ -836,50 +769,31 @@ elif [ -f /etc/redhat-release ]; then
RPM_DIST_VERSION=0
fi
# Handle legacy RPM distributions
if [ "$PYVER" -eq 26 ]; then
# Check if an automated bootstrap can be achieved on this system.
if ! Python36SclIsAvailable; then
INTERACTIVE_BOOTSTRAP=1
fi
# Starting to Fedora 29, python2 is on a deprecation path. Let's move to python3 then.
# RHEL 8 also uses python3 by default.
if [ "$RPM_DIST_NAME" = "fedora" -a "$RPM_DIST_VERSION" -ge 29 -o "$PYVER" -eq 26 ]; then
RPM_USE_PYTHON_3=1
elif [ "$RPM_DIST_NAME" = "rhel" -a "$RPM_DIST_VERSION" -ge 8 ]; then
RPM_USE_PYTHON_3=1
elif [ "$RPM_DIST_NAME" = "centos" -a "$RPM_DIST_VERSION" -ge 8 ]; then
RPM_USE_PYTHON_3=1
else
RPM_USE_PYTHON_3=0
fi
if [ "$RPM_USE_PYTHON_3" = 1 ]; then
Bootstrap() {
BootstrapMessage "Legacy RedHat-based OSes that will use Python3"
BootstrapRpmPython3Legacy
BootstrapMessage "RedHat-based OSes that will use Python3"
BootstrapRpmPython3
}
USE_PYTHON_3=1
BOOTSTRAP_VERSION="BootstrapRpmPython3Legacy $BOOTSTRAP_RPM_PYTHON3_LEGACY_VERSION"
# Try now to enable SCL rh-python36 for systems already bootstrapped
# NB: EnablePython36SCL has been defined along with BootstrapRpmPython3Legacy in certbot-auto
EnablePython36SCL
BOOTSTRAP_VERSION="BootstrapRpmPython3 $BOOTSTRAP_RPM_PYTHON3_VERSION"
else
# Starting to Fedora 29, python2 is on a deprecation path. Let's move to python3 then.
# RHEL 8 also uses python3 by default.
if [ "$RPM_DIST_NAME" = "fedora" -a "$RPM_DIST_VERSION" -ge 29 ]; then
RPM_USE_PYTHON_3=1
elif [ "$RPM_DIST_NAME" = "rhel" -a "$RPM_DIST_VERSION" -ge 8 ]; then
RPM_USE_PYTHON_3=1
elif [ "$RPM_DIST_NAME" = "centos" -a "$RPM_DIST_VERSION" -ge 8 ]; then
RPM_USE_PYTHON_3=1
else
RPM_USE_PYTHON_3=0
fi
if [ "$RPM_USE_PYTHON_3" = 1 ]; then
Bootstrap() {
BootstrapMessage "RedHat-based OSes that will use Python3"
BootstrapRpmPython3
}
USE_PYTHON_3=1
BOOTSTRAP_VERSION="BootstrapRpmPython3 $BOOTSTRAP_RPM_PYTHON3_VERSION"
else
Bootstrap() {
BootstrapMessage "RedHat-based OSes"
BootstrapRpmCommon
}
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
fi
Bootstrap() {
BootstrapMessage "RedHat-based OSes"
BootstrapRpmCommon
}
BOOTSTRAP_VERSION="BootstrapRpmCommon $BOOTSTRAP_RPM_COMMON_VERSION"
fi
LE_PYTHON="$prev_le_python"
@@ -956,13 +870,6 @@ if [ "$NO_BOOTSTRAP" = 1 ]; then
unset BOOTSTRAP_VERSION
fi
if [ "$DEPRECATED_OS" = 1 ]; then
Bootstrap() {
error "Skipping bootstrap because certbot-auto is deprecated on this system."
}
unset BOOTSTRAP_VERSION
fi
# Sets PREV_BOOTSTRAP_VERSION to the identifier for the bootstrap script used
# to install OS dependencies on this system. PREV_BOOTSTRAP_VERSION isn't set
# if it is unknown how OS dependencies were installed on this system.
@@ -1160,28 +1067,6 @@ if [ "$1" = "--le-auto-phase2" ]; then
# Phase 2: Create venv, install LE, and run.
shift 1 # the --le-auto-phase2 arg
if [ "$DEPRECATED_OS" = 1 ]; then
# Phase 2 damage control mode for deprecated OSes.
# In this situation, we bypass any bootstrap or certbot venv setup.
error "Your system is not supported by certbot-auto anymore."
if [ ! -d "$VENV_PATH" ] && OldVenvExists; then
VENV_BIN="$OLD_VENV_PATH/bin"
fi
if [ -f "$VENV_BIN/letsencrypt" -a "$INSTALL_ONLY" != 1 ]; then
error "Certbot will no longer receive updates."
error "Please visit https://certbot.eff.org/ to check for other alternatives."
"$VENV_BIN/letsencrypt" "$@"
exit 0
else
error "Certbot cannot be installed."
error "Please visit https://certbot.eff.org/ to check for other alternatives."
exit 1
fi
fi
SetPrevBootstrapVersion
if [ -z "$PHASE_1_VERSION" -a "$USE_PYTHON_3" = 1 ]; then
@@ -1193,15 +1078,8 @@ if [ "$1" = "--le-auto-phase2" ]; then
# If the selected Bootstrap function isn't a noop and it differs from the
# previously used version
if [ -n "$BOOTSTRAP_VERSION" -a "$BOOTSTRAP_VERSION" != "$PREV_BOOTSTRAP_VERSION" ]; then
# Check if we can rebootstrap without manual user intervention: this requires that
# certbot-auto is in non-interactive mode AND selected bootstrap does not claim to
# require a manual user intervention.
if [ "$NONINTERACTIVE" = 1 -a "$INTERACTIVE_BOOTSTRAP" != 1 ]; then
CAN_REBOOTSTRAP=1
fi
# Check if rebootstrap can be done non-interactively and current shell is non-interactive
# (true if stdin and stdout are not attached to a terminal).
if [ \( "$CAN_REBOOTSTRAP" = 1 \) -o \( \( -t 0 \) -a \( -t 1 \) \) ]; then
# if non-interactive mode or stdin and stdout are connected to a terminal
if [ \( "$NONINTERACTIVE" = 1 \) -o \( \( -t 0 \) -a \( -t 1 \) \) ]; then
if [ -d "$VENV_PATH" ]; then
rm -rf "$VENV_PATH"
fi
@@ -1212,21 +1090,12 @@ if [ "$1" = "--le-auto-phase2" ]; then
ln -s "$VENV_PATH" "$OLD_VENV_PATH"
fi
RerunWithArgs "$@"
# Otherwise bootstrap needs to be done manually by the user.
else
# If it is because bootstrapping is interactive, --non-interactive will be of no use.
if [ "$INTERACTIVE_BOOTSTRAP" = 1 ]; then
error "Skipping upgrade because new OS dependencies may need to be installed."
error "This requires manual user intervention: please run this script again manually."
# If this is because of the environment (eg. non interactive shell without
# --non-interactive flag set), help the user in that direction.
else
error "Skipping upgrade because new OS dependencies may need to be installed."
error
error "To upgrade to a newer version, please run this script again manually so you can"
error "approve changes or with --non-interactive on the command line to automatically"
error "install any required packages."
fi
error "Skipping upgrade because new OS dependencies may need to be installed."
error
error "To upgrade to a newer version, please run this script again manually so you can"
error "approve changes or with --non-interactive on the command line to automatically"
error "install any required packages."
# Set INSTALLED_VERSION to be the same so we don't update the venv
INSTALLED_VERSION="$LE_AUTO_VERSION"
# Continue to use OLD_VENV_PATH if the new venv doesn't exist
@@ -1274,11 +1143,11 @@ if [ "$1" = "--le-auto-phase2" ]; then
# pip install hashin
# hashin -r dependency-requirements.txt cryptography==1.5.2
# ```
ConfigArgParse==1.0 \
--hash=sha256:bf378245bc9cdc403a527e5b7406b991680c2a530e7e81af747880b54eb57133
certifi==2019.11.28 \
--hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \
--hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f
ConfigArgParse==0.14.0 \
--hash=sha256:2e2efe2be3f90577aca9415e32cb629aa2ecd92078adbe27b53a03e53ff12e91
certifi==2019.9.11 \
--hash=sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50 \
--hash=sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef
cffi==1.13.2 \
--hash=sha256:0b49274afc941c626b605fb59b59c3485c17dc776dc3cc7cc14aca74cc19cc42 \
--hash=sha256:0e3ea92942cb1168e38c05c1d56b0527ce31f1a370f6117f1d490b8dcd6b3a04 \
@@ -1351,6 +1220,8 @@ enum34==1.1.6 \
funcsigs==1.0.2 \
--hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
--hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50
future==0.18.2 \
--hash=sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d
idna==2.8 \
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
@@ -1363,40 +1234,40 @@ josepy==1.2.0 \
mock==1.3.0 \
--hash=sha256:1e247dbecc6ce057299eb7ee019ad68314bb93152e81d9a6110d35f4d5eca0f6 \
--hash=sha256:3f573a18be94de886d1191f27c168427ef693e8dcfcecf95b170577b2eb69cbb
parsedatetime==2.5 \
--hash=sha256:3b835fc54e472c17ef447be37458b400e3fefdf14bb1ffdedb5d2c853acf4ba1 \
--hash=sha256:d2e9ddb1e463de871d32088a3f3cea3dc8282b1b2800e081bd0ef86900451667
pbr==5.4.4 \
--hash=sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b \
--hash=sha256:61aa52a0f18b71c5cc58232d2cf8f8d09cd67fcad60b742a60124cb8d6951488
pyOpenSSL==19.1.0 \
--hash=sha256:621880965a720b8ece2f1b2f54ea2071966ab00e2970ad2ce11d596102063504 \
--hash=sha256:9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507
parsedatetime==2.4 \
--hash=sha256:3d817c58fb9570d1eec1dd46fa9448cd644eeed4fb612684b02dfda3a79cb84b \
--hash=sha256:9ee3529454bf35c40a77115f5a596771e59e1aee8c53306f346c461b8e913094
pbr==5.4.3 \
--hash=sha256:2c8e420cd4ed4cec4e7999ee47409e876af575d4c35a45840d59e8b5f3155ab8 \
--hash=sha256:b32c8ccaac7b1a20c0ce00ce317642e6cf231cf038f9875e0280e28af5bf7ac9
pyOpenSSL==19.0.0 \
--hash=sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200 \
--hash=sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6
pyRFC3339==1.1 \
--hash=sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4 \
--hash=sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a
pycparser==2.19 \
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3
pyparsing==2.4.6 \
--hash=sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f \
--hash=sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec
pyparsing==2.4.5 \
--hash=sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f \
--hash=sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a
python-augeas==0.5.0 \
--hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2
pytz==2019.3 \
--hash=sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d \
--hash=sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be
requests==2.22.0 \
--hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \
--hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31
requests==2.21.0 \
--hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
--hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b
requests-toolbelt==0.9.1 \
--hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
--hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0
six==1.14.0 \
--hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \
--hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c
urllib3==1.25.8 \
--hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \
--hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc
six==1.13.0 \
--hash=sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd \
--hash=sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66
urllib3==1.24.3 \
--hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \
--hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb
zope.component==4.6 \
--hash=sha256:ec2afc5bbe611dcace98bb39822c122d44743d635dafc7315b9aef25097db9e6
zope.deferredimport==4.3.1 \
@@ -1408,86 +1279,47 @@ zope.deprecation==4.4.0 \
zope.event==4.4 \
--hash=sha256:69c27debad9bdacd9ce9b735dad382142281ac770c4a432b533d6d65c4614bcf \
--hash=sha256:d8e97d165fd5a0997b45f5303ae11ea3338becfe68c401dd88ffd2113fe5cae7
zope.hookable==5.0.0 \
--hash=sha256:0992a0dd692003c09fb958e1480cebd1a28f2ef32faa4857d864f3ca8e9d6952 \
--hash=sha256:0f325838dbac827a1e2ed5d482c1f2656b6844dc96aa098f7727e76395fcd694 \
--hash=sha256:22a317ba00f61bac99eac1a5e330be7cb8c316275a21269ec58aa396b602af0c \
--hash=sha256:25531cb5e7b35e8a6d1d6eddef624b9a22ce5dcf8f4448ef0f165acfa8c3fc21 \
--hash=sha256:30890892652766fc80d11f078aca9a5b8150bef6b88aba23799581a53515c404 \
--hash=sha256:342d682d93937e5b8c232baffb32a87d5eee605d44f74566657c64a239b7f342 \
--hash=sha256:46b2fddf1f5aeb526e02b91f7e62afbb9fff4ffd7aafc97cdb00a0d717641567 \
--hash=sha256:523318ff96df9b8d378d997c00c5d4cbfbff68dc48ff5ee5addabdb697d27528 \
--hash=sha256:53aa02eb8921d4e667c69d76adeed8fe426e43870c101cb08dcd2f3468aff742 \
--hash=sha256:62e79e8fdde087cb20822d7874758f5acbedbffaf3c0fbe06309eb8a41ee4e06 \
--hash=sha256:74bf2f757f7385b56dc3548adae508d8b3ef952d600b4b12b88f7d1706b05dcc \
--hash=sha256:751ee9d89eb96e00c1d7048da9725ce392a708ed43406416dc5ed61e4d199764 \
--hash=sha256:7b83bc341e682771fe810b360cd5d9c886a948976aea4b979ff214e10b8b523b \
--hash=sha256:81eeeb27dbb0ddaed8070daee529f0d1bfe4f74c7351cce2aaca3ea287c4cc32 \
--hash=sha256:856509191e16930335af4d773c0fc31a17bae8991eb6f167a09d5eddf25b56cc \
--hash=sha256:8853e81fd07b18fa9193b19e070dc0557848d9945b1d2dac3b7782543458c87d \
--hash=sha256:94506a732da2832029aecdfe6ea07eb1b70ee06d802fff34e1b3618fe7cdf026 \
--hash=sha256:95ad874a8cc94e786969215d660143817f745225579bfe318c4676e218d3147c \
--hash=sha256:9758ec9174966ffe5c499b6c3d149f80aa0a9238020006a2b87c6af5963fcf48 \
--hash=sha256:a169823e331da939aa7178fc152e65699aeb78957e46c6f80ccb50ee4c3616c2 \
--hash=sha256:a67878a798f6ca292729a28c2226592b3d000dc6ee7825d31887b553686c7ac7 \
--hash=sha256:a9a6d9eb2319a09905670810e2de971d6c49013843700b4975e2fc0afe96c8db \
--hash=sha256:b3e118b58a3d2301960e6f5f25736d92f6b9f861728d3b8c26d69f54d8a157d2 \
--hash=sha256:ca6705c2a1fb5059a4efbe9f5426be4cdf71b3c9564816916fc7aa7902f19ede \
--hash=sha256:cf711527c9d4ae72085f137caffb4be74fc007ffb17cd103628c7d5ba17e205f \
--hash=sha256:d087602a6845ebe9d5a1c5a949fedde2c45f372d77fbce4f7fe44b68b28a1d03 \
--hash=sha256:d1080e1074ddf75ad6662a9b34626650759c19a9093e1a32a503d37e48da135b \
--hash=sha256:db9c60368aff2b7e6c47115f3ad9bd6e96aa298b12ed5f8cb13f5673b30be565 \
--hash=sha256:dbeb127a04473f5a989169eb400b67beb921c749599b77650941c21fe39cb8d9 \
--hash=sha256:dca336ca3682d869d291d7cd18284f6ff6876e4244eb1821430323056b000e2c \
--hash=sha256:dd69a9be95346d10c853b6233fcafe3c0315b89424b378f2ad45170d8e161568 \
--hash=sha256:dd79f8fae5894f1ee0a0042214685f2d039341250c994b825c10a4cd075d80f6 \
--hash=sha256:e647d850aa1286d98910133cee12bd87c354f7b7bb3f3cd816a62ba7fa2f7007 \
--hash=sha256:f37a210b5c04b2d4e4bac494ab15b70196f219a1e1649ddca78560757d4278fb \
--hash=sha256:f67820b6d33a705dc3c1c457156e51686f7b350ff57f2112e1a9a4dad38ec268 \
--hash=sha256:f68969978ccf0e6123902f7365aae5b7a9e99169d4b9105c47cf28e788116894 \
--hash=sha256:f717a0b34460ae1ac0064e91b267c0588ac2c098ffd695992e72cd5462d97a67 \
--hash=sha256:f9d58ccec8684ca276d5a4e7b0dfacca028336300a8f715d616d9f0ce9ae8096 \
--hash=sha256:fcc3513a54e656067cbf7b98bab0d6b9534b9eabc666d1f78aad6acdf0962736
zope.interface==4.7.1 \
--hash=sha256:048b16ac882a05bc7ef534e8b9f15c9d7a6c190e24e8938a19b7617af4ed854a \
--hash=sha256:05816cf8e7407cf62f2ec95c0a5d69ec4fa5741d9ccd10db9f21691916a9a098 \
--hash=sha256:065d6a1ac89d35445168813bed45048ed4e67a4cdfc5a68fdb626a770378869f \
--hash=sha256:14157421f4121a57625002cc4f48ac7521ea238d697c4a4459a884b62132b977 \
--hash=sha256:18dc895945694f397a0be86be760ff664b790f95d8e7752d5bab80284ff9105d \
--hash=sha256:1962c9f838bd6ae4075d0014f72697510daefc7e1c7e48b2607df0b6e157989c \
--hash=sha256:1a67408cacd198c7e6274a19920bb4568d56459e659e23c4915528686ac1763a \
--hash=sha256:21bf781076dd616bd07cf0223f79d61ab4f45176076f90bc2890e18c48195da4 \
--hash=sha256:21c0a5d98650aebb84efa16ce2c8df1a46bdc4fe8a9e33237d0ca0b23f416ead \
--hash=sha256:23cfeea25d1e42ff3bf4f9a0c31e9d5950aa9e7c4b12f0c4bd086f378f7b7a71 \
--hash=sha256:24b6fce1fb71abf9f4093e3259084efcc0ef479f89356757780685bd2b06ef37 \
--hash=sha256:24f84ce24eb6b5fcdcb38ad9761524f1ae96f7126abb5e597f8a3973d9921409 \
--hash=sha256:25e0ef4a824017809d6d8b0ce4ab3288594ba283e4d4f94d8cfb81d73ed65114 \
--hash=sha256:2e8fdd625e9aba31228e7ddbc36bad5c38dc3ee99a86aa420f89a290bd987ce9 \
--hash=sha256:2f3bc2f49b67b1bea82b942d25bc958d4f4ea6709b411cb2b6b9718adf7914ce \
--hash=sha256:35d24be9d04d50da3a6f4d61de028c1dd087045385a0ff374d93ef85af61b584 \
--hash=sha256:35dbe4e8c73003dff40dfaeb15902910a4360699375e7b47d3c909a83ff27cd0 \
--hash=sha256:3dfce831b824ab5cf446ed0c350b793ac6fa5fe33b984305cb4c966a86a8fb79 \
--hash=sha256:3f7866365df5a36a7b8de8056cd1c605648f56f9a226d918ed84c85d25e8d55f \
--hash=sha256:455cc8c01de3bac6f9c223967cea41f4449f58b4c2e724ec8177382ddd183ab4 \
--hash=sha256:4bb937e998be9d5e345f486693e477ba79e4344674484001a0b646be1d530487 \
--hash=sha256:52303a20902ca0888dfb83230ca3ee6fbe63c0ad1dd60aa0bba7958ccff454d8 \
--hash=sha256:6e0a897d4e09859cc80c6a16a29697406ead752292ace17f1805126a4f63c838 \
--hash=sha256:6e1816e7c10966330d77af45f77501f9a68818c065dec0ad11d22b50a0e212e7 \
--hash=sha256:73b5921c5c6ce3358c836461b5470bf675601c96d5e5d8f2a446951470614f67 \
--hash=sha256:8093cd45cdb5f6c8591cfd1af03d32b32965b0f79b94684cd0c9afdf841982bb \
--hash=sha256:864b4a94b60db301899cf373579fd9ef92edddbf0fb2cd5ae99f53ef423ccc56 \
--hash=sha256:8a27b4d3ea9c6d086ce8e7cdb3e8d319b6752e2a03238a388ccc83ccbe165f50 \
--hash=sha256:91b847969d4784abd855165a2d163f72ac1e58e6dce09a5e46c20e58f19cc96d \
--hash=sha256:b47b1028be4758c3167e474884ccc079b94835f058984b15c145966c4df64d27 \
--hash=sha256:b68814a322835d8ad671b7acc23a3b2acecba527bb14f4b53fc925f8a27e44d8 \
--hash=sha256:bcb50a032c3b6ec7fb281b3a83d2b31ab5246c5b119588725b1350d3a1d9f6a3 \
--hash=sha256:c56db7d10b25ce8918b6aec6b08ac401842b47e6c136773bfb3b590753f7fb67 \
--hash=sha256:c94b77a13d4f47883e4f97f9fa00f5feadd38af3e6b3c7be45cfdb0a14c7149b \
--hash=sha256:db381f6fdaef483ad435f778086ccc4890120aff8df2ba5cfeeac24d280b3145 \
--hash=sha256:e6487d01c8b7ed86af30ea141fcc4f93f8a7dde26f94177c1ad637c353bd5c07 \
--hash=sha256:e86923fa728dfba39c5bb6046a450bd4eec8ad949ac404eca728cfce320d1732 \
--hash=sha256:f6ca36dc1e9eeb46d779869c60001b3065fb670b5775c51421c099ea2a77c3c9 \
--hash=sha256:fb62f2cbe790a50d95593fb40e8cca261c31a2f5637455ea39440d6457c2ba25
zope.hookable==4.2.0 \
--hash=sha256:22886e421234e7e8cedc21202e1d0ab59960e40a47dd7240e9659a2d82c51370 \
--hash=sha256:39912f446e45b4e1f1951b5ffa2d5c8b074d25727ec51855ae9eab5408f105ab \
--hash=sha256:3adb7ea0871dbc56b78f62c4f5c024851fc74299f4f2a95f913025b076cde220 \
--hash=sha256:3d7c4b96341c02553d8b8d71065a9366ef67e6c6feca714f269894646bb8268b \
--hash=sha256:4e826a11a529ed0464ffcecf34b0b7bd1b4928dd5848c5c61bedd7833e8f4801 \
--hash=sha256:700d68cc30728de1c4c62088a981c6daeaefdf20a0d81995d2c0b7f442c5f88c \
--hash=sha256:77c82a430cedfbf508d1aa406b2f437363c24fa90c73f577ead0fb5295749b83 \
--hash=sha256:c1df3929a3666fc5a0c80d60a0c1e6f6ef97c7f6ed2f1b7cf49f3e6f3d4dde15 \
--hash=sha256:dba8b2dd2cd41cb5f37bfa3f3d82721b8ae10e492944e48ddd90a439227f2893 \
--hash=sha256:f492540305b15b5591bd7195d61f28946bb071de071cee5d68b6b8414da90fd2
zope.interface==4.6.0 \
--hash=sha256:086707e0f413ff8800d9c4bc26e174f7ee4c9c8b0302fbad68d083071822316c \
--hash=sha256:1157b1ec2a1f5bf45668421e3955c60c610e31913cc695b407a574efdbae1f7b \
--hash=sha256:11ebddf765bff3bbe8dbce10c86884d87f90ed66ee410a7e6c392086e2c63d02 \
--hash=sha256:14b242d53f6f35c2d07aa2c0e13ccb710392bcd203e1b82a1828d216f6f6b11f \
--hash=sha256:1b3d0dcabc7c90b470e59e38a9acaa361be43b3a6ea644c0063951964717f0e5 \
--hash=sha256:20a12ab46a7e72b89ce0671e7d7a6c3c1ca2c2766ac98112f78c5bddaa6e4375 \
--hash=sha256:298f82c0ab1b182bd1f34f347ea97dde0fffb9ecf850ecf7f8904b8442a07487 \
--hash=sha256:2f6175722da6f23dbfc76c26c241b67b020e1e83ec7fe93c9e5d3dd18667ada2 \
--hash=sha256:3b877de633a0f6d81b600624ff9137312d8b1d0f517064dfc39999352ab659f0 \
--hash=sha256:4265681e77f5ac5bac0905812b828c9fe1ce80c6f3e3f8574acfb5643aeabc5b \
--hash=sha256:550695c4e7313555549aa1cdb978dc9413d61307531f123558e438871a883d63 \
--hash=sha256:5f4d42baed3a14c290a078e2696c5f565501abde1b2f3f1a1c0a94fbf6fbcc39 \
--hash=sha256:62dd71dbed8cc6a18379700701d959307823b3b2451bdc018594c48956ace745 \
--hash=sha256:7040547e5b882349c0a2cc9b50674b1745db551f330746af434aad4f09fba2cc \
--hash=sha256:7e099fde2cce8b29434684f82977db4e24f0efa8b0508179fce1602d103296a2 \
--hash=sha256:7e5c9a5012b2b33e87980cee7d1c82412b2ebabcb5862d53413ba1a2cfde23aa \
--hash=sha256:81295629128f929e73be4ccfdd943a0906e5fe3cdb0d43ff1e5144d16fbb52b1 \
--hash=sha256:95cc574b0b83b85be9917d37cd2fad0ce5a0d21b024e1a5804d044aabea636fc \
--hash=sha256:968d5c5702da15c5bf8e4a6e4b67a4d92164e334e9c0b6acf080106678230b98 \
--hash=sha256:9e998ba87df77a85c7bed53240a7257afe51a07ee6bc3445a0bf841886da0b97 \
--hash=sha256:a0c39e2535a7e9c195af956610dba5a1073071d2d85e9d2e5d789463f63e52ab \
--hash=sha256:a15e75d284178afe529a536b0e8b28b7e107ef39626a7809b4ee64ff3abc9127 \
--hash=sha256:a6a6ff82f5f9b9702478035d8f6fb6903885653bff7ec3a1e011edc9b1a7168d \
--hash=sha256:b639f72b95389620c1f881d94739c614d385406ab1d6926a9ffe1c8abbea23fe \
--hash=sha256:bad44274b151d46619a7567010f7cde23a908c6faa84b97598fd2f474a0c6891 \
--hash=sha256:bbcef00d09a30948756c5968863316c949d9cedbc7aabac5e8f0ffbdb632e5f1 \
--hash=sha256:d788a3999014ddf416f2dc454efa4a5dbeda657c6aba031cf363741273804c6b \
--hash=sha256:eed88ae03e1ef3a75a0e96a55a99d7937ed03e53d0cffc2451c208db445a2966 \
--hash=sha256:f99451f3a579e73b5dd58b1b08d1179791d49084371d9a47baad3b22417f0317
zope.proxy==4.3.3 \
--hash=sha256:04646ac04ffa9c8e32fb2b5c3cd42995b2548ea14251f3c21ca704afae88e42c \
--hash=sha256:07b6bceea232559d24358832f1cd2ed344bbf05ca83855a5b9698b5f23c5ed60 \
@@ -1540,18 +1372,18 @@ letsencrypt==0.7.0 \
--hash=sha256:105a5fb107e45bcd0722eb89696986dcf5f08a86a321d6aef25a0c7c63375ade \
--hash=sha256:c36e532c486a7e92155ee09da54b436a3c420813ec1c590b98f635d924720de9
certbot==1.2.0 \
--hash=sha256:e25c17125c00b3398c8e9b9d54ef473c0e8f5aff53389f313a51b06cf472d335 \
--hash=sha256:95dcbae085f8e4eb18442fe7b12994b08964a9a6e8e352e556cdb4a8a625373c
acme==1.2.0 \
--hash=sha256:284d22fde75687a8ea72d737cac6bcbdc91f3c796221aa25378b8732ba6f6875 \
--hash=sha256:0630c740d49bda945e97bd35fc8d6f02d082c8cb9e18f8fec0dbb3d395ac26ab
certbot-apache==1.2.0 \
--hash=sha256:3f7493918353d3bd6067d446a2cf263e03831c4c10ec685b83d644b47767090d \
--hash=sha256:b46e9def272103a68108e48bf7e410ea46801529b1ea6954f6506b14dd9df9b3
certbot-nginx==1.2.0 \
--hash=sha256:efd32a2b32f2439279da446b6bf67684f591f289323c5f494ebfd86a566a28fd \
--hash=sha256:6fd7cf4f2545ad66e57000343227df9ccccaf04420e835e05cb3250fac1fa6db
certbot==1.0.0 \
--hash=sha256:8d074cff89dee002dec1c47cb0da04ea8e0ede8d68838b6d54aa41580d9262df \
--hash=sha256:86b82d31db19fffffb0d6b218951e2121ef514e3ff659aa042deaf92a33e302a
acme==1.0.0 \
--hash=sha256:f6972e436e76f7f1e395e81e149f8713ca8462d465b14993bddc53fb18a40644 \
--hash=sha256:6a08f12f848ce563b50bca421ba9db653df9f82cfefeaf8aba517f046d1386c2
certbot-apache==1.0.0 \
--hash=sha256:e591d0cf773ad33ee978f7adb1b69288eac2c8847c643b06e70260e707626f8e \
--hash=sha256:7335ab5687a0a47d9041d9e13f3a2d67d0e8372da97ab639edb31c14b787cd68
certbot-nginx==1.0.0 \
--hash=sha256:ce8a2e51165da7c15bfdc059cd6572d0f368c078f1e1a77633a2773310b2f231 \
--hash=sha256:63b4ae09d4f1c9ef0a1a2a49c3f651d8a7cb30303ec6f954239e987c5da45dc4
UNLIKELY_EOF
# -------------------------------------------------------------------------
@@ -1785,9 +1617,6 @@ UNLIKELY_EOF
say "Installation succeeded."
fi
# If you're modifying any of the code after this point in this current `if` block, you
# may need to update the "$DEPRECATED_OS" = 1 case at the beginning of phase 2 as well.
if [ "$INSTALL_ONLY" = 1 ]; then
say "Certbot is installed."
exit 0
@@ -1999,35 +1828,30 @@ UNLIKELY_EOF
error "WARNING: unable to check for updates."
fi
# If for any reason REMOTE_VERSION is not set, let's assume certbot-auto is up-to-date,
# and do not go into the self-upgrading process.
if [ -n "$REMOTE_VERSION" ]; then
LE_VERSION_STATE=`CompareVersions "$LE_PYTHON" "$LE_AUTO_VERSION" "$REMOTE_VERSION"`
LE_VERSION_STATE=`CompareVersions "$LE_PYTHON" "$LE_AUTO_VERSION" "$REMOTE_VERSION"`
if [ "$LE_VERSION_STATE" = "UNOFFICIAL" ]; then
say "Unofficial certbot-auto version detected, self-upgrade is disabled: $LE_AUTO_VERSION"
elif [ "$LE_VERSION_STATE" = "OUTDATED" ]; then
say "Upgrading certbot-auto $LE_AUTO_VERSION to $REMOTE_VERSION..."
if [ "$LE_VERSION_STATE" = "UNOFFICIAL" ]; then
say "Unofficial certbot-auto version detected, self-upgrade is disabled: $LE_AUTO_VERSION"
elif [ "$LE_VERSION_STATE" = "OUTDATED" ]; then
say "Upgrading certbot-auto $LE_AUTO_VERSION to $REMOTE_VERSION..."
# Now we drop into Python so we don't have to install even more
# dependencies (curl, etc.), for better flow control, and for the option of
# future Windows compatibility.
"$LE_PYTHON" "$TEMP_DIR/fetch.py" --le-auto-script "v$REMOTE_VERSION"
# Now we drop into Python so we don't have to install even more
# dependencies (curl, etc.), for better flow control, and for the option of
# future Windows compatibility.
"$LE_PYTHON" "$TEMP_DIR/fetch.py" --le-auto-script "v$REMOTE_VERSION"
# Install new copy of certbot-auto.
# TODO: Deal with quotes in pathnames.
say "Replacing certbot-auto..."
# Clone permissions with cp. chmod and chown don't have a --reference
# option on macOS or BSD, and stat -c on Linux is stat -f on macOS and BSD:
cp -p "$0" "$TEMP_DIR/letsencrypt-auto.permission-clone"
cp "$TEMP_DIR/letsencrypt-auto" "$TEMP_DIR/letsencrypt-auto.permission-clone"
# Using mv rather than cp leaves the old file descriptor pointing to the
# original copy so the shell can continue to read it unmolested. mv across
# filesystems is non-atomic, doing `rm dest, cp src dest, rm src`, but the
# cp is unlikely to fail if the rm doesn't.
mv -f "$TEMP_DIR/letsencrypt-auto.permission-clone" "$0"
fi # A newer version is available.
fi
# Install new copy of certbot-auto.
# TODO: Deal with quotes in pathnames.
say "Replacing certbot-auto..."
# Clone permissions with cp. chmod and chown don't have a --reference
# option on macOS or BSD, and stat -c on Linux is stat -f on macOS and BSD:
cp -p "$0" "$TEMP_DIR/letsencrypt-auto.permission-clone"
cp "$TEMP_DIR/letsencrypt-auto" "$TEMP_DIR/letsencrypt-auto.permission-clone"
# Using mv rather than cp leaves the old file descriptor pointing to the
# original copy so the shell can continue to read it unmolested. mv across
# filesystems is non-atomic, doing `rm dest, cp src dest, rm src`, but the
# cp is unlikely to fail if the rm doesn't.
mv -f "$TEMP_DIR/letsencrypt-auto.permission-clone" "$0"
fi # A newer version is available.
fi # Self-upgrading is allowed.
RerunWithArgs --le-auto-phase2 "$@"

View File

@@ -62,7 +62,7 @@ def _setup_primary_node(config):
"""
Setup the environment for integration tests.
Will:
- check runtime compatibility (Docker, docker-compose, Nginx)
- check runtime compatiblity (Docker, docker-compose, Nginx)
- create a temporary workspace and the persistent GIT repositories space
- configure and start parallel ACME CA servers using Docker
- transfer ACME CA server configurations to pytest nodes using env variables

View File

@@ -189,7 +189,7 @@ class ACMEServer(object):
print('=> Finished configuring the HTTP proxy.')
def _launch_process(self, command, cwd=os.getcwd(), env=None):
"""Launch silently a subprocess OS command"""
"""Launch silently an subprocess OS command"""
if not env:
env = os.environ
process = subprocess.Popen(command, stdout=self._stdout, stderr=subprocess.STDOUT, cwd=cwd, env=env)

View File

@@ -40,7 +40,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
@@ -49,6 +49,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -1,38 +0,0 @@
"""
General conftest for pytest execution of all integration tests lying
in the windows_installer_integration tests package.
As stated by the pytest documentation, a conftest module is used to set
a specific configuration for a directory using built-in pytest hooks.
See https://docs.pytest.org/en/latest/reference.html#hook-reference
"""
from __future__ import print_function
import os
import pytest
ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
def pytest_addoption(parser):
"""
Standard pytest hook to add options to the pytest parser.
:param parser: current pytest parser that will be used on the CLI
"""
parser.addoption('--installer-path',
default=os.path.join(ROOT_PATH, 'windows-installer', 'build',
'nsis', 'certbot-beta-installer-win32.exe'),
help='set the path of the windows installer to use, default to '
'CERTBOT_ROOT_PATH\\windows-installer\\build\\nsis\\certbot-beta-installer-win32.exe')
parser.addoption('--allow-persistent-changes', action='store_true',
help='needs to be set, and confirm that the test will make persistent changes on this machine')
def pytest_configure(config):
"""
Standard pytest hook used to add a configuration logic for each node of a pytest run.
:param config: the current pytest configuration
"""
if not config.option.allow_persistent_changes:
raise RuntimeError('This integration test would install Certbot on your machine. '
'Please run it again with the `--allow-persistent-changes` flag set to acknowledge.')

View File

@@ -1,61 +0,0 @@
import os
import time
import unittest
import subprocess
import re
@unittest.skipIf(os.name != 'nt', reason='Windows installer tests must be run on Windows.')
def test_it(request):
try:
subprocess.check_call(['certbot', '--version'])
except (subprocess.CalledProcessError, OSError):
pass
else:
raise AssertionError('Expect certbot to not be available in the PATH.')
try:
# Install certbot
subprocess.check_call([request.config.option.installer_path, '/S'])
# Assert certbot is installed and runnable
output = subprocess.check_output(['certbot', '--version'], universal_newlines=True)
assert re.match(r'^certbot \d+\.\d+\.\d+.*$', output), 'Flag --version does not output a version.'
# Assert renew task is installed and ready
output = _ps('(Get-ScheduledTask -TaskName "Certbot Renew Task").State', capture_stdout=True)
assert output.strip() == 'Ready'
# Assert renew task is working
now = time.time()
_ps('Start-ScheduledTask -TaskName "Certbot Renew Task"')
status = 'Running'
while status != 'Ready':
status = _ps('(Get-ScheduledTask -TaskName "Certbot Renew Task").State', capture_stdout=True).strip()
time.sleep(1)
log_path = os.path.join('C:\\', 'Certbot', 'log', 'letsencrypt.log')
modification_time = os.path.getmtime(log_path)
assert now < modification_time, 'Certbot log file has not been modified by the renew task.'
with open(log_path) as file_h:
data = file_h.read()
assert 'no renewal failures' in data, 'Renew task did not execute properly.'
finally:
# Sadly this command cannot work in non-interactive mode: the uninstaller will explicitly ask for permission in a UAC prompt
# print('Uninstalling Certbot ...')
# uninstall_path = _ps('(gci "HKLM:\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\Uninstall"'
# ' | foreach { gp $_.PSPath }'
# ' | ? { $_ -match "Certbot" }'
# ' | select UninstallString)'
# '.UninstallString', capture_stdout=True)
# subprocess.check_call([uninstall_path, '/S'])
pass
def _ps(powershell_str, capture_stdout=False):
fn = subprocess.check_output if capture_stdout else subprocess.check_call
return fn(['powershell.exe', '-c', powershell_str], universal_newlines=True)

View File

@@ -39,7 +39,7 @@ class ValidatorTest(unittest.TestCase):
cert, "test.com", "127.0.0.1"))
@mock.patch("certbot_compatibility_test.validator.requests.get")
def test_successful_redirect(self, mock_get_request):
def test_succesful_redirect(self, mock_get_request):
mock_get_request.return_value = create_response(
301, {"location": "https://test.com"})
self.assertTrue(self.validator.redirect("test.com"))

View File

@@ -3,7 +3,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.3.0.dev0'
version = '1.1.0.dev0'
install_requires = [
'certbot',
@@ -28,7 +28,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
@@ -37,6 +37,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -22,40 +22,17 @@ Credentials
Use of this plugin requires a configuration file containing Cloudflare API
credentials, obtained from your Cloudflare
`account page <https://dash.cloudflare.com/profile/api-tokens>`_.
Previously, Cloudflare's "Global API Key" was used for authentication; however,
this key can access the entire Cloudflare API for all domains in your account,
meaning it could cause a lot of damage if leaked.
Cloudflare's newer API Tokens can be restricted to specific domains and
operations, and are therefore now the recommended authentication option.
However, due to some shortcomings in Cloudflare's implementation of Tokens,
Tokens created for Certbot currently require ``Zone:Zone:Read`` and ``Zone:DNS:Edit``
permissions for **all** zones in your account. While this is not ideal, your Token
will still have fewer permissions than the Global key, so it's still worth doing.
Hopefully Cloudflare will improve this in the future.
Using Cloudflare Tokens also requires at least version 2.3.1 of the ``cloudflare``
python module. If the version that was automatically installed with this plugin is
older than that, and you can't upgrade it on your system, you'll have to stick to
the Global key.
`account page <https://www.cloudflare.com/a/account/my-account>`_. This plugin
does not currently support Cloudflare's "API Tokens", so please ensure you use
the "Global API Key" for authentication.
.. code-block:: ini
:name: certbot_cloudflare_token.ini
:caption: Example credentials file using restricted API Token (recommended):
# Cloudflare API token used by Certbot
dns_cloudflare_api_token = 0123456789abcdef0123456789abcdef01234567
.. code-block:: ini
:name: certbot_cloudflare_key.ini
:caption: Example credentials file using Global API Key (not recommended):
:name: credentials.ini
:caption: Example credentials file:
# Cloudflare API credentials used by Certbot
dns_cloudflare_email = cloudflare@example.com
dns_cloudflare_api_key = 0123456789abcdef0123456789abcdef01234
dns_cloudflare_api_key = 0123456789abcdef0123456789abcdef01234567
The path to this file can be provided interactively or using the
``--dns-cloudflare-credentials`` command-line argument. Certbot records the path

View File

@@ -4,10 +4,6 @@ import logging
import CloudFlare
import zope.interface
from acme.magic_typing import Any
from acme.magic_typing import Dict
from acme.magic_typing import List
from certbot import errors
from certbot import interfaces
from certbot.plugins import dns_common
@@ -42,35 +38,14 @@ class Authenticator(dns_common.DNSAuthenticator):
return 'This plugin configures a DNS TXT record to respond to a dns-01 challenge using ' + \
'the Cloudflare API.'
def _validate_credentials(self, credentials):
token = credentials.conf('api-token')
email = credentials.conf('email')
key = credentials.conf('api-key')
if token:
if email or key:
raise errors.PluginError('{}: dns_cloudflare_email and dns_cloudflare_api_key are '
'not needed when using an API Token'
.format(credentials.confobj.filename))
elif email or key:
if not email:
raise errors.PluginError('{}: dns_cloudflare_email is required when using a Global '
'API Key. (should be email address associated with '
'Cloudflare account)'.format(credentials.confobj.filename))
if not key:
raise errors.PluginError('{}: dns_cloudflare_api_key is required when using a '
'Global API Key. (see {})'
.format(credentials.confobj.filename, ACCOUNT_URL))
else:
raise errors.PluginError('{}: Either dns_cloudflare_api_token (recommended), or '
'dns_cloudflare_email and dns_cloudflare_api_key are required.'
' (see {})'.format(credentials.confobj.filename, ACCOUNT_URL))
def _setup_credentials(self):
self.credentials = self._configure_credentials(
'credentials',
'Cloudflare credentials INI file',
None,
self._validate_credentials
{
'email': 'email address associated with Cloudflare account',
'api-key': 'API key for Cloudflare account, obtained from {0}'.format(ACCOUNT_URL)
}
)
def _perform(self, domain, validation_name, validation):
@@ -80,8 +55,6 @@ class Authenticator(dns_common.DNSAuthenticator):
self._get_cloudflare_client().del_txt_record(domain, validation_name, validation)
def _get_cloudflare_client(self):
if self.credentials.conf('api-token'):
return _CloudflareClient(None, self.credentials.conf('api-token'))
return _CloudflareClient(self.credentials.conf('email'), self.credentials.conf('api-key'))
@@ -115,15 +88,8 @@ class _CloudflareClient(object):
logger.debug('Attempting to add record to zone %s: %s', zone_id, data)
self.cf.zones.dns_records.post(zone_id, data=data) # zones | pylint: disable=no-member
except CloudFlare.exceptions.CloudFlareAPIError as e:
code = int(e)
hint = None
if code == 9109:
hint = 'Does your API token have "Zone:DNS:Edit" permissions?'
logger.error('Encountered CloudFlareAPIError adding TXT record: %d %s', e, e)
raise errors.PluginError('Error communicating with the Cloudflare API: {0}{1}'
.format(e, ' ({0})'.format(hint) if hint else ''))
raise errors.PluginError('Error communicating with the Cloudflare API: {0}'.format(e))
record_id = self._find_txt_record_id(zone_id, record_name, record_content)
logger.debug('Successfully added TXT record with record_id: %s', record_id)
@@ -173,8 +139,6 @@ class _CloudflareClient(object):
"""
zone_name_guesses = dns_common.base_domain_name_guesses(domain)
zones = [] # type: List[Dict[str, Any]]
code = msg = None
for zone_name in zone_name_guesses:
params = {'name': zone_name,
@@ -184,26 +148,16 @@ class _CloudflareClient(object):
zones = self.cf.zones.get(params=params) # zones | pylint: disable=no-member
except CloudFlare.exceptions.CloudFlareAPIError as e:
code = int(e)
msg = str(e)
hint = None
if code == 6003:
hint = ('Did you copy your entire API token/key? To use Cloudflare tokens, '
'you\'ll need the python package cloudflare>=2.3.1.{}'
.format(' This certbot is running cloudflare ' + str(CloudFlare.__version__)
if hasattr(CloudFlare, '__version__') else ''))
hint = 'Did you copy your entire API key?'
elif code == 9103:
hint = 'Did you enter the correct email address and Global key?'
elif code == 9109:
hint = 'Did you enter a valid Cloudflare Token?'
hint = 'Did you enter the correct email address?'
if hint:
raise errors.PluginError('Error determining zone_id: {0} {1}. Please confirm '
'that you have supplied valid Cloudflare API credentials. ({2})'
.format(code, msg, hint))
else:
logger.debug('Unrecognised CloudFlareAPIError while finding zone_id: %d %s. '
'Continuing with next zone guess...', e, e)
raise errors.PluginError('Error determining zone_id: {0} {1}. Please confirm that '
'you have supplied valid Cloudflare API credentials.{2}'
.format(code, e, ' ({0})'.format(hint) if hint else ''))
if zones:
zone_id = zones[0]['id']
@@ -211,10 +165,9 @@ class _CloudflareClient(object):
return zone_id
raise errors.PluginError('Unable to determine zone_id for {0} using zone names: {1}. '
'Please confirm that the domain name has been entered correctly '
'and is already associated with the supplied Cloudflare account.{2}'
.format(domain, zone_name_guesses, ' The error from Cloudflare was:'
' {0} {1}'.format(code, msg) if code is not None else ''))
'Please confirm that the domain name has been entered correctly '
'and is already associated with the supplied Cloudflare account.'
.format(domain, zone_name_guesses))
def _find_txt_record_id(self, zone_id, record_name, record_content):
"""

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'cloudflare>=1.5.1',
'mock',
'setuptools',
@@ -44,7 +44,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -55,6 +55,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -12,9 +12,6 @@ from certbot.plugins.dns_test_common import DOMAIN
from certbot.tests import util as test_util
API_ERROR = CloudFlare.exceptions.CloudFlareAPIError(1000, '', '')
API_TOKEN = 'an-api-token'
API_KEY = 'an-api-key'
EMAIL = 'example@example.com'
@@ -52,50 +49,6 @@ class AuthenticatorTest(test_util.TempDirTestCase, dns_test_common.BaseAuthentic
expected = [mock.call.del_txt_record(DOMAIN, '_acme-challenge.'+DOMAIN, mock.ANY)]
self.assertEqual(expected, self.mock_client.mock_calls)
def test_api_token(self):
dns_test_common.write({"cloudflare_api_token": API_TOKEN},
self.config.cloudflare_credentials)
self.auth.perform([self.achall])
expected = [mock.call.add_txt_record(DOMAIN, '_acme-challenge.'+DOMAIN, mock.ANY, mock.ANY)]
self.assertEqual(expected, self.mock_client.mock_calls)
def test_no_creds(self):
dns_test_common.write({}, self.config.cloudflare_credentials)
self.assertRaises(errors.PluginError,
self.auth.perform,
[self.achall])
def test_missing_email_or_key(self):
dns_test_common.write({"cloudflare_api_key": API_KEY}, self.config.cloudflare_credentials)
self.assertRaises(errors.PluginError,
self.auth.perform,
[self.achall])
dns_test_common.write({"cloudflare_email": EMAIL}, self.config.cloudflare_credentials)
self.assertRaises(errors.PluginError,
self.auth.perform,
[self.achall])
def test_email_or_key_with_token(self):
dns_test_common.write({"cloudflare_api_token": API_TOKEN, "cloudflare_email": EMAIL},
self.config.cloudflare_credentials)
self.assertRaises(errors.PluginError,
self.auth.perform,
[self.achall])
dns_test_common.write({"cloudflare_api_token": API_TOKEN, "cloudflare_api_key": API_KEY},
self.config.cloudflare_credentials)
self.assertRaises(errors.PluginError,
self.auth.perform,
[self.achall])
dns_test_common.write({"cloudflare_api_token": API_TOKEN, "cloudflare_email": EMAIL,
"cloudflare_api_key": API_KEY}, self.config.cloudflare_credentials)
self.assertRaises(errors.PluginError,
self.auth.perform,
[self.achall])
class CloudflareClientTest(unittest.TestCase):
record_name = "foo"
@@ -130,7 +83,7 @@ class CloudflareClientTest(unittest.TestCase):
def test_add_txt_record_error(self):
self.cf.zones.get.return_value = [{'id': self.zone_id}]
self.cf.zones.dns_records.post.side_effect = CloudFlare.exceptions.CloudFlareAPIError(9109, '', '')
self.cf.zones.dns_records.post.side_effect = API_ERROR
self.assertRaises(
errors.PluginError,
@@ -153,25 +106,6 @@ class CloudflareClientTest(unittest.TestCase):
self.cloudflare_client.add_txt_record,
DOMAIN, self.record_name, self.record_content, self.record_ttl)
def test_add_txt_record_bad_creds(self):
self.cf.zones.get.side_effect = CloudFlare.exceptions.CloudFlareAPIError(6003, '', '')
self.assertRaises(
errors.PluginError,
self.cloudflare_client.add_txt_record,
DOMAIN, self.record_name, self.record_content, self.record_ttl)
self.cf.zones.get.side_effect = CloudFlare.exceptions.CloudFlareAPIError(9103, '', '')
self.assertRaises(
errors.PluginError,
self.cloudflare_client.add_txt_record,
DOMAIN, self.record_name, self.record_content, self.record_ttl)
self.cf.zones.get.side_effect = CloudFlare.exceptions.CloudFlareAPIError(9109, '', '')
self.assertRaises(
errors.PluginError,
self.cloudflare_client.add_txt_record,
DOMAIN, self.record_name, self.record_content, self.record_ttl)
def test_del_txt_record(self):
self.cf.zones.get.return_value = [{'id': self.zone_id}]
self.cf.zones.dns_records.get.return_value = [{'id': self.record_id}]

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.31.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.31.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'dns-lexicon>=2.2.1', # Support for >1 TXT record per name
'mock',
'setuptools',
@@ -44,7 +44,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -55,6 +55,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'mock',
'python-digitalocean>=1.11',
'setuptools',
@@ -45,7 +45,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -56,6 +56,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.31.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -5,13 +5,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.31.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'mock',
'setuptools',
'zope.interface',
@@ -56,7 +56,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -67,6 +67,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.31.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.31.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'dns-lexicon>=2.2.1', # Support for >1 TXT record per name
'mock',
'setuptools',
@@ -44,7 +44,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -55,6 +55,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -1 +1 @@
Gehirn Infrastructure Service DNS Authenticator plugin for Certbot
Gehirn Infrastracture Service DNS Authenticator plugin for Certbot

View File

@@ -1,14 +1,14 @@
"""
The `~certbot_dns_gehirn.dns_gehirn` plugin automates the process of completing
a ``dns-01`` challenge (`~acme.challenges.DNS01`) by creating, and subsequently
removing, TXT records using the Gehirn Infrastructure Service DNS API.
removing, TXT records using the Gehirn Infrastracture Service DNS API.
Named Arguments
---------------
======================================== =====================================
``--dns-gehirn-credentials`` Gehirn Infrastructure Service
``--dns-gehirn-credentials`` Gehirn Infrastracture Service
credentials_ INI file.
(Required)
``--dns-gehirn-propagation-seconds`` The number of seconds to wait for DNS
@@ -22,15 +22,15 @@ Credentials
-----------
Use of this plugin requires a configuration file containing
Gehirn Infrastructure Service DNS API credentials,
obtained from your Gehirn Infrastructure Service
Gehirn Infrastracture Service DNS API credentials,
obtained from your Gehirn Infrastracture Service
`dashboard <https://gis.gehirn.jp/>`_.
.. code-block:: ini
:name: credentials.ini
:caption: Example credentials file:
# Gehirn Infrastructure Service API credentials used by Certbot
# Gehirn Infrastracture Service API credentials used by Certbot
dns_gehirn_api_token = 00000000-0000-0000-0000-000000000000
dns_gehirn_api_secret = MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAw
@@ -40,7 +40,7 @@ to this file for use during renewal, but does not store the file's contents.
.. caution::
You should protect these API credentials as you would the password to your
Gehirn Infrastructure Service account. Users who can read this file can use
Gehirn Infrastracture Service account. Users who can read this file can use
these credentials to issue arbitrary API calls on your behalf. Users who can
cause Certbot to run using these credentials can complete a ``dns-01``
challenge to acquire new certificates or revoke existing certificates for

View File

@@ -1,4 +1,4 @@
"""DNS Authenticator for Gehirn Infrastructure Service DNS."""
"""DNS Authenticator for Gehirn Infrastracture Service DNS."""
import logging
from lexicon.providers import gehirn
@@ -15,14 +15,14 @@ DASHBOARD_URL = "https://gis.gehirn.jp/"
@zope.interface.implementer(interfaces.IAuthenticator)
@zope.interface.provider(interfaces.IPluginFactory)
class Authenticator(dns_common.DNSAuthenticator):
"""DNS Authenticator for Gehirn Infrastructure Service DNS
"""DNS Authenticator for Gehirn Infrastracture Service DNS
This Authenticator uses the Gehirn Infrastructure Service API to fulfill
This Authenticator uses the Gehirn Infrastracture Service API to fulfill
a dns-01 challenge.
"""
description = 'Obtain certificates using a DNS TXT record ' + \
'(if you are using Gehirn Infrastructure Service for DNS).'
'(if you are using Gehirn Infrastracture Service for DNS).'
ttl = 60
def __init__(self, *args, **kwargs):
@@ -32,20 +32,20 @@ class Authenticator(dns_common.DNSAuthenticator):
@classmethod
def add_parser_arguments(cls, add): # pylint: disable=arguments-differ
super(Authenticator, cls).add_parser_arguments(add, default_propagation_seconds=30)
add('credentials', help='Gehirn Infrastructure Service credentials file.')
add('credentials', help='Gehirn Infrastracture Service credentials file.')
def more_info(self): # pylint: disable=missing-docstring,no-self-use
return 'This plugin configures a DNS TXT record to respond to a dns-01 challenge using ' + \
'the Gehirn Infrastructure Service API.'
'the Gehirn Infrastracture Service API.'
def _setup_credentials(self):
self.credentials = self._configure_credentials(
'credentials',
'Gehirn Infrastructure Service credentials file',
'Gehirn Infrastracture Service credentials file',
{
'api-token': 'API token for Gehirn Infrastructure Service ' + \
'api-token': 'API token for Gehirn Infrastracture Service ' + \
'API obtained from {0}'.format(DASHBOARD_URL),
'api-secret': 'API secret for Gehirn Infrastructure Service ' + \
'api-secret': 'API secret for Gehirn Infrastracture Service ' + \
'API obtained from {0}'.format(DASHBOARD_URL),
}
)
@@ -66,7 +66,7 @@ class Authenticator(dns_common.DNSAuthenticator):
class _GehirnLexiconClient(dns_common_lexicon.LexiconClient):
"""
Encapsulates all communication with the Gehirn Infrastructure Service via Lexicon.
Encapsulates all communication with the Gehirn Infrastracture Service via Lexicon.
"""
def __init__(self, api_token, api_secret, ttl):

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.31.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,12 +4,12 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Please update tox.ini when modifying dependency version requirements
install_requires = [
'acme>=0.31.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'dns-lexicon>=2.1.22',
'mock',
'setuptools',
@@ -38,12 +38,12 @@ class PyTest(TestCommand):
setup(
name='certbot-dns-gehirn',
version=version,
description="Gehirn Infrastructure Service DNS Authenticator plugin for Certbot",
description="Gehirn Infrastracture Service DNS Authenticator plugin for Certbot",
url='https://github.com/certbot/certbot',
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -54,6 +54,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -39,7 +39,7 @@ extensions = ['sphinx.ext.autodoc',
'jsonlexer']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -85,7 +85,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'google-api-python-client>=1.5.5',
'mock',
'oauth2client>=4.0',
@@ -47,7 +47,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -58,6 +58,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,4 +1,4 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.31.0
certbot[dev]==1.1.0
-e certbot[dev]
dns-lexicon==2.2.3

View File

@@ -4,12 +4,12 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Please update tox.ini when modifying dependency version requirements
install_requires = [
'acme>=0.31.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'dns-lexicon>=2.2.3',
'mock',
'setuptools',
@@ -43,7 +43,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -54,6 +54,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.31.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.31.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'dns-lexicon>=2.2.1', # Support for >1 TXT record per name
'mock',
'setuptools',
@@ -44,7 +44,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -55,6 +55,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.31.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.31.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'dns-lexicon>=2.2.1', # Support for >1 TXT record per name
'mock',
'setuptools',
@@ -44,7 +44,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -55,6 +55,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,4 +1,4 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.31.0
certbot[dev]==1.1.0
-e certbot[dev]
dns-lexicon==2.7.14

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.31.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'dns-lexicon>=2.7.14', # Correct proxy use on OVH provider
'mock',
'setuptools',
@@ -44,7 +44,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -55,6 +55,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -129,7 +129,7 @@ class _RFC2136Client(object):
rcode = response.rcode()
if rcode == dns.rcode.NOERROR:
logger.debug('Successfully added TXT record %s', record_name)
logger.debug('Successfully added TXT record')
else:
raise errors.PluginError('Received response from server: {0}'
.format(dns.rcode.to_text(rcode)))
@@ -164,7 +164,7 @@ class _RFC2136Client(object):
rcode = response.rcode()
if rcode == dns.rcode.NOERROR:
logger.debug('Successfully deleted TXT record %s', record_name)
logger.debug('Successfully deleted TXT record')
else:
raise errors.PluginError('Received response from server: {0}'
.format(dns.rcode.to_text(rcode)))
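For readers unfamiliar with the dnspython calls in this hunk: the RFC 2136 client sends a dynamic update and then inspects the response's rcode, raising on anything other than NOERROR. A self-contained sketch of that pattern, with placeholder zone, server, and TSIG values (illustrative only, not the plugin's actual method):

```python
import dns.query
import dns.rcode
import dns.tsigkeyring
import dns.update

# Placeholder TSIG key material and server address, for illustration only.
keyring = dns.tsigkeyring.from_text({'example-key.': 'c2VjcmV0IGtleSBtYXRlcmlhbA=='})

update = dns.update.Update('example.com', keyring=keyring)
update.add('_acme-challenge.example.com.', 60, 'TXT', 'validation-token')

response = dns.query.tcp(update, '192.0.2.1', timeout=10)
rcode = response.rcode()
if rcode != dns.rcode.NOERROR:
    raise RuntimeError('Received response from server: {0}'.format(dns.rcode.to_text(rcode)))
```

The logging change above only affects what is written on the success path; the rcode check and the PluginError on failure are unchanged.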

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'dnspython',
'mock',
'setuptools',
@@ -44,7 +44,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -55,6 +55,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,13 +4,13 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'boto3',
'mock',
'setuptools',
@@ -39,7 +39,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -50,6 +50,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -38,7 +38,7 @@ extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -84,7 +84,7 @@ default_role = 'py:obj'
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------

View File

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.31.0
certbot[dev]==1.1.0
-e certbot[dev]

View File

@@ -4,12 +4,12 @@ from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
version = '1.3.0.dev0'
version = '1.1.0.dev0'
# Please update tox.ini when modifying dependency version requirements
install_requires = [
'acme>=0.31.0',
'certbot>=1.1.0',
'certbot>=1.0.0.dev0',
'dns-lexicon>=2.1.23',
'mock',
'setuptools',
@@ -43,7 +43,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -54,6 +54,7 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',

View File

@@ -313,6 +313,9 @@ class NginxConfigurator(common.Installer):
.. todo:: This should maybe return list if no obvious answer
is presented.
.. todo:: The special name "$hostname" corresponds to the machine's
hostname. Currently we just ignore this.
:param str target_name: domain name
:param bool create_if_no_match: If we should create a new vhost from default
when there is no match found. If we can't choose a default, raise a
@@ -595,12 +598,6 @@ class NginxConfigurator(common.Installer):
all_names = set() # type: Set[str]
for vhost in self.parser.get_vhosts():
try:
vhost.names.remove("$hostname")
vhost.names.add(socket.gethostname())
except KeyError:
pass
all_names.update(vhost.names)
for addr in vhost.addrs:
@@ -1011,7 +1008,7 @@ class NginxConfigurator(common.Installer):
matches = re.findall(r"built with OpenSSL ([^ ]+) ", text)
if not matches:
logger.warning("NGINX configured with OpenSSL alternatives is not officially"
" supported by Certbot.")
"supported by Certbot.")
return ""
return matches[0]
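The hunk above touches how nginx's special "$hostname" server_name is handled when collecting vhost names: one side of the diff substitutes the machine's real hostname, the other simply ignores the entry (per the todo). A minimal sketch of the substitution idea (illustrative only; the configurator applies it while iterating parsed vhosts):

```python
import socket

def resolve_special_names(names):
    """Replace the literal "$hostname" entry with the machine's hostname."""
    resolved = set(names)
    if "$hostname" in resolved:
        resolved.discard("$hostname")
        resolved.add(socket.gethostname())
    return resolved

# Example: {"example.com", "$hostname"} -> {"example.com", "<this-machine>"}
print(resolve_special_names({"example.com", "$hostname"}))
```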

View File

@@ -10,4 +10,4 @@ ssl_session_timeout 1440m;
ssl_protocols TLSv1.2;
ssl_prefer_server_ciphers off;
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384";
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA";

View File

@@ -11,4 +11,4 @@ ssl_session_tickets off;
ssl_protocols TLSv1.2;
ssl_prefer_server_ciphers off;
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384";
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA";

View File

@@ -10,4 +10,4 @@ ssl_session_timeout 1440m;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384";
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA";

View File

@@ -11,4 +11,4 @@ ssl_session_tickets off;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384";
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA";

Some files were not shown because too many files have changed in this diff.