Compare commits


2 Commits

Author       SHA1        Message                      Date
Brad Warren   2743fb1686  test windows installer only  2021-04-05 10:48:57 -07:00
Brad Warren   3487623bc0  Upgrade Python to 3.8.9.     2021-04-05 10:46:23 -07:00
501 changed files with 16475 additions and 14083 deletions


@@ -4,44 +4,71 @@ jobs:
- name: IMAGE_NAME
value: ubuntu-18.04
- name: PYTHON_VERSION
value: 3.10
value: 3.9
- group: certbot-common
strategy:
matrix:
linux-py36:
PYTHON_VERSION: 3.6
TOXENV: py36
linux-py37:
PYTHON_VERSION: 3.7
TOXENV: py37
linux-py38:
PYTHON_VERSION: 3.8
TOXENV: py38
linux-py39:
PYTHON_VERSION: 3.9
TOXENV: py39
linux-py37-nopin:
PYTHON_VERSION: 3.7
TOXENV: py37
CERTBOT_NO_PIN: 1
linux-external-mock:
TOXENV: external-mock
linux-boulder-v1-integration-certbot-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-certbot-oldest
ACME_SERVER: boulder-v1
linux-boulder-v2-integration-certbot-oldest:
PYTHON_VERSION: 3.7
PYTHON_VERSION: 3.6
TOXENV: integration-certbot-oldest
ACME_SERVER: boulder-v2
linux-boulder-v1-integration-nginx-oldest:
PYTHON_VERSION: 3.6
TOXENV: integration-nginx-oldest
ACME_SERVER: boulder-v1
linux-boulder-v2-integration-nginx-oldest:
PYTHON_VERSION: 3.7
PYTHON_VERSION: 3.6
TOXENV: integration-nginx-oldest
ACME_SERVER: boulder-v2
linux-boulder-v1-py36-integration:
PYTHON_VERSION: 3.6
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py36-integration:
PYTHON_VERSION: 3.6
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py37-integration:
PYTHON_VERSION: 3.7
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py37-integration:
PYTHON_VERSION: 3.7
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v1-py38-integration:
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: boulder-v1
linux-boulder-v2-py38-integration:
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v2-py39-integration:
linux-boulder-v1-py39-integration:
PYTHON_VERSION: 3.9
TOXENV: integration
ACME_SERVER: boulder-v2
linux-boulder-v2-py310-integration:
PYTHON_VERSION: 3.10
ACME_SERVER: boulder-v1
linux-boulder-v2-py39-integration:
PYTHON_VERSION: 3.9
TOXENV: integration
ACME_SERVER: boulder-v2
nginx-compat:
@@ -52,12 +79,21 @@ jobs:
TOXENV: integration-dns-rfc2136
docker-dev:
TOXENV: docker_dev
le-modification:
IMAGE_NAME: ubuntu-18.04
TOXENV: modification
farmtest-apache2:
macos-farmtest-apache2:
# We run one of these test farm tests on macOS to help ensure the
# tests continue to work on the platform.
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.8
TOXENV: test-farm-apache2
farmtest-leauto-upgrades:
PYTHON_VERSION: 3.7
TOXENV: test-farm-leauto-upgrades
farmtest-certonly-standalone:
PYTHON_VERSION: 3.7
TOXENV: test-farm-certonly-standalone
farmtest-sdists:
PYTHON_VERSION: 3.7
TOXENV: test-farm-sdists
pool:
vmImage: $(IMAGE_NAME)
steps:


@@ -1,66 +1,12 @@
jobs:
- job: docker_build
pool:
vmImage: ubuntu-18.04
strategy:
matrix:
amd64:
DOCKER_ARCH: amd64
# Do not run the heavy non-amd64 builds for test branches
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
# The default timeout of 60 minutes is a little low for compiling
# cryptography on ARM architectures.
timeoutInMinutes: 180
steps:
- bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
displayName: Build the Docker images
# We don't filter for the Docker Hub organization to continue to allow
# easy testing of these scripts on forks.
- bash: |
set -e
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
docker save --output images.tar $DOCKER_IMAGES
displayName: Save the Docker images
# If the name of the tar file or artifact changes, the deploy stage will
# also need to be updated.
- bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
displayName: Prepare Docker artifact
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: docker_$(DOCKER_ARCH)
displayName: Store Docker artifact
- job: docker_run
dependsOn: docker_build
pool:
vmImage: ubuntu-18.04
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker_amd64
path: $(Build.SourcesDirectory)
displayName: Retrieve Docker images
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
displayName: Load Docker images
- bash: |
set -ex
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
for DOCKER_IMAGE in ${DOCKER_IMAGES}
do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
done
displayName: Run integration tests for Docker images
- job: installer_build
pool:
vmImage: windows-2019
vmImage: vs2017-win2016
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.9
architecture: x64
versionSpec: 3.8
architecture: x86
addToPath: true
- script: |
python -m venv venv
@@ -87,12 +33,20 @@ jobs:
matrix:
win2019:
imageName: windows-2019
win2016:
imageName: vs2017-win2016
pool:
vmImage: $(imageName)
steps:
- powershell: |
if ($PSVersionTable.PSVersion.Major -ne 5) {
throw "Powershell version is not 5.x"
}
condition: eq(variables['imageName'], 'vs2017-win2016')
displayName: Check Powershell 5.x is used in vs2017-win2016
- task: UsePythonVersion@0
inputs:
versionSpec: 3.9
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
@@ -107,116 +61,10 @@ jobs:
PIP_NO_BUILD_ISOLATION: no
displayName: Prepare Certbot-CI
- script: |
set PATH=%ProgramFiles%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win_amd64.exe
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
displayName: Run windows installer integration tests
- script: |
set PATH=%ProgramFiles%\Certbot\bin;%PATH%
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
displayName: Run certbot integration tests
- job: snaps_build
pool:
vmImage: ubuntu-18.04
strategy:
matrix:
amd64:
SNAP_ARCH: amd64
# Do not run the heavy non-amd64 builds for test branches
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
armhf:
SNAP_ARCH: armhf
arm64:
SNAP_ARCH: arm64
timeoutInMinutes: 0
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
sudo snap install --classic snapcraft
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadSecureFile@1
name: credentials
inputs:
secureFile: launchpad-credentials
- script: |
set -e
git config --global user.email "$(Build.RequestedForEmail)"
git config --global user.name "$(Build.RequestedFor)"
mkdir -p ~/.local/share/snapcraft/provider/launchpad
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
python3 tools/snap/build_remote.py ALL --archs ${SNAP_ARCH} --timeout 19800
displayName: Build snaps
- script: |
set -e
mv *.snap $(Build.ArtifactStagingDirectory)
mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
displayName: Prepare artifacts
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: snaps_$(SNAP_ARCH)
displayName: Store snaps artifacts
- job: snap_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-18.04
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends nginx-light snapd
python3 -m venv venv
venv/bin/python tools/pipstrap.py
venv/bin/python tools/pip_install.py -U tox
displayName: Install dependencies
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_amd64
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- script: |
set -e
sudo snap install --dangerous --classic snap/certbot_*.snap
displayName: Install Certbot snap
- script: |
set -e
venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
displayName: Run tox
- job: snap_dns_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-18.04
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
displayName: Install dependencies
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_amd64
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- script: |
set -e
python3 -m venv venv
venv/bin/python tools/pipstrap.py
venv/bin/python tools/pip_install.py -e certbot-ci
displayName: Prepare Certbot-CI
- script: |
set -e
sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
displayName: Test DNS plugins snaps


@@ -1,53 +1,53 @@
jobs:
- job: test
variables:
PYTHON_VERSION: 3.10
PYTHON_VERSION: 3.9
strategy:
matrix:
macos-py37-cover:
macos-py36:
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.7
TOXENV: py37-cover
macos-py310-cover:
PYTHON_VERSION: 3.6
TOXENV: py36
macos-py39:
IMAGE_NAME: macOS-10.15
PYTHON_VERSION: 3.10
TOXENV: py310-cover
windows-py37:
IMAGE_NAME: windows-2019
PYTHON_VERSION: 3.7
TOXENV: py37-win
windows-py39-cover:
IMAGE_NAME: windows-2019
PYTHON_VERSION: 3.9
TOXENV: py39-cover-win
TOXENV: py39
windows-py36:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.6
TOXENV: py36
windows-py38-cover:
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.8
TOXENV: py38-cover
windows-integration-certbot:
IMAGE_NAME: windows-2019
PYTHON_VERSION: 3.9
IMAGE_NAME: vs2017-win2016
PYTHON_VERSION: 3.8
TOXENV: integration-certbot
linux-oldest-tests-1:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.7
PYTHON_VERSION: 3.6
TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
linux-oldest-tests-2:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.7
PYTHON_VERSION: 3.6
TOXENV: '{dns,nginx}-oldest'
linux-py37:
linux-py36:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.7
TOXENV: py37
linux-py310-cover:
PYTHON_VERSION: 3.6
TOXENV: py36
linux-py39-cover:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.10
TOXENV: py310-cover
linux-py310-lint:
PYTHON_VERSION: 3.9
TOXENV: py39-cover
linux-py39-lint:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.10
TOXENV: lint-posix
linux-py310-mypy:
PYTHON_VERSION: 3.9
TOXENV: lint
linux-py39-mypy:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.10
TOXENV: mypy-posix
PYTHON_VERSION: 3.9
TOXENV: mypy
linux-integration:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.8
@@ -56,11 +56,16 @@ jobs:
apache-compat:
IMAGE_NAME: ubuntu-18.04
TOXENV: apache_compat
le-modification:
IMAGE_NAME: ubuntu-18.04
TOXENV: modification
apacheconftest:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: apacheconftest-with-pebble
nginxroundtrip:
IMAGE_NAME: ubuntu-18.04
PYTHON_VERSION: 3.6
TOXENV: nginxroundtrip
pool:
vmImage: $(IMAGE_NAME)


@@ -3,7 +3,7 @@ stages:
jobs:
- job: prepare
pool:
vmImage: windows-2019
vmImage: vs2017-win2016
steps:
# If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
- bash: |


@@ -19,12 +19,11 @@ stages:
# Then the file was added as a secure file in Azure pipelines
# with the name snapcraft.cfg by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
# including authorizing the file for use in the "nightly" and "release"
# pipelines as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#q-how-do-i-authorize-a-secure-file-for-use-in-a-specific-pipeline.
# including authorizing the file in all pipelines as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#how-do-i-authorize-a-secure-file-for-use-in-all-pipelines.
#
# This file has a maximum lifetime of one year and the current
# file will expire on 2022-07-25 which is also tracked by
# file will expire on 2021-07-28 which is also tracked by
# https://github.com/certbot/certbot/issues/7931. The file will
# need to be updated before then to prevent automated deploys
# from breaking.


@@ -1,6 +1,4 @@
stages:
- stage: TestAndPackage
jobs:
- template: ../jobs/standard-tests-jobs.yml
- template: ../jobs/extended-tests-jobs.yml
- template: ../jobs/packaging-jobs.yml


@@ -1,16 +1,15 @@
steps:
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends libaugeas0
FINAL_STATUS=0
declare -a FAILED_BUILDS
tools/venv.py
source venv/bin/activate
python3 -m venv .venv
source .venv/bin/activate
python tools/pipstrap.py
for doc_path in */docs
do
echo ""
echo "##[group]Building $doc_path"
pip install -q -e $doc_path/..[docs]
if ! sphinx-build -W --keep-going -b html $doc_path $doc_path/_build/html; then
FINAL_STATUS=1
FAILED_BUILDS[${#FAILED_BUILDS[@]}]="${doc_path%/docs}"


@@ -1,10 +1,6 @@
steps:
# We run brew update because we've seen attempts to install an older version
# of a package fail. See
# https://github.com/actions/virtual-environments/issues/3165.
- bash: |
set -e
brew update
brew install augeas
condition: startswith(variables['IMAGE_NAME'], 'macOS')
displayName: Install MacOS dependencies

.github/FUNDING.yml

@@ -1 +0,0 @@
custom: https://supporters.eff.org/donate/support-work-on-certbot

.gitignore

@@ -4,11 +4,13 @@
build/
dist*/
/venv*/
/kgs/
/.tox/
/releases*/
/log*
letsencrypt.log
certbot.log
letsencrypt-auto-source/letsencrypt-auto.sig.lzma.base64
poetry.lock
# coverage
@@ -30,6 +32,12 @@ tags
# auth --cert-path --chain-path
/*.pem
# letstest
tests/letstest/letest-*/
tests/letstest/*.pem
tests/letstest/venv/
tests/letstest/venv3/
.venv
# pytest cache


@@ -1,5 +1,6 @@
[settings]
skip_glob=venv*
skip=letsencrypt-auto-source
force_sort_within_sections=True
force_single_line=True
order_by_type=False


@@ -56,25 +56,7 @@ extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
# See https://github.com/PyCQA/pylint/issues/1498.
# 3) Same as point 2 for no-value-for-parameter.
# See https://github.com/PyCQA/pylint/issues/2820.
# 4) raise-missing-from makes it an error to raise an exception from except
# block without using explicit exception chaining. While explicit exception
# chaining results in a slightly more informative traceback, I don't think
# it's beneficial enough for us to change all of our current instances and
# give Certbot developers errors about this when they're working on new code
# in the future. You can read more about exception chaining and this pylint
# check at
# https://blog.ram.rachum.com/post/621791438475296768/improving-python-exception-chaining-with.
# 5) wrong-import-order generates false positives and a pylint developer
# suggests that people using isort should disable this check at
# https://github.com/PyCQA/pylint/issues/3817#issuecomment-687892090.
# 6) unspecified-encoding generates errors when encoding is not specified in
# in a call to the built-in open function. This relates more to a design decision
# (unspecified encoding makes the open function use the default encoding of the system)
# than a clear flaw on which a check should be enforced. Anyway the project does
# not need to enforce encoding on files so we disable this check.
# 7) consider-using-f-string is "suggesting" to move to f-string when possible with an error. This
# clearly relates to code design and not to potential defects in the code, let's just ignore that.
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue,raise-missing-from,wrong-import-order,unspecified-encoding,consider-using-f-string
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue
[REPORTS]


@@ -138,7 +138,6 @@ Authors
* [Joubin Jabbari](https://github.com/joubin)
* [Juho Juopperi](https://github.com/jkjuopperi)
* [Kane York](https://github.com/riking)
* [Katsuyoshi Ozaki](https://github.com/moratori)
* [Kenichi Maehashi](https://github.com/kmaehashi)
* [Kenneth Skovhede](https://github.com/kenkendk)
* [Kevin Burke](https://github.com/kevinburke)


@@ -1,5 +1,5 @@
# This Dockerfile builds an image for development.
FROM ubuntu:focal
FROM debian:buster
# Note: this only exposes the port to other docker containers.
EXPOSE 80 443
@@ -8,9 +8,8 @@ WORKDIR /opt/certbot/src
COPY . .
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install apache2 git python3-dev \
python3-venv gcc libaugeas0 libssl-dev libffi-dev ca-certificates \
openssl nginx-light -y --no-install-recommends && \
apt-get install apache2 git python3-dev python3-venv gcc libaugeas0 \
libssl-dev libffi-dev ca-certificates openssl nginx-light -y && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* \
/tmp/* \


@@ -4,6 +4,5 @@ include pytest.ini
recursive-include docs *
recursive-include examples *
recursive-include tests *
include acme/py.typed
global-exclude __pycache__
global-exclude *.py[cod]


@@ -5,20 +5,12 @@ import functools
import hashlib
import logging
import socket
from typing import cast
from typing import Any
from typing import Dict
from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TypeVar
from typing import Union
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import hashes # type: ignore
import josepy as jose
from OpenSSL import crypto
from OpenSSL import SSL
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
import requests
from acme import crypto_util
@@ -29,19 +21,16 @@ from acme.mixins import TypeMixin
logger = logging.getLogger(__name__)
GenericChallenge = TypeVar('GenericChallenge', bound='Challenge')
class Challenge(jose.TypedJSONObjectWithFields):
# _fields_to_partial_json
"""ACME challenge."""
TYPES: Dict[str, Type['Challenge']] = {}
TYPES: dict = {}
@classmethod
def from_json(cls: Type[GenericChallenge],
jobj: Mapping[str, Any]) -> Union[GenericChallenge, 'UnrecognizedChallenge']:
def from_json(cls, jobj):
try:
return cast(GenericChallenge, super().from_json(jobj))
return super(Challenge, cls).from_json(jobj)
except jose.UnrecognizedTypeError as error:
logger.debug(error)
return UnrecognizedChallenge.from_json(jobj)
@@ -50,9 +39,9 @@ class Challenge(jose.TypedJSONObjectWithFields):
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
# _fields_to_partial_json
"""ACME challenge response."""
TYPES: Dict[str, Type['ChallengeResponse']] = {}
TYPES: dict = {}
resource_type = 'challenge'
resource: str = fields.resource(resource_type)
resource = fields.Resource(resource_type)
class UnrecognizedChallenge(Challenge):
@@ -67,17 +56,16 @@ class UnrecognizedChallenge(Challenge):
:ivar jobj: Original JSON decoded object.
"""
jobj: Dict[str, Any]
def __init__(self, jobj: Mapping[str, Any]) -> None:
super().__init__()
def __init__(self, jobj):
super(UnrecognizedChallenge, self).__init__()
object.__setattr__(self, "jobj", jobj)
def to_partial_json(self) -> Dict[str, Any]:
def to_partial_json(self):
return self.jobj # pylint: disable=no-member
@classmethod
def from_json(cls, jobj: Mapping[str, Any]) -> 'UnrecognizedChallenge':
def from_json(cls, jobj):
return cls(jobj)
@@ -91,13 +79,13 @@ class _TokenChallenge(Challenge):
"""Minimum size of the :attr:`token` in bytes."""
# TODO: acme-spec doesn't specify token as base64-encoded value
token: bytes = jose.field(
token = jose.Field(
"token", encoder=jose.encode_b64jose, decoder=functools.partial(
jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))
# XXX: rename to ~token_good_for_url
@property
def good_token(self) -> bool: # XXX: @token.decoder
def good_token(self): # XXX: @token.decoder
"""Is `token` good?
.. todo:: acme-spec wants "It MUST NOT contain any non-ASCII
@@ -114,13 +102,13 @@ class _TokenChallenge(Challenge):
class KeyAuthorizationChallengeResponse(ChallengeResponse):
"""Response to Challenges based on Key Authorization.
:param str key_authorization:
:param unicode key_authorization:
"""
key_authorization: str = jose.field("keyAuthorization")
key_authorization = jose.Field("keyAuthorization")
thumbprint_hash_function = hashes.SHA256
def verify(self, chall: 'KeyAuthorizationChallenge', account_public_key: jose.JWK) -> bool:
def verify(self, chall, account_public_key):
"""Verify the key authorization.
:param KeyAuthorization chall: Challenge that corresponds to
@@ -132,7 +120,7 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
:rtype: bool
"""
parts = self.key_authorization.split('.') # pylint: disable=no-member
parts = self.key_authorization.split('.')
if len(parts) != 2:
logger.debug("Key authorization (%r) is not well formed",
self.key_authorization)
@@ -152,15 +140,12 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
return True
def to_partial_json(self) -> Dict[str, Any]:
jobj = super().to_partial_json()
def to_partial_json(self):
jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
jobj.pop('keyAuthorization', None)
return jobj
# TODO: Make this method a generic of K (bound=KeyAuthorizationChallenge), response_cls of type
# Type[K] and use it in response/response_and_validation return types once Python 3.6 support is
# dropped (do not support generic ABC classes, see https://github.com/python/typing/issues/449).
class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
"""Challenge based on Key Authorization.
@@ -173,18 +158,18 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
thumbprint_hash_function = (
KeyAuthorizationChallengeResponse.thumbprint_hash_function)
def key_authorization(self, account_key: jose.JWK) -> str:
def key_authorization(self, account_key):
"""Generate Key Authorization.
:param JWK account_key:
:rtype str:
:rtype unicode:
"""
return self.encode("token") + "." + jose.b64encode(
account_key.thumbprint(
hash_function=self.thumbprint_hash_function)).decode()
def response(self, account_key: jose.JWK) -> KeyAuthorizationChallengeResponse:
def response(self, account_key):
"""Generate response to the challenge.
:param JWK account_key:
@@ -197,7 +182,7 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
key_authorization=self.key_authorization(account_key))
@abc.abstractmethod
def validation(self, account_key: jose.JWK, **kwargs: Any) -> Any:
def validation(self, account_key, **kwargs):
"""Generate validation for the challenge.
Subclasses must implement this method, but they are likely to
@@ -211,8 +196,7 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
"""
raise NotImplementedError() # pragma: no cover
def response_and_validation(self, account_key: jose.JWK, *args: Any, **kwargs: Any
) -> Tuple[KeyAuthorizationChallengeResponse, Any]:
def response_and_validation(self, account_key, *args, **kwargs):
"""Generate response and validation.
Convenience function that return results of `response` and
@@ -231,14 +215,14 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
"""ACME dns-01 challenge response."""
typ = "dns-01"
def simple_verify(self, chall: 'DNS01', domain: str, account_public_key: jose.JWK) -> bool: # pylint: disable=unused-argument
def simple_verify(self, chall, domain, account_public_key): # pylint: disable=unused-argument
"""Simple verify.
This method no longer checks DNS records and is a simple wrapper
around `KeyAuthorizationChallengeResponse.verify`.
:param challenges.DNS01 chall: Corresponding challenge.
:param str domain: Domain name being verified.
:param unicode domain: Domain name being verified.
:param JWK account_public_key: Public key for the key pair
being authorized.
@@ -262,24 +246,23 @@ class DNS01(KeyAuthorizationChallenge):
LABEL = "_acme-challenge"
"""Label clients prepend to the domain name being validated."""
def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
def validation(self, account_key, **unused_kwargs):
"""Generate validation.
:param JWK account_key:
:rtype: str
:rtype: unicode
"""
return jose.b64encode(hashlib.sha256(self.key_authorization(
account_key).encode("utf-8")).digest()).decode()
def validation_domain_name(self, name: str) -> str:
def validation_domain_name(self, name):
"""Domain name for TXT validation record.
:param str name: Domain name being validated.
:rtype: str
:param unicode name: Domain name being validated.
"""
return f"{self.LABEL}.{name}"
return "{0}.{1}".format(self.LABEL, name)
@ChallengeResponse.register
@@ -298,12 +281,11 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
WHITESPACE_CUTSET = "\n\r\t "
"""Whitespace characters which should be ignored at the end of the body."""
def simple_verify(self, chall: 'HTTP01', domain: str, account_public_key: jose.JWK,
port: Optional[int] = None) -> bool:
def simple_verify(self, chall, domain, account_public_key, port=None):
"""Simple verify.
:param challenges.SimpleHTTP chall: Corresponding challenge.
:param str domain: Domain name being verified.
:param unicode domain: Domain name being verified.
:param JWK account_public_key: Public key for the key pair
being authorized.
:param int port: Port used in the validation.
@@ -332,15 +314,6 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
except requests.exceptions.RequestException as error:
logger.error("Unable to reach %s: %s", uri, error)
return False
# By default, http_response.text will try to guess the encoding to use
# when decoding the response to Python unicode strings. This guesswork
# is error prone. RFC 8555 specifies that HTTP-01 responses should be
# key authorizations with possible trailing whitespace. Since key
# authorizations must be composed entirely of the base64url alphabet
# plus ".", we tell requests that the response should be ASCII. See
# https://datatracker.ietf.org/doc/html/rfc8555#section-8.3 for more
# info.
http_response.encoding = "ascii"
logger.debug("Received %s: %s. Headers: %s", http_response,
http_response.text, http_response.headers)
@@ -364,31 +337,31 @@ class HTTP01(KeyAuthorizationChallenge):
"""URI root path for the server provisioned resource."""
@property
def path(self) -> str:
def path(self):
"""Path (starting with '/') for provisioned resource.
:rtype: str
:rtype: string
"""
return '/' + self.URI_ROOT_PATH + '/' + self.encode('token')
def uri(self, domain: str) -> str:
def uri(self, domain):
"""Create an URI to the provisioned resource.
Forms an URI to the HTTPS server provisioned resource
(containing :attr:`~SimpleHTTP.token`).
:param str domain: Domain name being verified.
:rtype: str
:param unicode domain: Domain name being verified.
:rtype: string
"""
return "http://" + domain + self.path
def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
def validation(self, account_key, **unused_kwargs):
"""Generate validation.
:param JWK account_key:
:rtype: str
:rtype: unicode
"""
return self.key_authorization(account_key)
@@ -411,15 +384,14 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
ACME_TLS_1_PROTOCOL = "acme-tls/1"
@property
def h(self) -> bytes:
def h(self):
"""Hash value stored in challenge certificate"""
return hashlib.sha256(self.key_authorization.encode('utf-8')).digest()
def gen_cert(self, domain: str, key: Optional[crypto.PKey] = None, bits: int = 2048
) -> Tuple[crypto.X509, crypto.PKey]:
def gen_cert(self, domain, key=None, bits=2048):
"""Generate tls-alpn-01 certificate.
:param str domain: Domain verified by the challenge.
:param unicode domain: Domain verified by the challenge.
:param OpenSSL.crypto.PKey key: Optional private key used in
certificate generation. If not provided (``None``), then
fresh key will be generated.
@@ -432,19 +404,19 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, bits)
der_value = b"DER:" + codecs.encode(self.h, 'hex')
acme_extension = crypto.X509Extension(self.ID_PE_ACME_IDENTIFIER_V1,
critical=True, value=der_value)
critical=True, value=der_value)
return crypto_util.gen_ss_cert(key, [domain], force_san=True,
extensions=[acme_extension]), key
extensions=[acme_extension]), key
def probe_cert(self, domain: str, host: Optional[str] = None,
port: Optional[int] = None) -> crypto.X509:
def probe_cert(self, domain, host=None, port=None):
"""Probe tls-alpn-01 challenge certificate.
:param str domain: domain being validated, required.
:param str host: IP address used to probe the certificate.
:param unicode domain: domain being validated, required.
:param string host: IP address used to probe the certificate.
:param int port: Port used to probe the certificate.
"""
@@ -454,13 +426,13 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
if port is None:
port = self.PORT
return crypto_util.probe_sni(host=host.encode(), port=port, name=domain.encode(),
alpn_protocols=[self.ACME_TLS_1_PROTOCOL])
return crypto_util.probe_sni(host=host, port=port, name=domain,
alpn_protocols=[self.ACME_TLS_1_PROTOCOL])
def verify_cert(self, domain: str, cert: crypto.X509) -> bool:
def verify_cert(self, domain, cert):
"""Verify tls-alpn-01 challenge certificate.
:param str domain: Domain name being validated.
:param unicode domain: Domain name being validated.
:param OpensSSL.crypto.X509 cert: Challenge certificate.
:returns: Whether the certificate was successfully verified.
@@ -469,10 +441,7 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
"""
# pylint: disable=protected-access
names = crypto_util._pyopenssl_cert_or_req_all_names(cert)
# Type ignore needed due to
# https://github.com/pyca/pyopenssl/issues/730.
logger.debug('Certificate %s. SANs: %s',
cert.digest('sha256'), names)
logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), names)
if len(names) != 1 or names[0].lower() != domain.lower():
return False
@@ -487,9 +456,8 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
return False
# pylint: disable=too-many-arguments
def simple_verify(self, chall: 'TLSALPN01', domain: str, account_public_key: jose.JWK,
cert: Optional[crypto.X509] = None, host: Optional[str] = None,
port: Optional[int] = None) -> bool:
def simple_verify(self, chall, domain, account_public_key,
cert=None, host=None, port=None):
"""Simple verify.
Verify ``validation`` using ``account_public_key``, optionally
@@ -529,11 +497,11 @@ class TLSALPN01(KeyAuthorizationChallenge):
response_cls = TLSALPN01Response
typ = response_cls.typ
def validation(self, account_key: jose.JWK, **kwargs: Any) -> Tuple[crypto.X509, crypto.PKey]:
def validation(self, account_key, **kwargs):
"""Generate validation.
:param JWK account_key:
:param str domain: Domain verified by the challenge.
:param unicode domain: Domain verified by the challenge.
:param OpenSSL.crypto.PKey cert_key: Optional private key used
in certificate generation. If not provided (``None``), then
fresh key will be generated.
@@ -541,13 +509,12 @@ class TLSALPN01(KeyAuthorizationChallenge):
:rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
"""
# TODO: Remove cast when response() is generic.
return cast(TLSALPN01Response, self.response(account_key)).gen_cert(
return self.response(account_key).gen_cert(
key=kwargs.get('cert_key'),
domain=cast(str, kwargs.get('domain')))
domain=kwargs.get('domain'))
@staticmethod
def is_supported() -> bool:
def is_supported():
"""
Check if TLS-ALPN-01 challenge is supported on this machine.
This implies that a recent version of OpenSSL is installed (>= 1.0.2),
@@ -569,8 +536,7 @@ class DNS(_TokenChallenge):
LABEL = "_acme-challenge"
"""Label clients prepend to the domain name being validated."""
def gen_validation(self, account_key: jose.JWK, alg: jose.JWASignature = jose.RS256,
**kwargs: Any) -> jose.JWS:
def gen_validation(self, account_key, alg=jose.RS256, **kwargs):
"""Generate validation.
:param .JWK account_key: Private account key.
@@ -584,7 +550,7 @@ class DNS(_TokenChallenge):
payload=self.json_dumps(sort_keys=True).encode('utf-8'),
key=account_key, alg=alg, **kwargs)
def check_validation(self, validation: jose.JWS, account_public_key: jose.JWK) -> bool:
def check_validation(self, validation, account_public_key):
"""Check validation.
:param JWS validation:
@@ -601,7 +567,7 @@ class DNS(_TokenChallenge):
logger.debug("Checking validation for DNS failed: %s", error)
return False
def gen_response(self, account_key: jose.JWK, **kwargs: Any) -> 'DNSResponse':
def gen_response(self, account_key, **kwargs):
"""Generate response.
:param .JWK account_key: Private account key.
@@ -610,12 +576,13 @@ class DNS(_TokenChallenge):
:rtype: DNSResponse
"""
return DNSResponse(validation=self.gen_validation(account_key, **kwargs))
return DNSResponse(validation=self.gen_validation(
account_key, **kwargs))
def validation_domain_name(self, name: str) -> str:
def validation_domain_name(self, name):
"""Domain name for TXT validation record.
:param str name: Domain name being validated.
:param unicode name: Domain name being validated.
"""
return "{0}.{1}".format(self.LABEL, name)
@@ -630,9 +597,9 @@ class DNSResponse(ChallengeResponse):
"""
typ = "dns"
validation: jose.JWS = jose.field("validation", decoder=jose.JWS.from_json)
validation = jose.Field("validation", decoder=jose.JWS.from_json)
def check_validation(self, chall: 'DNS', account_public_key: jose.JWK) -> bool:
def check_validation(self, chall, account_public_key):
"""Check validation.
:param challenges.DNS chall:


@@ -1,7 +1,4 @@
"""ACME client API."""
# pylint: disable=too-many-lines
# This pylint disable can be deleted once the deprecated ACMEv1 code is
# removed.
import base64
import collections
import datetime
@@ -10,22 +7,13 @@ import heapq
import http.client as http_client
import logging
import re
import sys
import time
from types import ModuleType
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import Iterable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Set
from typing import Text
from typing import Tuple
from typing import Union
import warnings
import josepy as jose
import OpenSSL
@@ -34,7 +22,6 @@ from requests.adapters import HTTPAdapter
from requests.utils import parse_header_links
from requests_toolbelt.adapters.source import SourceAddressAdapter
from acme import challenges
from acme import crypto_util
from acme import errors
from acme import jws
@@ -55,8 +42,7 @@ class ClientBase:
:ivar .ClientNetwork net: Client network.
:ivar int acme_version: ACME protocol version. 1 or 2.
"""
def __init__(self, directory: messages.Directory, net: 'ClientNetwork',
acme_version: int) -> None:
def __init__(self, directory, net, acme_version):
"""Initialize.
:param .messages.Directory directory: Directory Resource
@@ -68,9 +54,7 @@ class ClientBase:
self.acme_version = acme_version
@classmethod
def _regr_from_response(cls, response: requests.Response, uri: Optional[str] = None,
terms_of_service: Optional[str] = None
) -> messages.RegistrationResource:
def _regr_from_response(cls, response, uri=None, terms_of_service=None):
if 'terms-of-service' in response.links:
terms_of_service = response.links['terms-of-service']['url']
@@ -79,8 +63,7 @@ class ClientBase:
uri=response.headers.get('Location', uri),
terms_of_service=terms_of_service)
def _send_recv_regr(self, regr: messages.RegistrationResource,
body: messages.Registration) -> messages.RegistrationResource:
def _send_recv_regr(self, regr, body):
response = self._post(regr.uri, body)
# TODO: Boulder returns httplib.ACCEPTED
@@ -93,7 +76,7 @@ class ClientBase:
response, uri=regr.uri,
terms_of_service=regr.terms_of_service)
def _post(self, *args: Any, **kwargs: Any) -> requests.Response:
def _post(self, *args, **kwargs):
"""Wrapper around self.net.post that adds the acme_version.
"""
@@ -102,9 +85,7 @@ class ClientBase:
kwargs.setdefault('new_nonce_url', getattr(self.directory, 'newNonce'))
return self.net.post(*args, **kwargs)
def update_registration(self, regr: messages.RegistrationResource,
update: Optional[messages.Registration] = None
) -> messages.RegistrationResource:
def update_registration(self, regr, update=None):
"""Update registration.
:param messages.RegistrationResource regr: Registration Resource.
@@ -121,8 +102,7 @@ class ClientBase:
self.net.account = updated_regr
return updated_regr
def deactivate_registration(self, regr: messages.RegistrationResource
) -> messages.RegistrationResource:
def deactivate_registration(self, regr):
"""Deactivate registration.
:param messages.RegistrationResource regr: The Registration Resource
@@ -132,8 +112,7 @@ class ClientBase:
:rtype: `.RegistrationResource`
"""
return self.update_registration(regr, messages.Registration.from_json(
{"status": "deactivated", "contact": None}))
return self.update_registration(regr, update={'status': 'deactivated'})
def deactivate_authorization(self,
authzr: messages.AuthorizationResource
@@ -152,18 +131,15 @@ class ClientBase:
return self._authzr_from_response(response,
authzr.body.identifier, authzr.uri)
def _authzr_from_response(self, response: requests.Response,
identifier: Optional[messages.Identifier] = None,
uri: Optional[str] = None) -> messages.AuthorizationResource:
def _authzr_from_response(self, response, identifier=None, uri=None):
authzr = messages.AuthorizationResource(
body=messages.Authorization.from_json(response.json()),
uri=response.headers.get('Location', uri))
if identifier is not None and authzr.body.identifier != identifier: # pylint: disable=no-member
if identifier is not None and authzr.body.identifier != identifier:
raise errors.UnexpectedUpdate(authzr)
return authzr
def answer_challenge(self, challb: messages.ChallengeBody,
response: challenges.ChallengeResponse) -> messages.ChallengeResource:
def answer_challenge(self, challb, response):
"""Answer challenge.
:param challb: Challenge Resource body.
@@ -178,21 +154,21 @@ class ClientBase:
:raises .UnexpectedUpdate:
"""
resp = self._post(challb.uri, response)
response = self._post(challb.uri, response)
try:
authzr_uri = resp.links['up']['url']
authzr_uri = response.links['up']['url']
except KeyError:
raise errors.ClientError('"up" Link header missing')
challr = messages.ChallengeResource(
authzr_uri=authzr_uri,
body=messages.ChallengeBody.from_json(resp.json()))
# TODO: check that challr.uri == resp.headers['Location']?
body=messages.ChallengeBody.from_json(response.json()))
# TODO: check that challr.uri == response.headers['Location']?
if challr.uri != challb.uri:
raise errors.UnexpectedUpdate(challr.uri)
return challr
@classmethod
def retry_after(cls, response: requests.Response, default: int) -> datetime.datetime:
def retry_after(cls, response, default):
"""Compute next `poll` time based on response ``Retry-After`` header.
Handles integers and various datestring formats per
@@ -223,7 +199,7 @@ class ClientBase:
return datetime.datetime.now() + datetime.timedelta(seconds=seconds)
def _revoke(self, cert: jose.ComparableX509, rsn: int, url: str) -> None:
def _revoke(self, cert, rsn, url):
"""Revoke certificate.
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
@@ -248,9 +224,6 @@ class ClientBase:
class Client(ClientBase):
"""ACME client for a v1 API.
.. deprecated:: 1.18.0
Use :class:`ClientV2` instead.
.. todo::
Clean up raised error types hierarchy, document, and handle (wrap)
instances of `.DeserializationError` raised in `from_json()`.
@@ -265,9 +238,8 @@ class Client(ClientBase):
"""
def __init__(self, directory: messages.Directory, key: jose.JWK,
alg: jose.JWASignature=jose.RS256, verify_ssl: bool = True,
net: Optional['ClientNetwork'] = None) -> None:
def __init__(self, directory, key, alg=jose.RS256, verify_ssl=True,
net=None):
"""Initialize.
:param directory: Directory Resource (`.messages.Directory`) or
@@ -281,11 +253,10 @@ class Client(ClientBase):
if isinstance(directory, str):
directory = messages.Directory.from_json(
net.get(directory).json())
super().__init__(directory=directory,
net=net, acme_version=1)
super(Client, self).__init__(directory=directory,
net=net, acme_version=1)
def register(self, new_reg: Optional[messages.NewRegistration] = None
) -> messages.RegistrationResource:
def register(self, new_reg=None):
"""Register.
:param .NewRegistration new_reg:
@@ -302,18 +273,16 @@ class Client(ClientBase):
# "Instance of 'Field' has no key/contact member" bug:
return self._regr_from_response(response)
def query_registration(self, regr: messages.RegistrationResource
) -> messages.RegistrationResource:
def query_registration(self, regr):
"""Query server about registration.
:param messages.RegistrationResource regr: Existing Registration
:param messages.RegistrationResource: Existing Registration
Resource.
"""
return self._send_recv_regr(regr, messages.UpdateRegistration())
def agree_to_tos(self, regr: messages.RegistrationResource
) -> messages.RegistrationResource:
def agree_to_tos(self, regr):
"""Agree to the terms-of-service.
Agree to the terms-of-service in a Registration Resource.
@@ -328,8 +297,7 @@ class Client(ClientBase):
return self.update_registration(
regr.update(body=regr.body.update(agreement=regr.terms_of_service)))
def request_challenges(self, identifier: messages.Identifier,
new_authzr_uri: Optional[str] = None) -> messages.AuthorizationResource:
def request_challenges(self, identifier, new_authzr_uri=None):
"""Request challenges.
:param .messages.Identifier identifier: Identifier to be challenged.
@@ -355,8 +323,7 @@ class Client(ClientBase):
assert response.status_code == http_client.CREATED
return self._authzr_from_response(response, identifier)
def request_domain_challenges(self, domain: str,new_authzr_uri: Optional[str] = None
) -> messages.AuthorizationResource:
def request_domain_challenges(self, domain, new_authzr_uri=None):
"""Request challenges for domain names.
This is simply a convenience function that wraps around
@@ -376,9 +343,7 @@ class Client(ClientBase):
return self.request_challenges(messages.Identifier(
typ=messages.IDENTIFIER_FQDN, value=domain), new_authzr_uri)
def request_issuance(self, csr: jose.ComparableX509,
authzrs: Iterable[messages.AuthorizationResource]
) -> messages.CertificateResource:
def request_issuance(self, csr, authzrs):
"""Request issuance.
:param csr: CSR
@@ -415,8 +380,7 @@ class Client(ClientBase):
body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, response.content)))
def poll(self, authzr: messages.AuthorizationResource
) -> Tuple[messages.AuthorizationResource, requests.Response]:
def poll(self, authzr):
"""Poll Authorization Resource for status.
:param authzr: Authorization Resource
@@ -432,11 +396,8 @@ class Client(ClientBase):
response, authzr.body.identifier, authzr.uri)
return updated_authzr, response
def poll_and_request_issuance(self, csr: jose.ComparableX509,
authzrs: Iterable[messages.AuthorizationResource],
mintime: int = 5, max_attempts: int = 10
) -> Tuple[messages.CertificateResource,
Tuple[messages.AuthorizationResource, ...]]:
def poll_and_request_issuance(
self, csr, authzrs, mintime=5, max_attempts=10):
"""Poll and request issuance.
This function polls all provided Authorization Resource URIs
@@ -494,7 +455,7 @@ class Client(ClientBase):
updated[authzr] = updated_authzr
attempts[authzr] += 1
if updated_authzr.body.status not in ( # pylint: disable=no-member
if updated_authzr.body.status not in (
messages.STATUS_VALID, messages.STATUS_INVALID):
if attempts[authzr] < max_attempts:
# push back to the priority queue, with updated retry_after
@@ -510,7 +471,7 @@ class Client(ClientBase):
updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
return self.request_issuance(csr, updated_authzrs), updated_authzrs
def _get_cert(self, uri: str) -> Tuple[requests.Response, jose.ComparableX509]:
def _get_cert(self, uri):
"""Returns certificate from URI.
:param str uri: URI of certificate
@@ -526,7 +487,7 @@ class Client(ClientBase):
return response, jose.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, response.content))
def check_cert(self, certr: messages.CertificateResource) -> messages.CertificateResource:
def check_cert(self, certr):
"""Check for new cert.
:param certr: Certificate Resource
@@ -545,7 +506,7 @@ class Client(ClientBase):
raise errors.UnexpectedUpdate(response.text)
return certr.update(body=cert)
def refresh(self, certr: messages.CertificateResource) -> messages.CertificateResource:
def refresh(self, certr):
"""Refresh certificate.
:param certr: Certificate Resource
@@ -560,8 +521,7 @@ class Client(ClientBase):
# respond with status code 403 (Forbidden)
return self.check_cert(certr)
def fetch_chain(self, certr: messages.CertificateResource,
max_length: int = 10) -> List[jose.ComparableX509]:
def fetch_chain(self, certr, max_length=10):
"""Fetch chain for certificate.
:param .CertificateResource certr: Certificate Resource
@@ -590,7 +550,7 @@ class Client(ClientBase):
"Recursion limit reached. Didn't get {0}".format(uri))
return chain
def revoke(self, cert: jose.ComparableX509, rsn: int) -> None:
def revoke(self, cert, rsn):
"""Revoke certificate.
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
@@ -601,7 +561,7 @@ class Client(ClientBase):
:raises .ClientError: If revocation is unsuccessful.
"""
self._revoke(cert, rsn, self.directory[messages.Revocation])
return self._revoke(cert, rsn, self.directory[messages.Revocation])
class ClientV2(ClientBase):
@@ -611,15 +571,16 @@ class ClientV2(ClientBase):
:ivar .ClientNetwork net: Client network.
"""
def __init__(self, directory: messages.Directory, net: 'ClientNetwork') -> None:
def __init__(self, directory, net):
"""Initialize.
:param .messages.Directory directory: Directory Resource
:param .ClientNetwork net: Client network.
"""
super().__init__(directory=directory, net=net, acme_version=2)
super(ClientV2, self).__init__(directory=directory,
net=net, acme_version=2)
def new_account(self, new_account: messages.NewRegistration) -> messages.RegistrationResource:
def new_account(self, new_account):
"""Register.
:param .NewRegistration new_account:
@@ -632,17 +593,16 @@ class ClientV2(ClientBase):
response = self._post(self.directory['newAccount'], new_account)
# if account already exists
if response.status_code == 200 and 'Location' in response.headers:
raise errors.ConflictError(response.headers['Location'])
raise errors.ConflictError(response.headers.get('Location'))
# "Instance of 'Field' has no key/contact member" bug:
regr = self._regr_from_response(response)
self.net.account = regr
return regr
def query_registration(self, regr: messages.RegistrationResource
) -> messages.RegistrationResource:
def query_registration(self, regr):
"""Query server about registration.
:param messages.RegistrationResource regr: Existing Registration
:param messages.RegistrationResource: Existing Registration
Resource.
"""
@@ -654,9 +614,7 @@ class ClientV2(ClientBase):
terms_of_service=regr.terms_of_service)
return self.net.account
def update_registration(self, regr: messages.RegistrationResource,
update: Optional[messages.Registration] = None
) -> messages.RegistrationResource:
def update_registration(self, regr, update=None):
"""Update registration.
:param messages.RegistrationResource regr: Registration Resource.
@@ -669,9 +627,9 @@ class ClientV2(ClientBase):
"""
# https://github.com/certbot/certbot/issues/6155
new_regr = self._get_v2_account(regr)
return super().update_registration(new_regr, update)
return super(ClientV2, self).update_registration(new_regr, update)
def _get_v2_account(self, regr: messages.RegistrationResource) -> messages.RegistrationResource:
def _get_v2_account(self, regr):
self.net.account = None
only_existing_reg = regr.body.update(only_return_existing=True)
response = self._post(self.directory['newAccount'], only_existing_reg)
@@ -680,10 +638,10 @@ class ClientV2(ClientBase):
self.net.account = new_regr
return new_regr
def new_order(self, csr_pem: bytes) -> messages.OrderResource:
def new_order(self, csr_pem):
"""Request a new Order object from the server.
:param bytes csr_pem: A CSR in PEM format.
:param str csr_pem: A CSR in PEM format.
:returns: The newly created order.
:rtype: OrderResource
@@ -691,23 +649,16 @@ class ClientV2(ClientBase):
csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# pylint: disable=protected-access
dnsNames = crypto_util._pyopenssl_cert_or_req_all_names(csr)
ipNames = crypto_util._pyopenssl_cert_or_req_san_ip(csr)
# ipNames is now []string
identifiers = []
for name in dnsNames:
identifiers.append(messages.Identifier(typ=messages.IDENTIFIER_FQDN,
value=name))
for ips in ipNames:
identifiers.append(messages.Identifier(typ=messages.IDENTIFIER_IP,
value=ips))
order = messages.NewOrder(identifiers=identifiers)
response = self._post(self.directory['newOrder'], order)
body = messages.Order.from_json(response.json())
authorizations = []
# pylint has trouble understanding our josepy based objects which use
# things like custom metaclass logic. body.authorizations should be a
# list of strings containing URLs so let's disable this check here.
for url in body.authorizations: # pylint: disable=not-an-iterable
for url in body.authorizations:
authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
return messages.OrderResource(
body=body,
@@ -715,8 +666,7 @@ class ClientV2(ClientBase):
authorizations=authorizations,
csr_pem=csr_pem)
def poll(self, authzr: messages.AuthorizationResource
) -> Tuple[messages.AuthorizationResource, requests.Response]:
def poll(self, authzr):
"""Poll Authorization Resource for status.
:param authzr: Authorization Resource
@@ -732,8 +682,7 @@ class ClientV2(ClientBase):
response, authzr.body.identifier, authzr.uri)
return updated_authzr, response
def poll_and_finalize(self, orderr: messages.OrderResource,
deadline: Optional[datetime.datetime] = None) -> messages.OrderResource:
def poll_and_finalize(self, orderr, deadline=None):
"""Poll authorizations and finalize the order.
If no deadline is provided, this method will timeout after 90
@@ -751,14 +700,13 @@ class ClientV2(ClientBase):
orderr = self.poll_authorizations(orderr, deadline)
return self.finalize_order(orderr, deadline)
def poll_authorizations(self, orderr: messages.OrderResource, deadline: datetime.datetime
) -> messages.OrderResource:
def poll_authorizations(self, orderr, deadline):
"""Poll Order Resource for status."""
responses = []
for url in orderr.body.authorizations:
while datetime.datetime.now() < deadline:
authzr = self._authzr_from_response(self._post_as_get(url), uri=url)
if authzr.body.status != messages.STATUS_PENDING: # pylint: disable=no-member
if authzr.body.status != messages.STATUS_PENDING:
responses.append(authzr)
break
time.sleep(1)
@@ -776,8 +724,7 @@ class ClientV2(ClientBase):
raise errors.ValidationError(failed)
return orderr.update(authorizations=responses)
def finalize_order(self, orderr: messages.OrderResource, deadline: datetime.datetime,
fetch_alternative_chains: bool = False) -> messages.OrderResource:
def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
"""Finalize an order and obtain a certificate.
:param messages.OrderResource orderr: order to finalize
@@ -809,7 +756,7 @@ class ClientV2(ClientBase):
return orderr
raise errors.TimeoutError()
def revoke(self, cert: jose.ComparableX509, rsn: int) -> None:
def revoke(self, cert, rsn):
"""Revoke certificate.
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
@@ -820,13 +767,13 @@ class ClientV2(ClientBase):
:raises .ClientError: If revocation is unsuccessful.
"""
self._revoke(cert, rsn, self.directory['revokeCert'])
return self._revoke(cert, rsn, self.directory['revokeCert'])
def external_account_required(self) -> bool:
def external_account_required(self):
"""Checks if ACME server requires External Account Binding authentication."""
return hasattr(self.directory, 'meta') and self.directory.meta.external_account_required
def _post_as_get(self, *args: Any, **kwargs: Any) -> requests.Response:
def _post_as_get(self, *args, **kwargs):
"""
Send GET request using the POST-as-GET protocol.
:param args:
@@ -836,7 +783,7 @@ class ClientV2(ClientBase):
new_args = args[:1] + (None,) + args[1:]
return self._post(*new_args, **kwargs)
def _get_links(self, response: requests.Response, relation_type: str) -> List[str]:
def _get_links(self, response, relation_type):
"""
Retrieves all Link URIs of relation_type from the response.
:param requests.Response response: The requests HTTP response.
@@ -855,9 +802,6 @@ class BackwardsCompatibleClientV2:
"""ACME client wrapper that tends towards V2-style calls, but
supports V1 servers.
.. deprecated:: 1.18.0
Use :class:`ClientV2` instead.
.. note:: While this class handles the majority of the differences
between versions of the ACME protocol, if you need to support an
ACME server based on version 3 or older of the IETF ACME draft
@@ -873,7 +817,7 @@ class BackwardsCompatibleClientV2:
:ivar .ClientBase client: either Client or ClientV2
"""
def __init__(self, net: 'ClientNetwork', key: jose.JWK, server: str) -> None:
def __init__(self, net, key, server):
directory = messages.Directory.from_json(net.get(server).json())
self.acme_version = self._acme_version_from_directory(directory)
self.client: Union[Client, ClientV2]
@@ -882,28 +826,26 @@ class BackwardsCompatibleClientV2:
else:
self.client = ClientV2(directory, net=net)
def __getattr__(self, name: str) -> Any:
def __getattr__(self, name):
return getattr(self.client, name)
def new_account_and_tos(self, regr: messages.NewRegistration,
check_tos_cb: Optional[Callable[[str], None]] = None
) -> messages.RegistrationResource:
def new_account_and_tos(self, regr, check_tos_cb=None):
"""Combined register and agree_tos for V1, new_account for V2
:param .NewRegistration regr:
:param callable check_tos_cb: callback that raises an error if
the check does not work
"""
def _assess_tos(tos: str) -> None:
def _assess_tos(tos):
if check_tos_cb is not None:
check_tos_cb(tos)
if self.acme_version == 1:
client_v1 = cast(Client, self.client)
regr_res = client_v1.register(regr)
if regr_res.terms_of_service is not None:
_assess_tos(regr_res.terms_of_service)
return client_v1.agree_to_tos(regr_res)
return regr_res
regr = client_v1.register(regr)
if regr.terms_of_service is not None:
_assess_tos(regr.terms_of_service)
return client_v1.agree_to_tos(regr)
return regr
else:
client_v2 = cast(ClientV2, self.client)
if "terms_of_service" in client_v2.directory.meta:
@@ -911,13 +853,13 @@ class BackwardsCompatibleClientV2:
regr = regr.update(terms_of_service_agreed=True)
return client_v2.new_account(regr)
def new_order(self, csr_pem: bytes) -> messages.OrderResource:
def new_order(self, csr_pem):
"""Request a new Order object from the server.
If using ACMEv1, returns a dummy OrderResource with only
the authorizations field filled in.
:param bytes csr_pem: A CSR in PEM format.
:param str csr_pem: A CSR in PEM format.
:returns: The newly created order.
:rtype: OrderResource
@@ -937,8 +879,7 @@ class BackwardsCompatibleClientV2:
return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
return cast(ClientV2, self.client).new_order(csr_pem)
def finalize_order(self, orderr: messages.OrderResource, deadline: datetime.datetime,
fetch_alternative_chains: bool = False) -> messages.OrderResource:
def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
"""Finalize an order and obtain a certificate.
:param messages.OrderResource orderr: order to finalize
@@ -972,15 +913,14 @@ class BackwardsCompatibleClientV2:
'certificate, please rerun the command for a new one.')
cert = OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
cast(OpenSSL.crypto.X509, cast(jose.ComparableX509, certr.body).wrapped)).decode()
chain_str = crypto_util.dump_pyopenssl_chain(chain).decode()
OpenSSL.crypto.FILETYPE_PEM, certr.body.wrapped).decode()
chain = crypto_util.dump_pyopenssl_chain(chain).decode()
return orderr.update(fullchain_pem=(cert + chain_str))
return orderr.update(fullchain_pem=(cert + chain))
return cast(ClientV2, self.client).finalize_order(
orderr, deadline, fetch_alternative_chains)
def revoke(self, cert: jose.ComparableX509, rsn: int) -> None:
def revoke(self, cert, rsn):
"""Revoke certificate.
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
@@ -991,14 +931,14 @@ class BackwardsCompatibleClientV2:
:raises .ClientError: If revocation is unsuccessful.
"""
self.client.revoke(cert, rsn)
return self.client.revoke(cert, rsn)
def _acme_version_from_directory(self, directory: messages.Directory) -> int:
def _acme_version_from_directory(self, directory):
if hasattr(directory, 'newNonce'):
return 2
return 1
def external_account_required(self) -> bool:
def external_account_required(self):
"""Checks if the server requires an external account for ACMEv2 servers.
Always returns False for ACMEv1 servers, as they don't use External Account Binding."""
@@ -1030,10 +970,9 @@ class ClientNetwork:
:param source_address: Optional source address to bind to when making requests.
:type source_address: str or tuple(str, int)
"""
def __init__(self, key: jose.JWK, account: Optional[messages.RegistrationResource] = None,
alg: jose.JWASignature = jose.RS256, verify_ssl: bool = True,
user_agent: str = 'acme-python', timeout: int = DEFAULT_NETWORK_TIMEOUT,
source_address: Optional[Union[str, Tuple[str, int]]] = None) -> None:
def __init__(self, key, account=None, alg=jose.RS256, verify_ssl=True,
user_agent='acme-python', timeout=DEFAULT_NETWORK_TIMEOUT,
source_address=None):
self.key = key
self.account = account
self.alg = alg
@@ -1050,7 +989,7 @@ class ClientNetwork:
self.session.mount("http://", adapter)
self.session.mount("https://", adapter)
def __del__(self) -> None:
def __del__(self):
# Try to close the session, but don't show exceptions to the
# user if the call to close() fails. See #4840.
try:
@@ -1058,16 +997,15 @@ class ClientNetwork:
except Exception: # pylint: disable=broad-except
pass
def _wrap_in_jws(self, obj: jose.JSONDeSerializable, nonce: str, url: str,
acme_version: int) -> str:
def _wrap_in_jws(self, obj, nonce, url, acme_version):
"""Wrap `JSONDeSerializable` object in JWS.
.. todo:: Implement ``acmePath``.
:param josepy.JSONDeSerializable obj:
:param str url: The URL to which this object will be POSTed
:param str nonce:
:rtype: str
:param bytes nonce:
:rtype: `josepy.JWS`
"""
if isinstance(obj, VersionedLEACMEMixin):
@@ -1076,7 +1014,7 @@ class ClientNetwork:
logger.debug('JWS payload:\n%s', jobj)
kwargs = {
"alg": self.alg,
"nonce": nonce,
"nonce": nonce
}
if acme_version == 2:
kwargs["url"] = url
@@ -1085,11 +1023,10 @@ class ClientNetwork:
if self.account is not None:
kwargs["kid"] = self.account["uri"]
kwargs["key"] = self.key
return jws.JWS.sign(jobj, **cast(Mapping[str, Any], kwargs)).json_dumps(indent=2)
return jws.JWS.sign(jobj, **kwargs).json_dumps(indent=2)
@classmethod
def _check_response(cls, response: requests.Response,
content_type: Optional[str] = None) -> requests.Response:
def _check_response(cls, response, content_type=None):
"""Check response content and its type.
.. note::
@@ -1119,7 +1056,7 @@ class ClientNetwork:
jobj = None
if response.status_code == 409:
raise errors.ConflictError(response.headers.get('Location', 'UNKNOWN-LOCATION'))
raise errors.ConflictError(response.headers.get('Location'))
if not response.ok:
if jobj is not None:
@@ -1142,11 +1079,12 @@ class ClientNetwork:
'response', response_ct)
if content_type == cls.JSON_CONTENT_TYPE and jobj is None:
raise errors.ClientError(f'Unexpected response Content-Type: {response_ct}')
raise errors.ClientError(
'Unexpected response Content-Type: {0}'.format(response_ct))
return response
def _send_request(self, method: str, url: str, *args: Any, **kwargs: Any) -> requests.Response:
def _send_request(self, method, url, *args, **kwargs):
"""Send HTTP request.
Makes sure that `verify_ssl` is respected. Logs request and
@@ -1195,25 +1133,15 @@ class ClientNetwork:
if m is None:
raise # pragma: no cover
host, path, _err_no, err_msg = m.groups()
raise ValueError(f"Requesting {host}{path}:{err_msg}")
raise ValueError("Requesting {0}{1}:{2}".format(host, path, err_msg))
# If the Content-Type is DER or an Accept header was sent in the
# request, the response may not be UTF-8 encoded. In this case, we
# don't set response.encoding and log the base64 response instead of
# raw bytes to keep binary data out of the logs. This code can be
# simplified to only check for an Accept header in the request when
# ACMEv1 support is dropped.
# If content is DER, log the base64 of it instead of raw bytes, to keep
# binary data out of the logs.
debug_content: Union[bytes, str]
if (response.headers.get("Content-Type") == DER_CONTENT_TYPE or
"Accept" in kwargs["headers"]):
if response.headers.get("Content-Type") == DER_CONTENT_TYPE:
debug_content = base64.b64encode(response.content)
else:
# We set response.encoding so response.text knows the response is
# UTF-8 encoded instead of trying to guess the encoding that was
# used which is error prone. This setting affects all future
# accesses of .text made on the returned response object as well.
response.encoding = "utf-8"
debug_content = response.text
debug_content = response.content.decode("utf-8")
logger.debug('Received response:\nHTTP %d\n%s\n\n%s',
response.status_code,
"\n".join("{0}: {1}".format(k, v)
@@ -1221,7 +1149,7 @@ class ClientNetwork:
debug_content)
return response
def head(self, *args: Any, **kwargs: Any) -> requests.Response:
def head(self, *args, **kwargs):
"""Send HEAD request without checking the response.
Note, that `_check_response` is not called, as it is expected
@@ -1231,13 +1159,12 @@ class ClientNetwork:
"""
return self._send_request('HEAD', *args, **kwargs)
def get(self, url: str, content_type: str = JSON_CONTENT_TYPE,
**kwargs: Any) -> requests.Response:
def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
"""Send GET request and check response."""
return self._check_response(
self._send_request('GET', url, **kwargs), content_type=content_type)
def _add_nonce(self, response: requests.Response) -> None:
def _add_nonce(self, response):
if self.REPLAY_NONCE_HEADER in response.headers:
nonce = response.headers[self.REPLAY_NONCE_HEADER]
try:
@@ -1249,7 +1176,7 @@ class ClientNetwork:
else:
raise errors.MissingNonce(response)
def _get_nonce(self, url: str, new_nonce_url: str) -> str:
def _get_nonce(self, url, new_nonce_url):
if not self._nonces:
logger.debug('Requesting fresh nonce')
if new_nonce_url is None:
@@ -1260,7 +1187,7 @@ class ClientNetwork:
self._add_nonce(response)
return self._nonces.pop()
def post(self, *args: Any, **kwargs: Any) -> requests.Response:
def post(self, *args, **kwargs):
"""POST object wrapped in `.JWS` and check response.
If the server responded with a badNonce error, the request will
@@ -1275,9 +1202,8 @@ class ClientNetwork:
return self._post_once(*args, **kwargs)
raise
def _post_once(self, url: str, obj: jose.JSONDeSerializable,
content_type: str = JOSE_CONTENT_TYPE, acme_version: int = 1,
**kwargs: Any) -> requests.Response:
def _post_once(self, url, obj, content_type=JOSE_CONTENT_TYPE,
acme_version=1, **kwargs):
new_nonce_url = kwargs.pop('new_nonce_url', None)
data = self._wrap_in_jws(obj, self._get_nonce(url, new_nonce_url), url, acme_version)
kwargs.setdefault('headers', {'Content-Type': content_type})
@@ -1285,35 +1211,3 @@ class ClientNetwork:
response = self._check_response(response, content_type=content_type)
self._add_nonce(response)
return response
# This class takes a similar approach to the cryptography project to deprecate attributes
# in public modules. See the _ModuleWithDeprecation class here:
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
class _ClientDeprecationModule:
"""
Internal class delegating to a module, and displaying warnings when
deprecated attributes in the acme.client module are accessed.
"""
def __init__(self, module: ModuleType) -> None:
self.__dict__['_module'] = module
def __getattr__(self, attr: str) -> Any:
if attr in ('Client', 'BackwardsCompatibleClientV2'):
warnings.warn('The {0} attribute in acme.client is deprecated '
'and will be removed soon.'.format(attr),
DeprecationWarning, stacklevel=2)
return getattr(self._module, attr)
def __setattr__(self, attr: str, value: Any) -> None: # pragma: no cover
setattr(self._module, attr, value)
def __delattr__(self, attr: str) -> None: # pragma: no cover
delattr(self._module, attr)
def __dir__(self) -> List[str]: # pragma: no cover
return ['_module'] + dir(self._module)
# Patching ourselves to warn about deprecation and planned removal of some elements in the module.
sys.modules[__name__] = cast(ModuleType, _ClientDeprecationModule(sys.modules[__name__]))
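As an aside, the module-swap pattern shown above can be sketched standalone. The names below (OldClient, _DeprecationModule) are hypothetical and only illustrate replacing sys.modules[__name__] so that deprecated attribute access emits a DeprecationWarning:

# Minimal standalone sketch of the module-deprecation pattern; OldClient and
# _DeprecationModule are made-up names, not part of acme.client.
import sys
import warnings
from types import ModuleType
from typing import Any

class OldClient:
    """Stand-in for an attribute we want to deprecate."""

class _DeprecationModule:
    """Delegate to the wrapped module, warning on deprecated attribute access."""
    def __init__(self, module: ModuleType) -> None:
        self.__dict__['_module'] = module

    def __getattr__(self, attr: str) -> Any:
        if attr == 'OldClient':
            warnings.warn('{0} is deprecated.'.format(attr), DeprecationWarning, stacklevel=2)
        return getattr(self._module, attr)

sys.modules[__name__] = _DeprecationModule(sys.modules[__name__])  # type: ignore[assignment]

# Module-level attribute access (e.g. "from thismodule import OldClient" done by
# another module) now goes through __getattr__ and emits a DeprecationWarning.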

View File

@@ -1,23 +1,17 @@
"""Crypto utilities."""
import binascii
import contextlib
import ipaddress
import logging
import os
import re
import socket
from typing import Any
from typing import Callable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Union
import josepy as jose
from OpenSSL import crypto
from OpenSSL import SSL
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
from acme import errors
@@ -30,14 +24,14 @@ logger = logging.getLogger(__name__)
# https://www.openssl.org/docs/ssl/SSLv23_method.html). _serve_sni
# should be changed to use "set_options" to disable SSLv2 and SSLv3,
# in case it's used for things other than probing/serving!
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
class _DefaultCertSelection:
def __init__(self, certs: Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]):
def __init__(self, certs):
self.certs = certs
def __call__(self, connection: SSL.Connection) -> Optional[Tuple[crypto.PKey, crypto.X509]]:
def __call__(self, connection):
server_name = connection.get_servername()
return self.certs.get(server_name, None)
@@ -55,13 +49,9 @@ class SSLSocket: # pylint: disable=too-few-public-methods
`certs` parameter would be ignored, and therefore must be empty.
"""
def __init__(self, sock: socket.socket,
certs: Optional[Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]] = None,
method: int = _DEFAULT_SSL_METHOD,
alpn_selection: Optional[Callable[[SSL.Connection, List[bytes]], bytes]] = None,
cert_selection: Optional[Callable[[SSL.Connection],
Tuple[crypto.PKey, crypto.X509]]] = None
) -> None:
def __init__(self, sock, certs=None,
method=_DEFAULT_SSL_METHOD, alpn_selection=None,
cert_selection=None):
self.sock = sock
self.alpn_selection = alpn_selection
self.method = method
@@ -69,18 +59,14 @@ class SSLSocket: # pylint: disable=too-few-public-methods
raise ValueError("Neither cert_selection or certs specified.")
if cert_selection and certs:
raise ValueError("Both cert_selection and certs specified.")
actual_cert_selection: Union[_DefaultCertSelection,
Optional[Callable[[SSL.Connection],
Tuple[crypto.PKey,
crypto.X509]]]] = cert_selection
if actual_cert_selection is None:
actual_cert_selection = _DefaultCertSelection(certs if certs else {})
self.cert_selection = actual_cert_selection
if cert_selection is None:
cert_selection = _DefaultCertSelection(certs)
self.cert_selection = cert_selection
def __getattr__(self, name: str) -> Any:
def __getattr__(self, name):
return getattr(self.sock, name)
def _pick_certificate_cb(self, connection: SSL.Connection) -> None:
def _pick_certificate_cb(self, connection):
"""SNI certificate callback.
This method will set a new OpenSSL context object for this
@@ -112,17 +98,17 @@ class SSLSocket: # pylint: disable=too-few-public-methods
# pylint: disable=missing-function-docstring
def __init__(self, connection: SSL.Connection) -> None:
def __init__(self, connection):
self._wrapped = connection
def __getattr__(self, name: str) -> Any:
def __getattr__(self, name):
return getattr(self._wrapped, name)
def shutdown(self, *unused_args: Any) -> bool:
def shutdown(self, *unused_args):
# OpenSSL.SSL.Connection.shutdown doesn't accept any args
return self._wrapped.shutdown()
def accept(self) -> Tuple[FakeConnection, Any]: # pylint: disable=missing-function-docstring
def accept(self): # pylint: disable=missing-function-docstring
sock, addr = self.sock.accept()
context = SSL.Context(self.method)
@@ -146,9 +132,9 @@ class SSLSocket: # pylint: disable=too-few-public-methods
return ssl_sock, addr
def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, # pylint: disable=too-many-arguments
method: int = _DEFAULT_SSL_METHOD, source_address: Tuple[str, int] = ('', 0),
alpn_protocols: Optional[List[str]] = None) -> crypto.X509:
def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-arguments
method=_DEFAULT_SSL_METHOD, source_address=('', 0),
alpn_protocols=None):
"""Probe SNI server for SSL certificate.
:param bytes name: Byte string to send as the server name in the
@@ -161,7 +147,7 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, #
of source interface). See `socket.creation_connection` for more
info. Available only in Python 2.7+.
:param alpn_protocols: Protocols to request using ALPN.
:type alpn_protocols: `list` of `str`
:type alpn_protocols: `list` of `bytes`
:raises acme.errors.Error: In case of any problems.
@@ -182,8 +168,8 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, #
source_address[1]
) if any(source_address) else ""
)
socket_tuple: Tuple[bytes, int] = (host, port)
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore[arg-type]
socket_tuple: Tuple[str, int] = (host, port)
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore
except socket.error as error:
raise errors.Error(error)
@@ -201,45 +187,23 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, #
return client_ssl.get_peer_certificate()
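For orientation, a minimal usage sketch of probe_sni follows; example.com is a placeholder and the call assumes a reachable TLS server on port 443:

# Hypothetical usage of acme.crypto_util.probe_sni; the hostname is a placeholder.
from acme import crypto_util

cert = crypto_util.probe_sni(name=b'example.com', host=b'example.com', port=443)
print(cert.get_subject().CN)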
def make_csr(private_key_pem: bytes, domains: Optional[Union[Set[str], List[str]]] = None,
must_staple: bool = False,
ipaddrs: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
) -> bytes:
"""Generate a CSR containing domains or IPs as subjectAltNames.
def make_csr(private_key_pem, domains, must_staple=False):
"""Generate a CSR containing a list of domains as subjectAltNames.
:param buffer private_key_pem: Private key, in PEM PKCS#8 format.
:param list domains: List of DNS names to include in subjectAltNames of CSR.
:param bool must_staple: Whether to include the TLS Feature extension (aka
OCSP Must Staple: https://tools.ietf.org/html/rfc7633).
:param list ipaddrs: List of IPaddress(type ipaddress.IPv4Address or ipaddress.IPv6Address)
names to include in subbjectAltNames of CSR.
params ordered this way for backward competablity when called by positional argument.
:returns: buffer PEM-encoded Certificate Signing Request.
"""
private_key = crypto.load_privatekey(
crypto.FILETYPE_PEM, private_key_pem)
csr = crypto.X509Req()
sanlist = []
# If the domain or IP list was not supplied, use an empty list so it's easier to iterate.
if domains is None:
domains = []
if ipaddrs is None:
ipaddrs = []
if len(domains)+len(ipaddrs) == 0:
raise ValueError("At least one of domains or ipaddrs parameter need to be not empty")
for address in domains:
sanlist.append('DNS:' + address)
for ips in ipaddrs:
sanlist.append('IP:' + ips.exploded)
# make sure it's ASCII encoded
san_string = ', '.join(sanlist).encode('ascii')
# For an IP SAN the value actually needs to be an octet string,
# but something downstream thankfully handles that for us.
extensions = [
crypto.X509Extension(
b'subjectAltName',
critical=False,
value=san_string
value=', '.join('DNS:' + d for d in domains).encode('ascii')
),
]
if must_staple:
@@ -255,9 +219,7 @@ def make_csr(private_key_pem: bytes, domains: Optional[Union[Set[str], List[str]
crypto.FILETYPE_PEM, csr)
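A hedged usage sketch of make_csr, using a freshly generated throwaway key; only the domains argument is used here since it is common to both signatures in this hunk (one side additionally accepts ipaddrs):

# Sketch: generate a throwaway RSA key and a CSR for two DNS names.
from OpenSSL import crypto
from acme import crypto_util

key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)
key_pem = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)

csr_pem = crypto_util.make_csr(key_pem, ['example.com', 'www.example.com'])
print(csr_pem.decode())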
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req: Union[crypto.X509, crypto.X509Req]
) -> List[str]:
# Despite its name, this only outputs DNS names; other identifier types are ignored.
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
common_name = loaded_cert_or_req.get_subject().CN
sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)
@@ -266,7 +228,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req: Union[crypto.X509, cryp
return [common_name] + [d for d in sans if d != common_name]
def _pyopenssl_cert_or_req_san(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
def _pyopenssl_cert_or_req_san(cert_or_req):
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
.. todo:: Implement directly in PyOpenSSL!
@@ -277,87 +239,47 @@ def _pyopenssl_cert_or_req_san(cert_or_req: Union[crypto.X509, crypto.X509Req])
:param cert_or_req: Certificate or CSR.
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
:returns: A list of Subject Alternative Names that are DNS names.
:rtype: `list` of `str`
:returns: A list of Subject Alternative Names.
:rtype: `list` of `unicode`
"""
# This function finds SANs that are DNS names.
# This function finds SANs by dumping the certificate/CSR to text and
# searching for "X509v3 Subject Alternative Name" in the text. This method
# is used to support PyOpenSSL version 0.13 where the
# `_subjectAltNameString` and `get_extensions` methods are not available
# for CSRs.
# constants based on PyOpenSSL certificate/CSR text dump
part_separator = ":"
parts_separator = ", "
prefix = "DNS" + part_separator
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
if isinstance(cert_or_req, crypto.X509):
# pylint: disable=line-too-long
func: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]] = crypto.dump_certificate
else:
func = crypto.dump_certificate_request
text = func(crypto.FILETYPE_TEXT, cert_or_req).decode("utf-8")
# WARNING: this function does not support multiple SANs extensions.
# Multiple X509v3 extensions of the same type is disallowed by RFC 5280.
match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
# WARNING: this function assumes that no SAN can include
# parts_separator, hence the split!
sans_parts = [] if match is None else match.group(1).split(parts_separator)
return [part.split(part_separator)[1]
for part in sans_parts if part.startswith(prefix)]
def _pyopenssl_cert_or_req_san_ip(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
"""Get Subject Alternative Names IPs from certificate or CSR using pyOpenSSL.
:param cert_or_req: Certificate or CSR.
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
:returns: A list of Subject Alternative Names that are IP Addresses.
:rtype: `list` of `str`. Note that this returns strings, not ipaddress objects.
"""
# constants based on PyOpenSSL certificate/CSR text dump
part_separator = ":"
prefix = "IP Address" + part_separator
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
return [part[len(prefix):] for part in sans_parts if part.startswith(prefix)]
def _pyopenssl_extract_san_list_raw(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
"""Get raw SAN string from cert or csr, parse it as UTF-8 and return.
:param cert_or_req: Certificate or CSR.
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
:returns: raw SAN strings, decoded from bytes as UTF-8
:rtype: `list` of `str`
"""
# This function finds SANs by dumping the certificate/CSR to text and
# searching for "X509v3 Subject Alternative Name" in the text. This method
# is used because in PyOpenSSL versions <0.17 the `_subjectAltNameString` method is
# not able to parse IP addresses in the subjectAltName string.
if isinstance(cert_or_req, crypto.X509):
# pylint: disable=line-too-long
text = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
else:
text = crypto.dump_certificate_request(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
# WARNING: this function does not support multiple SANs extensions.
# Multiple X509v3 extensions of the same type is disallowed by RFC 5280.
raw_san = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
parts_separator = ", "
# WARNING: this function assumes that no SAN can include
# parts_separator, hence the split!
sans_parts = [] if raw_san is None else raw_san.group(1).split(parts_separator)
return sans_parts
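The text-dump parsing approach described in the comments above can be sketched in isolation; the certificate text below is a hard-coded fragment rather than the output of a real dump:

# Minimal sketch of the regex-based SAN extraction on a hard-coded text fragment.
import re

text = """
        X509v3 Subject Alternative Name:
            DNS:example.com, DNS:www.example.com, IP Address:203.0.113.1
"""
match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
parts = [] if match is None else match.group(1).split(", ")
dns_names = [p.split(":", 1)[1] for p in parts if p.startswith("DNS:")]
ips = [p[len("IP Address:"):] for p in parts if p.startswith("IP Address:")]
print(dns_names, ips)  # ['example.com', 'www.example.com'] ['203.0.113.1']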
def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
not_before: Optional[int] = None,
validity: int = (7 * 24 * 60 * 60), force_san: bool = True,
extensions: Optional[List[crypto.X509Extension]] = None,
ips: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
) -> crypto.X509:
def gen_ss_cert(key, domains, not_before=None,
validity=(7 * 24 * 60 * 60), force_san=True, extensions=None):
"""Generate new self-signed certificate.
:type domains: `list` of `str`
:type domains: `list` of `unicode`
:param OpenSSL.crypto.PKey key:
:param bool force_san:
:param extensions: List of additional extensions to include in the cert.
:type extensions: `list` of `OpenSSL.crypto.X509Extension`
:type ips: `list` of (`ipaddress.IPv4Address` or `ipaddress.IPv6Address`)
If more than one domain is provided, all of the domains are put into
``subjectAltName`` X.509 extension and first domain is set as the
@@ -365,39 +287,28 @@ def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
extension is used, unless `force_san` is ``True``.
"""
assert domains or ips, "Must provide one or more hostnames or IPs for the cert."
assert domains, "Must provide one or more hostnames for the cert."
cert = crypto.X509()
cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
cert.set_version(2)
if extensions is None:
extensions = []
if domains is None:
domains = []
if ips is None:
ips = []
extensions.append(
crypto.X509Extension(
b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
)
if len(domains) > 0:
cert.get_subject().CN = domains[0]
cert.get_subject().CN = domains[0]
# TODO: what to put into cert.get_subject()?
cert.set_issuer(cert.get_subject())
sanlist = []
for address in domains:
sanlist.append('DNS:' + address)
for ip in ips:
sanlist.append('IP:' + ip.exploded)
san_string = ', '.join(sanlist).encode('ascii')
if force_san or len(domains) > 1 or len(ips) > 0:
if force_san or len(domains) > 1:
extensions.append(crypto.X509Extension(
b"subjectAltName",
critical=False,
value=san_string
value=b", ".join(b"DNS:" + d.encode() for d in domains)
))
cert.add_extensions(extensions)
@@ -410,8 +321,7 @@ def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
return cert
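A small usage sketch of gen_ss_cert with a throwaway RSA key; the one-day validity and example.com name are arbitrary, and only arguments common to both signatures in this hunk are used:

# Sketch: a one-day self-signed certificate via acme.crypto_util.gen_ss_cert.
from OpenSSL import crypto
from acme import crypto_util

key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)
cert = crypto_util.gen_ss_cert(key, ['example.com'], validity=24 * 60 * 60)
print(cert.get_subject().CN)  # example.com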
def dump_pyopenssl_chain(chain: Union[List[jose.ComparableX509], List[crypto.X509]],
filetype: int = crypto.FILETYPE_PEM) -> bytes:
def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
"""Dump certificate chain into a bundle.
:param list chain: List of `OpenSSL.crypto.X509` (or wrapped in
@@ -424,10 +334,8 @@ def dump_pyopenssl_chain(chain: Union[List[jose.ComparableX509], List[crypto.X50
# XXX: returns empty string when no chain is available, which
# shuts up RenewableCert, but might not be the best solution...
def _dump_cert(cert: Union[jose.ComparableX509, crypto.X509]) -> bytes:
def _dump_cert(cert):
if isinstance(cert, jose.ComparableX509):
if isinstance(cert.wrapped, crypto.X509Req):
raise errors.Error("Unexpected CSR provided.") # pragma: no cover
cert = cert.wrapped
return crypto.dump_certificate(filetype, cert)

View File

@@ -1,17 +1,5 @@
"""ACME errors."""
import typing
from typing import Any
from typing import List
from typing import Mapping
from typing import Set
from josepy import errors as jose_errors
import requests
# We import acme.messages only during type check to avoid circular dependencies. Type references
# to acme.message.* must be quoted to be lazily initialized and avoid compilation errors.
if typing.TYPE_CHECKING:
from acme import messages # pragma: no cover
class Error(Exception):
@@ -40,12 +28,12 @@ class NonceError(ClientError):
class BadNonce(NonceError):
"""Bad nonce error."""
def __init__(self, nonce: str, error: Exception, *args: Any) -> None:
super().__init__(*args)
def __init__(self, nonce, error, *args):
super(BadNonce, self).__init__(*args)
self.nonce = nonce
self.error = error
def __str__(self) -> str:
def __str__(self):
return 'Invalid nonce ({0!r}): {1}'.format(self.nonce, self.error)
@@ -59,11 +47,11 @@ class MissingNonce(NonceError):
:ivar requests.Response ~.response: HTTP Response
"""
def __init__(self, response: requests.Response, *args: Any) -> None:
super().__init__(*args)
def __init__(self, response, *args):
super(MissingNonce, self).__init__(*args)
self.response = response
def __str__(self) -> str:
def __str__(self):
return ('Server {0} response did not include a replay '
'nonce, headers: {1} (This may be a service outage)'.format(
self.response.request.method, self.response.headers))
@@ -81,20 +69,17 @@ class PollError(ClientError):
to the most recently updated one
"""
def __init__(self, exhausted: Set['messages.AuthorizationResource'],
updated: Mapping['messages.AuthorizationResource',
'messages.AuthorizationResource']
) -> None:
def __init__(self, exhausted, updated):
self.exhausted = exhausted
self.updated = updated
super().__init__()
super(PollError, self).__init__()
@property
def timeout(self) -> bool:
def timeout(self):
"""Was the error caused by timeout?"""
return bool(self.exhausted)
def __repr__(self) -> str:
def __repr__(self):
return '{0}(exhausted={1!r}, updated={2!r})'.format(
self.__class__.__name__, self.exhausted, self.updated)
@@ -103,9 +88,9 @@ class ValidationError(Error):
"""Error for authorization failures. Contains a list of authorization
resources, each of which is invalid and should have an error field.
"""
def __init__(self, failed_authzrs: List['messages.AuthorizationResource']) -> None:
def __init__(self, failed_authzrs):
self.failed_authzrs = failed_authzrs
super().__init__()
super(ValidationError, self).__init__()
class TimeoutError(Error): # pylint: disable=redefined-builtin
@@ -115,13 +100,13 @@ class TimeoutError(Error): # pylint: disable=redefined-builtin
class IssuanceError(Error):
"""Error sent by the server after requesting issuance of a certificate."""
def __init__(self, error: 'messages.Error') -> None:
def __init__(self, error):
"""Initialize.
:param messages.Error error: The error provided by the server.
"""
self.error = error
super().__init__()
super(IssuanceError, self).__init__()
class ConflictError(ClientError):
@@ -132,9 +117,9 @@ class ConflictError(ClientError):
Also used in V2 of the ACME client for the same purpose.
"""
def __init__(self, location: str) -> None:
def __init__(self, location):
self.location = location
super().__init__()
super(ConflictError, self).__init__()
class WildcardUnsupportedError(Error):

View File

@@ -1,7 +1,4 @@
"""ACME JSON fields."""
import datetime
from typing import Any
import logging
import josepy as jose
@@ -13,17 +10,17 @@ logger = logging.getLogger(__name__)
class Fixed(jose.Field):
"""Fixed field."""
def __init__(self, json_name: str, value: Any) -> None:
def __init__(self, json_name, value):
self.value = value
super().__init__(
super(Fixed, self).__init__(
json_name=json_name, default=value, omitempty=False)
def decode(self, value: Any) -> Any:
def decode(self, value):
if value != self.value:
raise jose.DeserializationError('Expected {0!r}'.format(self.value))
return self.value
def encode(self, value: Any) -> Any:
def encode(self, value):
if value != self.value:
logger.warning(
'Overriding fixed field (%s) with %r', self.json_name, value)
@@ -40,11 +37,11 @@ class RFC3339Field(jose.Field):
"""
@classmethod
def default_encoder(cls, value: datetime.datetime) -> str:
def default_encoder(cls, value):
return pyrfc3339.generate(value)
@classmethod
def default_decoder(cls, value: str) -> datetime.datetime:
def default_decoder(cls, value):
try:
return pyrfc3339.parse(value)
except ValueError as error:
@@ -54,29 +51,14 @@ class RFC3339Field(jose.Field):
class Resource(jose.Field):
"""Resource MITM field."""
def __init__(self, resource_type: str, *args: Any, **kwargs: Any) -> None:
def __init__(self, resource_type, *args, **kwargs):
self.resource_type = resource_type
kwargs['default'] = resource_type
super().__init__('resource', *args, **kwargs)
super(Resource, self).__init__(
'resource', default=resource_type, *args, **kwargs)
def decode(self, value: Any) -> Any:
def decode(self, value):
if value != self.resource_type:
raise jose.DeserializationError(
'Wrong resource type: {0} instead of {1}'.format(
value, self.resource_type))
return value
def fixed(json_name: str, value: Any) -> Any:
"""Generates a type-friendly Fixed field."""
return Fixed(json_name, value)
def rfc3339(json_name: str, omitempty: bool = False) -> Any:
"""Generates a type-friendly RFC3339 field."""
return RFC3339Field(json_name, omitempty=omitempty)
def resource(resource_type: str) -> Any:
"""Generates a type-friendly Resource field."""
return Resource(resource_type)
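To show what the RFC3339 field's default encoder and decoder do, here is a round-trip sketch using pyrfc3339 directly; the timestamp is arbitrary:

# Round-trip sketch of the RFC3339 default encoder/decoder.
import datetime

import pyrfc3339
import pytz

expires = datetime.datetime(2021, 4, 5, 10, 48, tzinfo=pytz.UTC)
encoded = pyrfc3339.generate(expires)   # '2021-04-05T10:48:00Z'
decoded = pyrfc3339.parse(encoded)
assert decoded == expires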

View File

@@ -4,22 +4,20 @@ The JWS implementation in josepy only implements the base JOSE standard. In
order to support the new header fields defined in ACME, this module defines some
ACME-specific classes that layer on top of josepy.
"""
from typing import Optional
import josepy as jose
class Header(jose.Header):
"""ACME-specific JOSE Header. Implements nonce, kid, and url.
"""
nonce: Optional[bytes] = jose.field('nonce', omitempty=True, encoder=jose.encode_b64jose)
kid: Optional[str] = jose.field('kid', omitempty=True)
url: Optional[str] = jose.field('url', omitempty=True)
nonce = jose.Field('nonce', omitempty=True, encoder=jose.encode_b64jose)
kid = jose.Field('kid', omitempty=True)
url = jose.Field('url', omitempty=True)
# Mypy does not understand the josepy magic happening here, and falsely claims
# that nonce is redefined. Let's ignore the type check here.
@nonce.decoder # type: ignore[no-redef,union-attr]
def nonce(value: str) -> bytes: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
@nonce.decoder # type: ignore
def nonce(value): # pylint: disable=no-self-argument,missing-function-docstring
try:
return jose.decode_b64jose(value)
except jose.DeserializationError as error:
@@ -29,12 +27,12 @@ class Header(jose.Header):
class Signature(jose.Signature):
"""ACME-specific Signature. Uses ACME-specific Header for customer fields."""
__slots__ = jose.Signature._orig_slots # type: ignore[attr-defined] # pylint: disable=protected-access,no-member
__slots__ = jose.Signature._orig_slots # pylint: disable=no-member
# TODO: decoder/encoder should accept cls? Otherwise, subclassing
# JSONObjectWithFields is tricky...
header_cls = Header
header: Header = jose.field(
header = jose.Field(
'header', omitempty=True, default=header_cls(),
decoder=header_cls.from_json)
@@ -44,16 +42,15 @@ class Signature(jose.Signature):
class JWS(jose.JWS):
"""ACME-specific JWS. Includes none, url, and kid in protected header."""
signature_cls = Signature
__slots__ = jose.JWS._orig_slots # type: ignore[attr-defined] # pylint: disable=protected-access
__slots__ = jose.JWS._orig_slots
@classmethod
# type: ignore[override] # pylint: disable=arguments-differ
def sign(cls, payload: bytes, key: jose.JWK, alg: jose.JWASignature, nonce: Optional[bytes],
url: Optional[str] = None, kid: Optional[str] = None) -> jose.JWS:
# pylint: disable=arguments-differ
def sign(cls, payload, key, alg, nonce, url=None, kid=None):
# Per ACME spec, jwk and kid are mutually exclusive, so only include a
# jwk field if kid is not provided.
include_jwk = kid is None
return super().sign(payload, key=key, alg=alg,
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
nonce=nonce, url=url, kid=kid,
include_jwk=include_jwk)
return super(JWS, cls).sign(payload, key=key, alg=alg,
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
nonce=nonce, url=url, kid=kid,
include_jwk=include_jwk)
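A hedged sketch of signing an ACME payload with the JWS class above; the payload, nonce, and URL are made-up values and the key is generated locally:

# Sketch: signing an ACME request body with acme.jws.JWS.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
import josepy as jose

from acme import jws

key = jose.JWKRSA(key=rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()))
signed = jws.JWS.sign(
    b'{"identifiers": [{"type": "dns", "value": "example.com"}]}',
    key=key, alg=jose.RS256, nonce=b'example-nonce',
    url='https://acme.example/acme/new-order')
print(signed.json_dumps(indent=2))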

View File

@@ -6,13 +6,12 @@ available. This code is being kept for now for backwards compatibility.
"""
import warnings
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
from typing import Any
from typing import Collection, IO # type: ignore
warnings.warn("acme.magic_typing is deprecated and will be removed in a future release.",
DeprecationWarning)
class TypingClass:
"""Ignore import errors by getting anything"""
def __getattr__(self, name: str) -> Any:
def __getattr__(self, name):
return None # pragma: no cover

View File

@@ -1,19 +1,9 @@
"""ACME protocol messages."""
import datetime
from collections.abc import Hashable
import json
from typing import Any
from typing import Dict
from typing import Iterator
from typing import List
from typing import Mapping
from typing import MutableMapping
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union
import josepy as jose
@@ -24,11 +14,6 @@ from acme import jws
from acme import util
from acme.mixins import ResourceMixin
if TYPE_CHECKING:
from typing_extensions import Protocol # pragma: no cover
else:
Protocol = object
OLD_ERROR_PREFIX = "urn:acme:error:"
ERROR_PREFIX = "urn:ietf:params:acme:error:"
@@ -65,14 +50,14 @@ ERROR_CODES = {
'externalAccountRequired': 'The server requires external account binding',
}
ERROR_TYPE_DESCRIPTIONS = {**{
ERROR_PREFIX + name: desc for name, desc in ERROR_CODES.items()
}, **{ # add errors with old prefix, deprecate me
OLD_ERROR_PREFIX + name: desc for name, desc in ERROR_CODES.items()
}}
ERROR_TYPE_DESCRIPTIONS = dict(
(ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items())
ERROR_TYPE_DESCRIPTIONS.update(dict( # add errors with old prefix, deprecate me
(OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()))
def is_acme_error(err: BaseException) -> bool:
def is_acme_error(err):
"""Check if argument is an ACME error."""
if isinstance(err, Error) and (err.typ is not None):
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
@@ -84,20 +69,20 @@ class Error(jose.JSONObjectWithFields, errors.Error):
https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00
:ivar str typ:
:ivar str title:
:ivar str detail:
:ivar unicode typ:
:ivar unicode title:
:ivar unicode detail:
"""
typ: str = jose.field('type', omitempty=True, default='about:blank')
title: str = jose.field('title', omitempty=True)
detail: str = jose.field('detail', omitempty=True)
typ = jose.Field('type', omitempty=True, default='about:blank')
title = jose.Field('title', omitempty=True)
detail = jose.Field('detail', omitempty=True)
@classmethod
def with_code(cls, code: str, **kwargs: Any) -> 'Error':
def with_code(cls, code, **kwargs):
"""Create an Error instance with an ACME Error code.
:str code: An ACME error code, like 'dnssec'.
:unicode code: An ACME error code, like 'dnssec'.
:kwargs: kwargs to pass to Error.
"""
@@ -107,74 +92,73 @@ class Error(jose.JSONObjectWithFields, errors.Error):
typ = ERROR_PREFIX + code
# Mypy will not understand that the Error constructor accepts a named argument
# "typ" because of josepy magic. Let's ignore the type check here.
return cls(typ=typ, **kwargs)
return cls(typ=typ, **kwargs) # type: ignore
@property
def description(self) -> Optional[str]:
def description(self):
"""Hardcoded error description based on its type.
:returns: Description if standard ACME error or ``None``.
:rtype: str
:rtype: unicode
"""
return ERROR_TYPE_DESCRIPTIONS.get(self.typ)
@property
def code(self) -> Optional[str]:
def code(self):
"""ACME error code.
Basically self.typ without the ERROR_PREFIX.
:returns: error code if standard ACME code or ``None``.
:rtype: str
:rtype: unicode
"""
code = str(self.typ).rsplit(':', maxsplit=1)[-1]
code = str(self.typ).split(':')[-1]
if code in ERROR_CODES:
return code
return None
def __str__(self) -> str:
def __str__(self):
return b' :: '.join(
part.encode('ascii', 'backslashreplace') for part in
(self.typ, self.description, self.detail, self.title)
if part is not None).decode()
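A short sketch of Error.with_code in use; 'malformed' is one of the codes in ERROR_CODES and the detail string is illustrative:

# Sketch: building a standard ACME error by code.
from acme import messages

err = messages.Error.with_code('malformed', detail='missing "csr" field')
print(err.typ)          # urn:ietf:params:acme:error:malformed
print(err.description)  # The request message was malformed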
class _Constant(jose.JSONDeSerializable, Hashable):
class _Constant(jose.JSONDeSerializable, Hashable): # type: ignore
"""ACME constant."""
__slots__ = ('name',)
POSSIBLE_NAMES: Dict[str, '_Constant'] = NotImplemented
def __init__(self, name: str) -> None:
super().__init__()
def __init__(self, name):
super(_Constant, self).__init__()
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
self.name = name
def to_partial_json(self) -> str:
def to_partial_json(self):
return self.name
@classmethod
def from_json(cls, jobj: str) -> '_Constant':
def from_json(cls, jobj):
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
raise jose.DeserializationError(f'{cls.__name__} not recognized')
raise jose.DeserializationError(
'{0} not recognized'.format(cls.__name__))
return cls.POSSIBLE_NAMES[jobj]
def __repr__(self) -> str:
return f'{self.__class__.__name__}({self.name})'
def __repr__(self):
return '{0}({1})'.format(self.__class__.__name__, self.name)
def __eq__(self, other: Any) -> bool:
def __eq__(self, other):
return isinstance(other, type(self)) and other.name == self.name
def __hash__(self) -> int:
def __hash__(self):
return hash((self.__class__, self.name))
class Status(_Constant):
"""ACME "status" field."""
POSSIBLE_NAMES: Dict[str, _Constant] = {}
POSSIBLE_NAMES: dict = {}
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
@@ -187,103 +171,88 @@ STATUS_DEACTIVATED = Status('deactivated')
class IdentifierType(_Constant):
"""ACME identifier type."""
POSSIBLE_NAMES: Dict[str, _Constant] = {}
POSSIBLE_NAMES: Dict[str, 'IdentifierType'] = {}
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
IDENTIFIER_IP = IdentifierType('ip') # IdentifierIP in pebble - not in Boulder yet
class Identifier(jose.JSONObjectWithFields):
"""ACME identifier.
:ivar IdentifierType typ:
:ivar str value:
:ivar unicode value:
"""
typ: IdentifierType = jose.field('type', decoder=IdentifierType.from_json)
value: str = jose.field('value')
class HasResourceType(Protocol):
"""
Represents a class with a resource_type class attribute of type string.
"""
resource_type: str = NotImplemented
GenericHasResourceType = TypeVar("GenericHasResourceType", bound=HasResourceType)
typ = jose.Field('type', decoder=IdentifierType.from_json)
value = jose.Field('value')
class Directory(jose.JSONDeSerializable):
"""Directory."""
_REGISTERED_TYPES: Dict[str, Type[HasResourceType]] = {}
_REGISTERED_TYPES: Dict[str, Type[Any]] = {}
class Meta(jose.JSONObjectWithFields):
"""Directory Meta."""
_terms_of_service: str = jose.field('terms-of-service', omitempty=True)
_terms_of_service_v2: str = jose.field('termsOfService', omitempty=True)
website: str = jose.field('website', omitempty=True)
caa_identities: List[str] = jose.field('caaIdentities', omitempty=True)
external_account_required: bool = jose.field('externalAccountRequired', omitempty=True)
_terms_of_service = jose.Field('terms-of-service', omitempty=True)
_terms_of_service_v2 = jose.Field('termsOfService', omitempty=True)
website = jose.Field('website', omitempty=True)
caa_identities = jose.Field('caaIdentities', omitempty=True)
external_account_required = jose.Field('externalAccountRequired', omitempty=True)
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super().__init__(**kwargs)
super(Directory.Meta, self).__init__(**kwargs)
@property
def terms_of_service(self) -> str:
def terms_of_service(self):
"""URL for the CA TOS"""
return self._terms_of_service or self._terms_of_service_v2
def __iter__(self) -> Iterator[str]:
def __iter__(self):
# When iterating over fields, use the external name 'terms_of_service' instead of
# the internal '_terms_of_service'.
for name in super().__iter__():
for name in super(Directory.Meta, self).__iter__():
yield name[1:] if name == '_terms_of_service' else name
def _internal_name(self, name: str) -> str:
def _internal_name(self, name):
return '_' + name if name == 'terms_of_service' else name
@classmethod
def _canon_key(cls, key: Union[str, HasResourceType, Type[HasResourceType]]) -> str:
if isinstance(key, str):
return key
return key.resource_type
@classmethod
def register(cls,
resource_body_cls: Type[GenericHasResourceType]) -> Type[GenericHasResourceType]:
def _canon_key(cls, key):
return getattr(key, 'resource_type', key)
@classmethod
def register(cls, resource_body_cls: Type[Any]) -> Type[Any]:
"""Register resource."""
resource_type = resource_body_cls.resource_type
assert resource_type not in cls._REGISTERED_TYPES
cls._REGISTERED_TYPES[resource_type] = resource_body_cls
return resource_body_cls
def __init__(self, jobj: Mapping[str, Any]) -> None:
def __init__(self, jobj):
canon_jobj = util.map_keys(jobj, self._canon_key)
# TODO: check that everything is an absolute URL; acme-spec is
# not clear on that
self._jobj = canon_jobj
def __getattr__(self, name: str) -> Any:
def __getattr__(self, name):
try:
return self[name.replace('_', '-')]
except KeyError as error:
raise AttributeError(str(error))
def __getitem__(self, name: Union[str, HasResourceType, Type[HasResourceType]]) -> Any:
def __getitem__(self, name):
try:
return self._jobj[self._canon_key(name)]
except KeyError:
raise KeyError('Directory field "' + self._canon_key(name) + '" not found')
def to_partial_json(self) -> Dict[str, Any]:
def to_partial_json(self):
return self._jobj
@classmethod
def from_json(cls, jobj: MutableMapping[str, Any]) -> 'Directory':
def from_json(cls, jobj):
jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {}))
return cls(jobj)
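A usage sketch of the Directory wrapper; the URLs below form a hypothetical ACME directory response, not a real endpoint:

# Sketch: wrapping a directory response and reading it by key and by attribute.
from acme import messages

directory = messages.Directory.from_json({
    'newNonce': 'https://acme.example/acme/new-nonce',
    'newAccount': 'https://acme.example/acme/new-acct',
    'newOrder': 'https://acme.example/acme/new-order',
    'meta': {'termsOfService': 'https://acme.example/terms'},
})
print(directory['newOrder'])            # key lookup
print(directory.meta.terms_of_service)  # attribute access via __getattr__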
@@ -294,16 +263,16 @@ class Resource(jose.JSONObjectWithFields):
:ivar acme.messages.ResourceBody body: Resource body.
"""
body: "ResourceBody" = jose.field('body')
body = jose.Field('body')
class ResourceWithURI(Resource):
"""ACME Resource with URI.
:ivar str uri: Location of the resource.
:ivar unicode ~.uri: Location of the resource.
"""
uri: str = jose.field('uri') # no ChallengeResource.uri
uri = jose.Field('uri') # no ChallengeResource.uri
class ResourceBody(jose.JSONObjectWithFields):
@@ -314,8 +283,7 @@ class ExternalAccountBinding:
"""ACME External Account Binding"""
@classmethod
def from_data(cls, account_public_key: jose.JWK, kid: str, hmac_key: str,
directory: Directory) -> Dict[str, Any]:
def from_data(cls, account_public_key, kid, hmac_key, directory):
"""Create External Account Binding Resource from contact details, kid and hmac."""
key_json = json.dumps(account_public_key.to_partial_json()).encode()
@@ -329,40 +297,33 @@ class ExternalAccountBinding:
return eab.to_partial_json()
GenericRegistration = TypeVar('GenericRegistration', bound='Registration')
class Registration(ResourceBody):
"""Registration Resource Body.
:ivar jose.JWK key: Public key.
:ivar josepy.jwk.JWK key: Public key.
:ivar tuple contact: Contact information following ACME spec,
`tuple` of `str`.
:ivar str agreement:
`tuple` of `unicode`.
:ivar unicode agreement:
"""
# on new-reg key server ignores 'key' and populates it based on
# JWS.signature.combined.jwk
key: jose.JWK = jose.field('key', omitempty=True, decoder=jose.JWK.from_json)
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
# Contact field implements special behavior to allow messages that clear existing
# contacts while not expecting the `contact` field when loading from json.
# This is implemented in the constructor and *_json methods.
contact: Tuple[str, ...] = jose.field('contact', omitempty=True, default=())
agreement: str = jose.field('agreement', omitempty=True)
status: Status = jose.field('status', omitempty=True)
terms_of_service_agreed: bool = jose.field('termsOfServiceAgreed', omitempty=True)
only_return_existing: bool = jose.field('onlyReturnExisting', omitempty=True)
external_account_binding: Dict[str, Any] = jose.field('externalAccountBinding',
omitempty=True)
contact = jose.Field('contact', omitempty=True, default=())
agreement = jose.Field('agreement', omitempty=True)
status = jose.Field('status', omitempty=True)
terms_of_service_agreed = jose.Field('termsOfServiceAgreed', omitempty=True)
only_return_existing = jose.Field('onlyReturnExisting', omitempty=True)
external_account_binding = jose.Field('externalAccountBinding', omitempty=True)
phone_prefix = 'tel:'
email_prefix = 'mailto:'
@classmethod
def from_data(cls: Type[GenericRegistration], phone: Optional[str] = None,
email: Optional[str] = None,
external_account_binding: Optional[Dict[str, Any]] = None,
**kwargs: Any) -> GenericRegistration:
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
"""
Create registration resource from contact details.
@@ -391,19 +352,19 @@ class Registration(ResourceBody):
return cls(**kwargs)
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""Note if the user provides a value for the `contact` member."""
if 'contact' in kwargs and kwargs['contact'] is not None:
if 'contact' in kwargs:
# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
object.__setattr__(self, '_add_contact', True)
super().__init__(**kwargs)
super(Registration, self).__init__(**kwargs)
def _filter_contact(self, prefix: str) -> Tuple[str, ...]:
def _filter_contact(self, prefix):
return tuple(
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
if detail.startswith(prefix))
def _add_contact_if_appropriate(self, jobj: Dict[str, Any]) -> Dict[str, Any]:
def _add_contact_if_appropriate(self, jobj):
"""
The `contact` member of Registration objects should not be required when
de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
@@ -420,23 +381,23 @@ class Registration(ResourceBody):
return jobj
def to_partial_json(self) -> Dict[str, Any]:
def to_partial_json(self):
"""Modify josepy.JSONDeserializable.to_partial_json()"""
jobj = super().to_partial_json()
jobj = super(Registration, self).to_partial_json()
return self._add_contact_if_appropriate(jobj)
def fields_to_partial_json(self) -> Dict[str, Any]:
def fields_to_partial_json(self):
"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
jobj = super().fields_to_partial_json()
jobj = super(Registration, self).fields_to_partial_json()
return self._add_contact_if_appropriate(jobj)
@property
def phones(self) -> Tuple[str, ...]:
def phones(self):
"""All phones found in the ``contact`` field."""
return self._filter_contact(self.phone_prefix)
@property
def emails(self) -> Tuple[str, ...]:
def emails(self):
"""All emails found in the ``contact`` field."""
return self._filter_contact(self.email_prefix)
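The contact behavior described above can be sketched briefly; the email address is a placeholder, and passing an explicit empty contact tuple is what forces the field to be serialized so existing contacts can be cleared:

# Sketch: contact handling in Registration.
from acme import messages

reg = messages.Registration.from_data(email='admin@example.com')
print(reg.emails)             # ('admin@example.com',)
print(reg.to_partial_json())  # includes "contact": ["mailto:admin@example.com"]

clear = messages.Registration(contact=())
print('contact' in clear.to_partial_json())  # True: empty contact is still sent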
@@ -445,26 +406,26 @@ class Registration(ResourceBody):
class NewRegistration(ResourceMixin, Registration):
"""New registration."""
resource_type = 'new-reg'
resource: str = fields.resource(resource_type)
resource = fields.Resource(resource_type)
class UpdateRegistration(ResourceMixin, Registration):
"""Update registration."""
resource_type = 'reg'
resource: str = fields.resource(resource_type)
resource = fields.Resource(resource_type)
class RegistrationResource(ResourceWithURI):
"""Registration Resource.
:ivar acme.messages.Registration body:
:ivar str new_authzr_uri: Deprecated. Do not use.
:ivar str terms_of_service: URL for the CA TOS.
:ivar unicode new_authzr_uri: Deprecated. Do not use.
:ivar unicode terms_of_service: URL for the CA TOS.
"""
body: Registration = jose.field('body', decoder=Registration.from_json)
new_authzr_uri: str = jose.field('new_authzr_uri', omitempty=True)
terms_of_service: str = jose.field('terms_of_service', omitempty=True)
body = jose.Field('body', decoder=Registration.from_json)
new_authzr_uri = jose.Field('new_authzr_uri', omitempty=True)
terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
@@ -489,47 +450,47 @@ class ChallengeBody(ResourceBody):
# challenge object supports either one, but should be accessed through the
# name "uri". In Client.answer_challenge, whichever one is set will be
# used.
_uri: str = jose.field('uri', omitempty=True, default=None)
_url: str = jose.field('url', omitempty=True, default=None)
status: Status = jose.field('status', decoder=Status.from_json,
_uri = jose.Field('uri', omitempty=True, default=None)
_url = jose.Field('url', omitempty=True, default=None)
status = jose.Field('status', decoder=Status.from_json,
omitempty=True, default=STATUS_PENDING)
validated: datetime.datetime = fields.rfc3339('validated', omitempty=True)
error: Error = jose.field('error', decoder=Error.from_json,
validated = fields.RFC3339Field('validated', omitempty=True)
error = jose.Field('error', decoder=Error.from_json,
omitempty=True, default=None)
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
super().__init__(**kwargs)
super(ChallengeBody, self).__init__(**kwargs)
def encode(self, name: str) -> Any:
return super().encode(self._internal_name(name))
def encode(self, name):
return super(ChallengeBody, self).encode(self._internal_name(name))
def to_partial_json(self) -> Dict[str, Any]:
jobj = super().to_partial_json()
def to_partial_json(self):
jobj = super(ChallengeBody, self).to_partial_json()
jobj.update(self.chall.to_partial_json())
return jobj
@classmethod
def fields_from_json(cls, jobj: Mapping[str, Any]) -> Dict[str, Any]:
jobj_fields = super().fields_from_json(jobj)
def fields_from_json(cls, jobj):
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
return jobj_fields
@property
def uri(self) -> str:
def uri(self):
"""The URL of this challenge."""
return self._url or self._uri
def __getattr__(self, name: str) -> Any:
def __getattr__(self, name):
return getattr(self.chall, name)
def __iter__(self) -> Iterator[str]:
def __iter__(self):
# When iterating over fields, use the external name 'uri' instead of
# the internal '_uri'.
for name in super().__iter__():
for name in super(ChallengeBody, self).__iter__():
yield name[1:] if name == '_uri' else name
def _internal_name(self, name: str) -> str:
def _internal_name(self, name):
return '_' + name if name == 'uri' else name
@@ -537,16 +498,16 @@ class ChallengeResource(Resource):
"""Challenge Resource.
:ivar acme.messages.ChallengeBody body:
:ivar str authzr_uri: URI found in the 'up' ``Link`` header.
:ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.
"""
body: ChallengeBody = jose.field('body', decoder=ChallengeBody.from_json)
authzr_uri: str = jose.field('authzr_uri')
body = jose.Field('body', decoder=ChallengeBody.from_json)
authzr_uri = jose.Field('authzr_uri')
@property
def uri(self) -> str:
def uri(self):
"""The URL of the challenge body."""
return self.body.uri # pylint: disable=no-member
return self.body.uri
class Authorization(ResourceBody):
@@ -560,26 +521,26 @@ class Authorization(ResourceBody):
:ivar datetime.datetime expires:
"""
identifier: Identifier = jose.field('identifier', decoder=Identifier.from_json, omitempty=True)
challenges: List[ChallengeBody] = jose.field('challenges', omitempty=True)
combinations: Tuple[Tuple[int, ...], ...] = jose.field('combinations', omitempty=True)
identifier = jose.Field('identifier', decoder=Identifier.from_json, omitempty=True)
challenges = jose.Field('challenges', omitempty=True)
combinations = jose.Field('combinations', omitempty=True)
status: Status = jose.field('status', omitempty=True, decoder=Status.from_json)
status = jose.Field('status', omitempty=True, decoder=Status.from_json)
# TODO: 'expires' is allowed for Authorization Resources in
# general, but for Key Authorization '[t]he "expires" field MUST
# be absent'... then acme-spec gives example with 'expires'
# present... That's confusing!
expires: datetime.datetime = fields.rfc3339('expires', omitempty=True)
wildcard: bool = jose.field('wildcard', omitempty=True)
expires = fields.RFC3339Field('expires', omitempty=True)
wildcard = jose.Field('wildcard', omitempty=True)
# Mypy does not understand the josepy magic happening here, and falsely claims
# that challenge is redefined. Let's ignore the type check here.
@challenges.decoder # type: ignore
def challenges(value: List[Dict[str, Any]]) -> Tuple[ChallengeBody, ...]: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
def challenges(value): # pylint: disable=no-self-argument,missing-function-docstring
return tuple(ChallengeBody.from_json(chall) for chall in value)
@property
def resolved_combinations(self) -> Tuple[Tuple[ChallengeBody, ...], ...]:
def resolved_combinations(self):
"""Combinations with challenges instead of indices."""
return tuple(tuple(self.challenges[idx] for idx in combo)
for combo in self.combinations) # pylint: disable=not-an-iterable
@@ -589,37 +550,37 @@ class Authorization(ResourceBody):
class NewAuthorization(ResourceMixin, Authorization):
"""New authorization."""
resource_type = 'new-authz'
resource: str = fields.resource(resource_type)
resource = fields.Resource(resource_type)
class UpdateAuthorization(ResourceMixin, Authorization):
"""Update authorization."""
resource_type = 'authz'
resource: str = fields.resource(resource_type)
resource = fields.Resource(resource_type)
class AuthorizationResource(ResourceWithURI):
"""Authorization Resource.
:ivar acme.messages.Authorization body:
:ivar str new_cert_uri: Deprecated. Do not use.
:ivar unicode new_cert_uri: Deprecated. Do not use.
"""
body: Authorization = jose.field('body', decoder=Authorization.from_json)
new_cert_uri: str = jose.field('new_cert_uri', omitempty=True)
body = jose.Field('body', decoder=Authorization.from_json)
new_cert_uri = jose.Field('new_cert_uri', omitempty=True)
@Directory.register
class CertificateRequest(ResourceMixin, jose.JSONObjectWithFields):
"""ACME new-cert request.
:ivar jose.ComparableX509 csr:
:ivar josepy.util.ComparableX509 csr:
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
"""
resource_type = 'new-cert'
resource: str = fields.resource(resource_type)
csr: jose.ComparableX509 = jose.field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
resource = fields.Resource(resource_type)
csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
class CertificateResource(ResourceWithURI):
@@ -627,27 +588,27 @@ class CertificateResource(ResourceWithURI):
:ivar josepy.util.ComparableX509 body:
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
:ivar str cert_chain_uri: URI found in the 'up' ``Link`` header
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.
"""
cert_chain_uri: str = jose.field('cert_chain_uri')
authzrs: Tuple[AuthorizationResource, ...] = jose.field('authzrs')
cert_chain_uri = jose.Field('cert_chain_uri')
authzrs = jose.Field('authzrs')
@Directory.register
class Revocation(ResourceMixin, jose.JSONObjectWithFields):
"""Revocation message.
:ivar jose.ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`jose.ComparableX509`
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`
"""
resource_type = 'revoke-cert'
resource: str = fields.resource(resource_type)
certificate: jose.ComparableX509 = jose.field(
resource = fields.Resource(resource_type)
certificate = jose.Field(
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
reason: int = jose.field('reason')
reason = jose.Field('reason')
class Order(ResourceBody):
@@ -664,26 +625,26 @@ class Order(ResourceBody):
:ivar datetime.datetime expires: When the order expires.
:ivar ~.Error error: Any error that occurred during finalization, if applicable.
"""
identifiers: List[Identifier] = jose.field('identifiers', omitempty=True)
status: Status = jose.field('status', decoder=Status.from_json, omitempty=True)
authorizations: List[str] = jose.field('authorizations', omitempty=True)
certificate: str = jose.field('certificate', omitempty=True)
finalize: str = jose.field('finalize', omitempty=True)
expires: datetime.datetime = fields.rfc3339('expires', omitempty=True)
error: Error = jose.field('error', omitempty=True, decoder=Error.from_json)
identifiers = jose.Field('identifiers', omitempty=True)
status = jose.Field('status', decoder=Status.from_json,
omitempty=True)
authorizations = jose.Field('authorizations', omitempty=True)
certificate = jose.Field('certificate', omitempty=True)
finalize = jose.Field('finalize', omitempty=True)
expires = fields.RFC3339Field('expires', omitempty=True)
error = jose.Field('error', omitempty=True, decoder=Error.from_json)
# Mypy does not understand the josepy magic happening here, and falsely claims
# that identifiers is redefined. Let's ignore the type check here.
@identifiers.decoder # type: ignore
def identifiers(value: List[Dict[str, Any]]) -> Tuple[Identifier, ...]: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
def identifiers(value): # pylint: disable=no-self-argument,missing-function-docstring
return tuple(Identifier.from_json(identifier) for identifier in value)
class OrderResource(ResourceWithURI):
"""Order Resource.
:ivar acme.messages.Order body:
:ivar bytes csr_pem: The CSR this Order will be finalized with.
:ivar str csr_pem: The CSR this Order will be finalized with.
:ivar authorizations: Fully-fetched AuthorizationResource objects.
:vartype authorizations: `list` of `acme.messages.AuthorizationResource`
:ivar str fullchain_pem: The fetched contents of the certificate URL
@@ -693,13 +654,11 @@ class OrderResource(ResourceWithURI):
finalization.
:vartype alternative_fullchains_pem: `list` of `str`
"""
body: Order = jose.field('body', decoder=Order.from_json)
csr_pem: bytes = jose.field('csr_pem', omitempty=True)
authorizations: List[AuthorizationResource] = jose.field('authorizations')
fullchain_pem: str = jose.field('fullchain_pem', omitempty=True)
alternative_fullchains_pem: List[str] = jose.field('alternative_fullchains_pem',
omitempty=True)
body = jose.Field('body', decoder=Order.from_json)
csr_pem = jose.Field('csr_pem', omitempty=True)
authorizations = jose.Field('authorizations')
fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
alternative_fullchains_pem = jose.Field('alternative_fullchains_pem', omitempty=True)
@Directory.register
class NewOrder(Order):
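As a rough sketch of how the Order message above deserializes (field names follow RFC 8555; the URLs below are made-up placeholders, not values from this diff):

from acme import messages

# Hypothetical newOrder response body; URLs are placeholders.
order = messages.Order.from_json({
    'status': 'pending',
    'identifiers': [{'type': 'dns', 'value': 'example.com'}],
    'authorizations': ['https://acme.example/acme/authz/1'],
    'finalize': 'https://acme.example/acme/order/1/finalize',
})
print(order.status.name)            # pending
print(order.identifiers[0].value)   # example.com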

View File

@@ -1,28 +1,26 @@
"""Useful mixins for Challenge and Resource objects"""
from typing import Any
from typing import Dict
class VersionedLEACMEMixin:
"""This mixin stores the version of Let's Encrypt's endpoint being used."""
@property
def le_acme_version(self) -> int:
def le_acme_version(self):
"""Define the version of ACME protocol to use"""
return getattr(self, '_le_acme_version', 1)
@le_acme_version.setter
def le_acme_version(self, version: int) -> None:
def le_acme_version(self, version):
# We need to use object.__setattr__ to not depend on the specific implementation of
# __setattr__ in current class (eg. jose.TypedJSONObjectWithFields raises AttributeError
# for any attempt to set an attribute to make objects immutable).
object.__setattr__(self, '_le_acme_version', version)
def __setattr__(self, key: str, value: Any) -> None:
def __setattr__(self, key, value):
if key == 'le_acme_version':
# Required for @property to operate properly. See comment above.
object.__setattr__(self, key, value)
else:
super().__setattr__(key, value) # pragma: no cover
super(VersionedLEACMEMixin, self).__setattr__(key, value) # pragma: no cover
class ResourceMixin(VersionedLEACMEMixin):
@@ -30,14 +28,14 @@ class ResourceMixin(VersionedLEACMEMixin):
This mixin generates a RFC8555 compliant JWS payload
by removing the `resource` field if needed (eg. ACME v2 protocol).
"""
def to_partial_json(self) -> Dict[str, Any]:
def to_partial_json(self):
"""See josepy.JSONDeserializable.to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(ResourceMixin, self),
'to_partial_json', 'resource')
def fields_to_partial_json(self) -> Dict[str, Any]:
def fields_to_partial_json(self):
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(ResourceMixin, self),
'fields_to_partial_json', 'resource')
@@ -46,23 +44,22 @@ class TypeMixin(VersionedLEACMEMixin):
This mixin allows generation of a RFC8555 compliant JWS payload
by removing the `type` field if needed (eg. ACME v2 protocol).
"""
def to_partial_json(self) -> Dict[str, Any]:
def to_partial_json(self):
"""See josepy.JSONDeserializable.to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(TypeMixin, self),
'to_partial_json', 'type')
def fields_to_partial_json(self) -> Dict[str, Any]:
def fields_to_partial_json(self):
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
return _safe_jobj_compliance(super(),
return _safe_jobj_compliance(super(TypeMixin, self),
'fields_to_partial_json', 'type')
def _safe_jobj_compliance(instance: Any, jobj_method: str,
uncompliant_field: str) -> Dict[str, Any]:
def _safe_jobj_compliance(instance, jobj_method, uncompliant_field):
if hasattr(instance, jobj_method):
jobj: Dict[str, Any] = getattr(instance, jobj_method)()
jobj = getattr(instance, jobj_method)()
if instance.le_acme_version == 2:
jobj.pop(uncompliant_field, None)
return jobj
raise AttributeError(f'Method {jobj_method}() is not implemented.') # pragma: no cover
raise AttributeError('Method {0}() is not implemented.'.format(jobj_method)) # pragma: no cover
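To make the intent concrete, here is a standalone toy version of that stripping logic (FakeResource and its field values are invented for illustration; the real helper additionally raises AttributeError when the method is missing):

# Toy stand-in for a jose message object carrying a pre-RFC8555 field.
class FakeResource:
    le_acme_version = 1

    def to_partial_json(self):
        return {'resource': 'new-cert', 'csr': '...'}

def safe_jobj_compliance(instance, jobj_method, uncompliant_field):
    # Simplified mirror of _safe_jobj_compliance: drop the legacy field
    # only when talking to an ACME v2 endpoint.
    jobj = getattr(instance, jobj_method)()
    if instance.le_acme_version == 2:
        jobj.pop(uncompliant_field, None)
    return jobj

msg = FakeResource()
print(safe_jobj_compliance(msg, 'to_partial_json', 'resource'))
# {'resource': 'new-cert', 'csr': '...'}   (ACME v1 keeps the field)
msg.le_acme_version = 2
print(safe_jobj_compliance(msg, 'to_partial_json', 'resource'))
# {'csr': '...'}                           (ACME v2 payload is RFC 8555 compliant)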

View File

View File

@@ -7,17 +7,7 @@ import logging
import socket
import socketserver
import threading
from typing import Any
from typing import cast
from typing import List
from typing import Mapping
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Type
from OpenSSL import crypto
from OpenSSL import SSL
from acme import challenges
from acme import crypto_util
@@ -28,30 +18,30 @@ logger = logging.getLogger(__name__)
class TLSServer(socketserver.TCPServer):
"""Generic TLS Server."""
def __init__(self, *args: Any, **kwargs: Any) -> None:
def __init__(self, *args, **kwargs):
self.ipv6 = kwargs.pop("ipv6", False)
if self.ipv6:
self.address_family = socket.AF_INET6
else:
self.address_family = socket.AF_INET
self.certs = kwargs.pop("certs", {})
self.method = kwargs.pop("method", crypto_util._DEFAULT_SSL_METHOD)
self.method = kwargs.pop(
"method", crypto_util._DEFAULT_SSL_METHOD)
self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
super().__init__(*args, **kwargs)
socketserver.TCPServer.__init__(self, *args, **kwargs)
def _wrap_sock(self) -> None:
self.socket = cast(socket.socket, crypto_util.SSLSocket(
def _wrap_sock(self):
self.socket = crypto_util.SSLSocket(
self.socket, cert_selection=self._cert_selection,
alpn_selection=getattr(self, '_alpn_selection', None),
method=self.method))
method=self.method)
def _cert_selection(self, connection: SSL.Connection
) -> Tuple[crypto.PKey, crypto.X509]: # pragma: no cover
def _cert_selection(self, connection): # pragma: no cover
"""Callback selecting certificate for connection."""
server_name = connection.get_servername()
return self.certs.get(server_name, None)
def server_bind(self) -> None:
def server_bind(self):
self._wrap_sock()
return socketserver.TCPServer.server_bind(self)
@@ -71,15 +61,11 @@ class BaseDualNetworkedServers:
If two servers are instantiated, they will serve on the same port.
"""
def __init__(self, ServerClass: Type[socketserver.TCPServer], server_address: Tuple[str, int],
*remaining_args: Any, **kwargs: Any) -> None:
def __init__(self, ServerClass, server_address, *remaining_args, **kwargs):
port = server_address[1]
self.threads: List[threading.Thread] = []
self.servers: List[socketserver.BaseServer] = []
# Preserve socket error for re-raising, if no servers can be started
last_socket_err: Optional[socket.error] = None
# Must try True first.
# Ubuntu, for example, will fail to bind to IPv4 if we've already bound
# to IPv6. But that's ok, since it will accept IPv4 connections on the IPv6
@@ -96,8 +82,7 @@ class BaseDualNetworkedServers:
logger.debug(
"Successfully bound to %s:%s using %s", new_address[0],
new_address[1], "IPv6" if ip_version else "IPv4")
except socket.error as e:
last_socket_err = e
except socket.error:
if self.servers:
# Already bound using IPv6.
logger.debug(
@@ -116,12 +101,9 @@ class BaseDualNetworkedServers:
# bind to the same port for both servers.
port = server.socket.getsockname()[1]
if not self.servers:
if last_socket_err:
raise last_socket_err
else: # pragma: no cover
raise socket.error("Could not bind to IPv4 or IPv6.")
raise socket.error("Could not bind to IPv4 or IPv6.")
def serve_forever(self) -> None:
def serve_forever(self):
"""Wraps socketserver.TCPServer.serve_forever"""
for server in self.servers:
thread = threading.Thread(
@@ -129,11 +111,11 @@ class BaseDualNetworkedServers:
thread.start()
self.threads.append(thread)
def getsocknames(self) -> List[Tuple[str, int]]:
def getsocknames(self):
"""Wraps socketserver.TCPServer.socket.getsockname"""
return [server.socket.getsockname() for server in self.servers]
def shutdown_and_server_close(self) -> None:
def shutdown_and_server_close(self):
"""Wraps socketserver.TCPServer.shutdown, socketserver.TCPServer.server_close, and
threading.Thread.join"""
for server in self.servers:
@@ -149,16 +131,13 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
ACME_TLS_1_PROTOCOL = b"acme-tls/1"
def __init__(self, server_address: Tuple[str, int],
certs: List[Tuple[crypto.PKey, crypto.X509]],
challenge_certs: Mapping[str, Tuple[crypto.PKey, crypto.X509]],
ipv6: bool = False) -> None:
def __init__(self, server_address, certs, challenge_certs, ipv6=False):
TLSServer.__init__(
self, server_address, _BaseRequestHandlerWithLogging, certs=certs,
ipv6=ipv6)
self.challenge_certs = challenge_certs
def _cert_selection(self, connection: SSL.Connection) -> Tuple[crypto.PKey, crypto.X509]:
def _cert_selection(self, connection):
# TODO: We would like to serve challenge cert only if asked for it via
# ALPN. To do this, we need to retrieve the list of protos from client
# hello, but this is currently impossible with openssl [0], and ALPN
@@ -168,9 +147,9 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
# [0] https://github.com/openssl/openssl/issues/4952
server_name = connection.get_servername()
logger.debug("Serving challenge cert for server name %s", server_name)
return self.challenge_certs[server_name]
return self.challenge_certs.get(server_name, None)
def _alpn_selection(self, _connection: SSL.Connection, alpn_protos: List[bytes]) -> bytes:
def _alpn_selection(self, _connection, alpn_protos):
"""Callback to select alpn protocol."""
if len(alpn_protos) == 1 and alpn_protos[0] == self.ACME_TLS_1_PROTOCOL:
logger.debug("Agreed on %s ALPN", self.ACME_TLS_1_PROTOCOL)
@@ -184,22 +163,21 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
class HTTPServer(BaseHTTPServer.HTTPServer):
"""Generic HTTP Server."""
def __init__(self, *args: Any, **kwargs: Any) -> None:
def __init__(self, *args, **kwargs):
self.ipv6 = kwargs.pop("ipv6", False)
if self.ipv6:
self.address_family = socket.AF_INET6
else:
self.address_family = socket.AF_INET
super().__init__(*args, **kwargs)
BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
class HTTP01Server(HTTPServer, ACMEServerMixin):
"""HTTP01 Server."""
def __init__(self, server_address: Tuple[str, int], resources: Set[challenges.HTTP01],
ipv6: bool = False, timeout: int = 30) -> None:
super().__init__(
server_address, HTTP01RequestHandler.partial_init(
def __init__(self, server_address, resources, ipv6=False, timeout=30):
HTTPServer.__init__(
self, server_address, HTTP01RequestHandler.partial_init(
simple_http_resources=resources, timeout=timeout), ipv6=ipv6)
@@ -207,8 +185,8 @@ class HTTP01DualNetworkedServers(BaseDualNetworkedServers):
"""HTTP01Server Wrapper. Tries everything for both. Failures for one don't
affect the other."""
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(HTTP01Server, *args, **kwargs)
def __init__(self, *args, **kwargs):
BaseDualNetworkedServers.__init__(self, HTTP01Server, *args, **kwargs)
class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
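Usage of the dual-stack wrapper above, as a sketch (an empty http-01 resource set and port 0 are assumed here); per the binding comment earlier in this file, the IPv6 socket is bound first and an IPv4 failure is tolerated as long as one family succeeds:

from acme.standalone import HTTP01DualNetworkedServers

servers = HTTP01DualNetworkedServers(("", 0), set())  # port 0: let the OS pick
servers.serve_forever()           # one background thread per bound server
print(servers.getsocknames())     # one (address, port, ...) tuple per bound family
servers.shutdown_and_server_close()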
@@ -223,10 +201,10 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
HTTP01Resource = collections.namedtuple(
"HTTP01Resource", "chall response validation")
def __init__(self, *args: Any, **kwargs: Any) -> None:
def __init__(self, *args, **kwargs):
self.simple_http_resources = kwargs.pop("simple_http_resources", set())
self._timeout = kwargs.pop('timeout', 30)
super().__init__(*args, **kwargs)
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
self.server: HTTP01Server
# In parent class BaseHTTPRequestHandler, 'timeout' is a class-level property but we
@@ -236,7 +214,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
# everyone happy, we statically redefine 'timeout' as a method property, and set the
# timeout value in a new internal instance-level property _timeout.
@property
def timeout(self) -> int: # type: ignore[override]
def timeout(self):
"""
The default timeout this server should apply to requests.
:return: timeout to apply
@@ -244,16 +222,16 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""
return self._timeout
def log_message(self, format: str, *args: Any) -> None: # pylint: disable=redefined-builtin
def log_message(self, format, *args): # pylint: disable=redefined-builtin
"""Log arbitrary message."""
logger.debug("%s - - %s", self.client_address[0], format % args)
def handle(self) -> None:
def handle(self):
"""Handle request."""
self.log_message("Incoming request")
BaseHTTPServer.BaseHTTPRequestHandler.handle(self)
def do_GET(self) -> None: # pylint: disable=invalid-name,missing-function-docstring
def do_GET(self): # pylint: disable=invalid-name,missing-function-docstring
if self.path == "/":
self.handle_index()
elif self.path.startswith("/" + challenges.HTTP01.URI_ROOT_PATH):
@@ -261,21 +239,21 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
else:
self.handle_404()
def handle_index(self) -> None:
def handle_index(self):
"""Handle index page."""
self.send_response(200)
self.send_header("Content-Type", "text/html")
self.end_headers()
self.wfile.write(self.server.server_version.encode())
def handle_404(self) -> None:
def handle_404(self):
"""Handler 404 Not Found errors."""
self.send_response(http_client.NOT_FOUND, message="Not Found")
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(b"404")
def handle_simple_http_resource(self) -> None:
def handle_simple_http_resource(self):
"""Handle HTTP01 provisioned resources."""
for resource in self.simple_http_resources:
if resource.chall.path == self.path:
@@ -291,8 +269,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
self.path)
@classmethod
def partial_init(cls, simple_http_resources: Set[challenges.HTTP01],
timeout: int) -> 'functools.partial[HTTP01RequestHandler]':
def partial_init(cls, simple_http_resources, timeout):
"""Partially initialize this handler.
This is useful because `socketserver.BaseServer` takes
@@ -308,11 +285,11 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
class _BaseRequestHandlerWithLogging(socketserver.BaseRequestHandler):
"""BaseRequestHandler with logging."""
def log_message(self, format: str, *args: Any) -> None: # pylint: disable=redefined-builtin
def log_message(self, format, *args): # pylint: disable=redefined-builtin
"""Log arbitrary message."""
logger.debug("%s - - %s", self.client_address[0], format % args)
def handle(self) -> None:
def handle(self):
"""Handle request."""
self.log_message("Incoming request")
socketserver.BaseRequestHandler.handle(self)
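The partial_init helper above leans on a generic stdlib pattern; a minimal standalone sketch (the toy Handler and its greeting keyword are assumptions, not code from this diff):

import functools
import http.server
import socketserver

class Handler(http.server.BaseHTTPRequestHandler):
    def __init__(self, *args, greeting="hello", **kwargs):
        # Stash per-server state before the base class starts handling.
        self.greeting = greeting
        super().__init__(*args, **kwargs)

    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(self.greeting.encode())

# socketserver instantiates the handler class itself, so per-server
# configuration is injected by pre-binding keywords with functools.partial.
handler_cls = functools.partial(Handler, greeting="hi from partial")
with socketserver.TCPServer(("127.0.0.1", 0), handler_cls) as srv:
    print("listening on", srv.server_address)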

View File

@@ -1,10 +1,6 @@
"""ACME utilities."""
from typing import Any
from typing import Callable
from typing import Dict
from typing import Mapping
def map_keys(dikt: Mapping[Any, Any], func: Callable[[Any], Any]) -> Dict[Any, Any]:
def map_keys(dikt, func):
"""Map dictionary keys."""
return {func(key): value for key, value in dikt.items()}
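For reference, map_keys simply rewrites each key through the given callable (the sample data below is arbitrary):

from acme.util import map_keys

print(map_keys({'Foo-Bar': 1, 'Baz': 2}, str.lower))
# {'foo-bar': 1, 'baz': 2}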

View File

@@ -58,7 +58,7 @@ master_doc = 'index'
# General information about the project.
project = u'acme-python'
copyright = u'2015, Let\'s Encrypt Project'
copyright = u'2015-2015, Let\'s Encrypt Project'
author = u'Let\'s Encrypt Project'
# The version info for the project you're documenting, acts as replacement for

View File

@@ -0,0 +1,2 @@
python -m acme.standalone -p 1234
curl -k https://localhost:1234

View File

@@ -0,0 +1 @@
../../../acme/testdata/rsa2048_cert.pem

View File

@@ -0,0 +1 @@
../../../acme/testdata/rsa2048_key.pem

View File

@@ -7,7 +7,4 @@
# in --editable mode (-e), just "pip install acme[docs]" does not work as
# expected and "pip install -e acme[docs]" must be used instead
# We also pin our dependencies for increased stability.
-c ../tools/requirements.txt
-e acme[docs]

2
acme/setup.cfg Normal file
View File

@@ -0,0 +1,2 @@
[bdist_wheel]
universal = 1

View File

@@ -3,17 +3,27 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.25.0.dev0'
version = '1.14.0.dev0'
# Please update tox.ini when modifying dependency version requirements
install_requires = [
'cryptography>=2.5.0',
'josepy>=1.10.0',
'cryptography>=2.1.4',
# formerly known as acme.jose:
# 1.1.0+ is required to avoid the warnings described at
# https://github.com/certbot/josepy/issues/13.
'josepy>=1.1.0',
'PyOpenSSL>=17.3.0',
'pyrfc3339',
'pytz>=2019.3',
'requests>=2.20.0',
'pytz',
'requests>=2.6.0',
'requests-toolbelt>=0.3.0',
'setuptools>=41.6.0',
'setuptools>=39.0.1',
]
dev_extras = [
'pytest',
'pytest-xdist',
'tox',
]
docs_extras = [
@@ -21,31 +31,25 @@ docs_extras = [
'sphinx_rtd_theme',
]
test_extras = [
'pytest',
'pytest-xdist',
'typing-extensions',
]
setup(
name='acme',
version=version,
description='ACME protocol implementation in Python',
url='https://github.com/letsencrypt/letsencrypt',
author="Certbot Project",
author_email='certbot-dev@eff.org',
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=3.7',
python_requires='>=3.6',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
],
@@ -54,7 +58,7 @@ setup(
include_package_data=True,
install_requires=install_requires,
extras_require={
'dev': dev_extras,
'docs': docs_extras,
'test': test_extras,
},
)

View File

@@ -6,7 +6,6 @@ from unittest import mock
import josepy as jose
import OpenSSL
import requests
from josepy.jwk import JWKEC
from acme import errors
@@ -293,7 +292,7 @@ class TLSALPN01ResponseTest(unittest.TestCase):
def test_gen_verify_cert_gen_key(self):
cert, key = self.response.gen_cert(self.domain)
self.assertIsInstance(key, OpenSSL.crypto.PKey)
self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
self.assertTrue(self.response.verify_cert(self.domain, cert))
def test_verify_bad_cert(self):
@@ -327,12 +326,12 @@ class TLSALPN01ResponseTest(unittest.TestCase):
self.response.probe_cert('foo.com')
mock_gethostbyname.assert_called_once_with('foo.com')
mock_probe_sni.assert_called_once_with(
host=b'127.0.0.1', port=self.response.PORT, name=b'foo.com',
host='127.0.0.1', port=self.response.PORT, name='foo.com',
alpn_protocols=['acme-tls/1'])
self.response.probe_cert('foo.com', host='8.8.8.8')
mock_probe_sni.assert_called_with(
host=b'8.8.8.8', port=mock.ANY, name=b'foo.com',
host='8.8.8.8', port=mock.ANY, name='foo.com',
alpn_protocols=['acme-tls/1'])
@mock.patch('acme.challenges.TLSALPN01Response.probe_cert')
@@ -402,11 +401,8 @@ class DNSTest(unittest.TestCase):
hash(DNS.from_json(self.jmsg))
def test_gen_check_validation(self):
ec_key_secp384r1 = JWKEC(key=test_util.load_ecdsa_private_key('ec_secp384r1_key.pem'))
for key, alg in [(KEY, jose.RS256), (ec_key_secp384r1, jose.ES384)]:
with self.subTest(key=key, alg=alg):
self.assertTrue(self.msg.check_validation(
self.msg.gen_validation(key, alg=alg), key.public_key()))
self.assertTrue(self.msg.check_validation(
self.msg.gen_validation(KEY), KEY.public_key()))
def test_gen_check_validation_wrong_key(self):
key2 = jose.JWKRSA.load(test_util.load_vector('rsa1024_key.pem'))
@@ -427,25 +423,20 @@ class DNSTest(unittest.TestCase):
payload=self.msg.update(
token=b'x' * 20).json_dumps().encode('utf-8'),
alg=jose.RS256, key=KEY)
self.assertFalse(self.msg.check_validation(bad_validation, KEY.public_key()))
self.assertFalse(self.msg.check_validation(
bad_validation, KEY.public_key()))
def test_gen_response(self):
with mock.patch('acme.challenges.DNS.gen_validation') as mock_gen:
mock_gen.return_value = mock.sentinel.validation
response = self.msg.gen_response(KEY)
from acme.challenges import DNSResponse
self.assertIsInstance(response, DNSResponse)
self.assertTrue(isinstance(response, DNSResponse))
self.assertEqual(response.validation, mock.sentinel.validation)
def test_validation_domain_name(self):
self.assertEqual('_acme-challenge.le.wtf', self.msg.validation_domain_name('le.wtf'))
def test_validation_domain_name_ecdsa(self):
ec_key_secp384r1 = JWKEC(key=test_util.load_ecdsa_private_key('ec_secp384r1_key.pem'))
self.assertIs(self.msg.check_validation(
self.msg.gen_validation(ec_key_secp384r1, alg=jose.ES384),
ec_key_secp384r1.public_key()), True
)
self.assertEqual(
'_acme-challenge.le.wtf', self.msg.validation_domain_name('le.wtf'))
class DNSResponseTest(unittest.TestCase):
@@ -483,7 +474,8 @@ class DNSResponseTest(unittest.TestCase):
hash(DNSResponse.from_json(self.jmsg_from))
def test_check_validation(self):
self.assertTrue(self.msg.check_validation(self.chall, KEY.public_key()))
self.assertTrue(
self.msg.check_validation(self.chall, KEY.public_key()))
class JWSPayloadRFC8555Compliant(unittest.TestCase):

View File

@@ -3,7 +3,6 @@
import copy
import datetime
import http.client as http_client
import ipaddress
import json
import unittest
from typing import Dict
@@ -24,7 +23,6 @@ import test_util
CERT_DER = test_util.load_vector('cert.der')
CERT_SAN_PEM = test_util.load_vector('cert-san.pem')
CSR_SAN_PEM = test_util.load_vector('csr-san.pem')
CSR_MIXED_PEM = test_util.load_vector('csr-mixed.pem')
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
KEY2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
@@ -92,7 +90,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
"""Tests for acme.client.BackwardsCompatibleClientV2."""
def setUp(self):
super().setUp()
super(BackwardsCompatibleClientV2Test, self).setUp()
# contains a loaded cert
self.certr = messages.CertificateResource(
body=messages_test.CERT)
@@ -321,7 +319,7 @@ class ClientTest(ClientTestBase):
"""Tests for acme.client.Client."""
def setUp(self):
super().setUp()
super(ClientTest, self).setUp()
self.directory = DIRECTORY_V1
@@ -606,8 +604,8 @@ class ClientTest(ClientTestBase):
# make sure that max_attempts is per-authorization, rather
# than global
max_attempts=max(len(authzrs[0].retries), len(authzrs[1].retries)))
self.assertIs(cert[0], csr)
self.assertIs(cert[1], updated_authzrs)
self.assertTrue(cert[0] is csr)
self.assertTrue(cert[1] is updated_authzrs)
self.assertEqual(updated_authzrs[0].uri, 'a...')
self.assertEqual(updated_authzrs[1].uri, 'b.')
self.assertEqual(updated_authzrs[0].times, [
@@ -643,7 +641,7 @@ class ClientTest(ClientTestBase):
authzr = self.client.deactivate_authorization(self.authzr)
self.assertEqual(authzb, authzr.body)
self.assertEqual(self.client.net.post.call_count, 1)
self.assertIn(self.authzr.uri, self.net.post.call_args_list[0][0])
self.assertTrue(self.authzr.uri in self.net.post.call_args_list[0][0])
def test_check_cert(self):
self.response.headers['Location'] = self.certr.uri
@@ -702,7 +700,7 @@ class ClientTest(ClientTestBase):
def test_revocation_payload(self):
obj = messages.Revocation(certificate=self.certr.body, reason=self.rsn)
self.assertIn('reason', obj.to_partial_json().keys())
self.assertTrue('reason' in obj.to_partial_json().keys())
self.assertEqual(self.rsn, obj.to_partial_json()['reason'])
def test_revoke_bad_status_raises_error(self):
@@ -718,7 +716,7 @@ class ClientV2Test(ClientTestBase):
"""Tests for acme.client.ClientV2."""
def setUp(self):
super().setUp()
super(ClientV2Test, self).setUp()
self.directory = DIRECTORY_V2
@@ -742,7 +740,7 @@ class ClientV2Test(ClientTestBase):
self.orderr = messages.OrderResource(
body=self.order,
uri='https://www.letsencrypt-demo.org/acme/acct/1/order/1',
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_MIXED_PEM)
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_SAN_PEM)
def test_new_account(self):
self.response.status_code = http_client.CREATED
@@ -772,7 +770,7 @@ class ClientV2Test(ClientTestBase):
with mock.patch('acme.client.ClientV2._post_as_get') as mock_post_as_get:
mock_post_as_get.side_effect = (authz_response, authz_response2)
self.assertEqual(self.client.new_order(CSR_MIXED_PEM), self.orderr)
self.assertEqual(self.client.new_order(CSR_SAN_PEM), self.orderr)
@mock.patch('acme.client.datetime')
def test_poll_and_finalize(self, mock_datetime):
@@ -879,9 +877,9 @@ class ClientV2Test(ClientTestBase):
self.response.headers['Location'] = self.regr.uri
self.response.json.return_value = self.regr.body.to_json()
self.assertEqual(self.regr, self.client.update_registration(self.regr))
self.assertIsNotNone(self.client.net.account)
self.assertNotEqual(self.client.net.account, None)
self.assertEqual(self.client.net.post.call_count, 2)
self.assertIn(DIRECTORY_V2.newAccount, self.net.post.call_args_list[0][0])
self.assertTrue(DIRECTORY_V2.newAccount in self.net.post.call_args_list[0][0])
self.response.json.return_value = self.regr.body.update(
contact=()).to_json()
@@ -945,7 +943,7 @@ class ClientNetworkTest(unittest.TestCase):
self.response.links = {}
def test_init(self):
self.assertIs(self.net.verify_ssl, self.verify_ssl)
self.assertTrue(self.net.verify_ssl is self.verify_ssl)
def test_wrap_in_jws(self):
# pylint: disable=protected-access
@@ -1187,7 +1185,7 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
def send_request(*args, **kwargs):
# pylint: disable=unused-argument,missing-docstring
self.assertNotIn("new_nonce_url", kwargs)
self.assertFalse("new_nonce_url" in kwargs)
method = args[0]
uri = args[1]
if method == 'HEAD' and uri != "new_nonce_uri":
@@ -1332,7 +1330,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
from acme.client import ClientNetwork
net = ClientNetwork(key=None, alg=None, source_address=self.source_address)
for adapter in net.session.adapters.values():
self.assertIn(self.source_address, adapter.source_address)
self.assertTrue(self.source_address in adapter.source_address)
def test_behavior_assumption(self):
"""This is a test that guardrails the HTTPAdapter behavior so that if the default for

View File

@@ -1,6 +1,5 @@
"""Tests for acme.crypto_util."""
import itertools
import ipaddress
import socket
import socketserver
import threading
@@ -109,6 +108,7 @@ class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san."""
@classmethod
def _call(cls, loader, name):
# pylint: disable=protected-access
@@ -174,50 +174,9 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
['chicago-cubs.venafi.example', 'cubs.venafi.example'])
class PyOpenSSLCertOrReqSANIPTest(unittest.TestCase):
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san_ip."""
@classmethod
def _call(cls, loader, name):
# pylint: disable=protected-access
from acme.crypto_util import _pyopenssl_cert_or_req_san_ip
return _pyopenssl_cert_or_req_san_ip(loader(name))
def _call_cert(self, name):
return self._call(test_util.load_cert, name)
def _call_csr(self, name):
return self._call(test_util.load_csr, name)
def test_cert_no_sans(self):
self.assertEqual(self._call_cert('cert.pem'), [])
def test_csr_no_sans(self):
self.assertEqual(self._call_csr('csr-nosans.pem'), [])
def test_cert_domain_sans(self):
self.assertEqual(self._call_cert('cert-san.pem'), [])
def test_csr_domain_sans(self):
self.assertEqual(self._call_csr('csr-san.pem'), [])
def test_cert_ip_two_sans(self):
self.assertEqual(self._call_cert('cert-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])
def test_csr_ip_two_sans(self):
self.assertEqual(self._call_csr('csr-ipsans.pem'), ['192.0.2.145', '203.0.113.1'])
def test_csr_ipv6_sans(self):
self.assertEqual(self._call_csr('csr-ipv6sans.pem'),
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])
def test_cert_ipv6_sans(self):
self.assertEqual(self._call_cert('cert-ipv6sans.pem'),
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5'])
class GenSsCertTest(unittest.TestCase):
"""Test for gen_ss_cert (generation of self-signed cert)."""
class RandomSnTest(unittest.TestCase):
"""Test for random certificate serial numbers."""
def setUp(self):
@@ -228,19 +187,11 @@ class GenSsCertTest(unittest.TestCase):
def test_sn_collisions(self):
from acme.crypto_util import gen_ss_cert
for _ in range(self.cert_count):
cert = gen_ss_cert(self.key, ['dummy'], force_san=True,
ips=[ipaddress.ip_address("10.10.10.10")])
cert = gen_ss_cert(self.key, ['dummy'], force_san=True)
self.serial_num.append(cert.get_serial_number())
self.assertGreaterEqual(len(set(self.serial_num)), self.cert_count)
def test_no_name(self):
from acme.crypto_util import gen_ss_cert
with self.assertRaises(AssertionError):
gen_ss_cert(self.key, ips=[ipaddress.ip_address("1.1.1.1")])
gen_ss_cert(self.key)
self.assertTrue(len(set(self.serial_num)) > 1)
class MakeCSRTest(unittest.TestCase):
"""Test for standalone functions."""
@@ -255,8 +206,8 @@ class MakeCSRTest(unittest.TestCase):
def test_make_csr(self):
csr_pem = self._call_with_key(["a.example", "b.example"])
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--', csr_pem)
self.assertIn(b'--END CERTIFICATE REQUEST--', csr_pem)
self.assertTrue(b'--BEGIN CERTIFICATE REQUEST--' in csr_pem)
self.assertTrue(b'--END CERTIFICATE REQUEST--' in csr_pem)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
@@ -272,27 +223,6 @@ class MakeCSRTest(unittest.TestCase):
).get_data(),
)
def test_make_csr_ip(self):
csr_pem = self._call_with_key(["a.example"], False, [ipaddress.ip_address('127.0.0.1'), ipaddress.ip_address('::1')])
self.assertIn(b'--BEGIN CERTIFICATE REQUEST--' , csr_pem)
self.assertIn(b'--END CERTIFICATE REQUEST--' , csr_pem)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
# have a get_extensions() method, so we skip this test if the method
# isn't available.
if hasattr(csr, 'get_extensions'):
self.assertEqual(len(csr.get_extensions()), 1)
self.assertEqual(csr.get_extensions()[0].get_data(),
OpenSSL.crypto.X509Extension(
b'subjectAltName',
critical=False,
value=b'DNS:a.example, IP:127.0.0.1, IP:::1',
).get_data(),
)
# for IP san it's actually need to be octet-string,
# but somewhere downstream thankfully handle it for us
def test_make_csr_must_staple(self):
csr_pem = self._call_with_key(["a.example"], must_staple=True)
csr = OpenSSL.crypto.load_certificate_request(
@@ -311,9 +241,6 @@ class MakeCSRTest(unittest.TestCase):
self.assertEqual(len(must_staple_exts), 1,
"Expected exactly one Must Staple extension")
def test_make_csr_without_hostname(self):
self.assertRaises(ValueError, self._call_with_key)
class DumpPyopensslChainTest(unittest.TestCase):
"""Test for dump_pyopenssl_chain."""

View File

@@ -24,8 +24,8 @@ class MissingNonceTest(unittest.TestCase):
self.error = MissingNonce(self.response)
def test_str(self):
self.assertIn("FOO", str(self.error))
self.assertIn("{}", str(self.error))
self.assertTrue("FOO" in str(self.error))
self.assertTrue("{}" in str(self.error))
class PollErrorTest(unittest.TestCase):

View File

@@ -10,8 +10,8 @@ class FixedTest(unittest.TestCase):
"""Tests for acme.fields.Fixed."""
def setUp(self):
from acme.fields import fixed
self.field = fixed('name', 'x')
from acme.fields import Fixed
self.field = Fixed('name', 'x')
def test_decode(self):
self.assertEqual('x', self.field.decode('x'))

View File

@@ -48,7 +48,7 @@ class JWSTest(unittest.TestCase):
self.assertEqual(jws.signature.combined.nonce, self.nonce)
self.assertEqual(jws.signature.combined.url, self.url)
self.assertEqual(jws.signature.combined.kid, self.kid)
self.assertIsNone(jws.signature.combined.jwk)
self.assertEqual(jws.signature.combined.jwk, None)
# TODO: check that nonce is in protected header
self.assertEqual(jws, JWS.from_json(jws.to_json()))
@@ -58,7 +58,7 @@ class JWSTest(unittest.TestCase):
jws = JWS.sign(payload=b'foo', key=self.privkey,
alg=jose.RS256, nonce=self.nonce,
url=self.url)
self.assertIsNone(jws.signature.combined.kid)
self.assertEqual(jws.signature.combined.kid, None)
self.assertEqual(jws.signature.combined.jwk, self.pubkey)

View File

@@ -41,13 +41,13 @@ class ErrorTest(unittest.TestCase):
def test_description(self):
self.assertEqual('The request message was malformed', self.error.description)
self.assertIsNone(self.error_custom.description)
self.assertTrue(self.error_custom.description is None)
def test_code(self):
from acme.messages import Error
self.assertEqual('malformed', self.error.code)
self.assertIsNone(self.error_custom.code)
self.assertIsNone(Error().code)
self.assertEqual(None, self.error_custom.code)
self.assertEqual(None, Error().code)
def test_is_acme_error(self):
from acme.messages import is_acme_error, Error
@@ -260,10 +260,10 @@ class RegistrationTest(unittest.TestCase):
self.assertEqual(empty_new_reg.contact, ())
self.assertEqual(new_reg_with_contact.contact, ())
self.assertNotIn('contact', empty_new_reg.to_partial_json())
self.assertNotIn('contact', empty_new_reg.fields_to_partial_json())
self.assertIn('contact', new_reg_with_contact.to_partial_json())
self.assertIn('contact', new_reg_with_contact.fields_to_partial_json())
self.assertTrue('contact' not in empty_new_reg.to_partial_json())
self.assertTrue('contact' not in empty_new_reg.fields_to_partial_json())
self.assertTrue('contact' in new_reg_with_contact.to_partial_json())
self.assertTrue('contact' in new_reg_with_contact.fields_to_partial_json())
class UpdateRegistrationTest(unittest.TestCase):
@@ -406,7 +406,7 @@ class AuthorizationResourceTest(unittest.TestCase):
authzr = AuthorizationResource(
uri=mock.sentinel.uri,
body=mock.sentinel.body)
self.assertIsInstance(authzr, jose.JSONDeSerializable)
self.assertTrue(isinstance(authzr, jose.JSONDeSerializable))
class CertificateRequestTest(unittest.TestCase):
@@ -417,7 +417,7 @@ class CertificateRequestTest(unittest.TestCase):
self.req = CertificateRequest(csr=CSR)
def test_json_de_serializable(self):
self.assertIsInstance(self.req, jose.JSONDeSerializable)
self.assertTrue(isinstance(self.req, jose.JSONDeSerializable))
from acme.messages import CertificateRequest
self.assertEqual(
self.req, CertificateRequest.from_json(self.req.to_json()))
@@ -433,7 +433,7 @@ class CertificateResourceTest(unittest.TestCase):
cert_chain_uri=mock.sentinel.cert_chain_uri)
def test_json_de_serializable(self):
self.assertIsInstance(self.certr, jose.JSONDeSerializable)
self.assertTrue(isinstance(self.certr, jose.JSONDeSerializable))
from acme.messages import CertificateResource
self.assertEqual(
self.certr, CertificateResource.from_json(self.certr.to_json()))

View File

@@ -165,6 +165,7 @@ class TLSALPN01ServerTest(unittest.TestCase):
class BaseDualNetworkedServersTest(unittest.TestCase):
"""Test for acme.standalone.BaseDualNetworkedServers."""
class SingleProtocolServer(socketserver.TCPServer):
"""Server that only serves on a single protocol. FreeBSD has this behavior for AF_INET6."""
def __init__(self, *args, **kwargs):
@@ -174,7 +175,7 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
kwargs["bind_and_activate"] = False
else:
self.address_family = socket.AF_INET
super().__init__(*args, **kwargs)
socketserver.TCPServer.__init__(self, *args, **kwargs)
if ipv6:
# NB: On Windows, socket.IPPROTO_IPV6 constant may be missing.
# We use the corresponding value (41) instead.
@@ -189,17 +190,12 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
@mock.patch("socket.socket.bind")
def test_fail_to_bind(self, mock_bind):
from errno import EADDRINUSE
mock_bind.side_effect = socket.error
from acme.standalone import BaseDualNetworkedServers
mock_bind.side_effect = socket.error(EADDRINUSE, "Fake addr in use error")
with self.assertRaises(socket.error) as em:
BaseDualNetworkedServers(
BaseDualNetworkedServersTest.SingleProtocolServer,
('', 0), socketserver.BaseRequestHandler)
self.assertEqual(em.exception.errno, EADDRINUSE)
self.assertRaises(socket.error, BaseDualNetworkedServers,
BaseDualNetworkedServersTest.SingleProtocolServer,
('', 0),
socketserver.BaseRequestHandler)
def test_ports_equal(self):
from acme.standalone import BaseDualNetworkedServers

View File

@@ -10,7 +10,6 @@ from cryptography.hazmat.primitives import serialization
import josepy as jose
from OpenSSL import crypto
import pkg_resources
from josepy.util import ComparableECKey
def load_vector(*names):
@@ -61,14 +60,6 @@ def load_rsa_private_key(*names):
load_vector(*names), password=None, backend=default_backend()))
def load_ecdsa_private_key(*names):
"""Load ECDSA private key."""
loader = _guess_loader(names[-1], serialization.load_pem_private_key,
serialization.load_der_private_key)
return ComparableECKey(loader(
load_vector(*names), password=None, backend=default_backend()))
def load_pyopenssl_private_key(*names):
"""Load pyOpenSSL private key."""
loader = _guess_loader(

View File

@@ -15,7 +15,3 @@ and for the certificates:
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 -outform DER > cert.der
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 > rsa2048_cert.pem
openssl req -key rsa1024_key.pem -new -subj '/CN=example.com' -x509 > rsa1024_cert.pem
and for the elliptic key curves:
openssl genpkey -algorithm EC -out ec_secp384r1.pem -pkeyopt ec_paramgen_curve:P-384 -pkeyopt ec_param_enc:named_curve

View File

@@ -1,21 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDizCCAnOgAwIBAgIIPNBLQXwhoUkwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxNzNiMjYwHhcNMjAwNTI5MTkxODA5
WhcNMjUwNTI5MTkxODA5WjAWMRQwEgYDVQQDEwsxOTIuMC4yLjE0NTCCASIwDQYJ
KoZIhvcNAQEBBQADggEPADCCAQoCggEBALyChb+NDA26GF1AfC0nzEdfOTchKw0h
q41xEjonvg5UXgZf/aH/ntvugIkYP0MaFifNAjebOVVsemEVEtyWcUKTfBHKZGbZ
ukTDwFIjfTccCfo6U/B2H7ZLzJIywl8DcUw9DypadeQBm8PS0VVR2ncy73dvaqym
crhAwlASyXU0mhLqRDMMxfg5Bn/FWpcsIcDpLmPn8Q/FvdRc2t5ryBNw/aWOlwqT
Oy16nbfLj2T0zG1A3aPuD+eT/JFUe/o3K7R+FAx7wt+RziQO46wLVVF1SueZUrIU
zqN04Gl8Kt1WM2SniZ0gq/rORUNcPtT0NAEsEslTQfA+Trq6j2peqyMCAwEAAaOB
yjCBxzAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUF
BwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHj1mwZzP//nMIH2i58NRUl/arHn
MB8GA1UdIwQYMBaAFF5DVAKabvIUvKFHGouscA2Qdpe6MDEGCCsGAQUFBwEBBCUw
IzAhBggrBgEFBQcwAYYVaHR0cDovLzEyNy4wLjAuMTo0MDAyMBUGA1UdEQQOMAyH
BMAAApGHBMsAcQEwDQYJKoZIhvcNAQELBQADggEBAHjSgDg76/UCIMSYddyhj18r
LdNKjA7p8ovnErSkebFT4lIZ9f3Sma9moNr0w64M33NamuFyHe/KTdk90mvoW8Uu
26aDekiRIeeMakzbAtDKn67tt2tbedKIYRATcSYVwsV46uZKbM621dZKIjjxOWpo
IY6rZYrku8LYhoXJXOqRduV3cTRVuTm5bBa9FfVNtt6N1T5JOtKKDEhuSaF4RSug
PDy3hQIiHrVvhPfVrXU3j6owz/8UCS5549inES9ONTFrvM9o0H1R/MsmGNXR5hF5
iJqHKC7n8LZujhVnoFIpHu2Dsiefbfr+yRYJS4I+ezy6Nq/Ok8rc8zp0eoX+uyY=
-----END CERTIFICATE-----

View File

@@ -1,22 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDmzCCAoOgAwIBAgIIFdxeZP+v2rgwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSA0M2M5NTcwHhcNMjAwNTMwMDQwNzMw
WhcNMjUwNTMwMDQwNzMwWjAOMQwwCgYDVQQDEwM6OjEwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQC7VidVduJvqKtrSH0fw6PjE0cqL4Kfzo7klexWUkHG
KVAa0fRVZFZ462jxKOt417V2U4WJQ6WHHO9PJ+3gW62d/MhCw8FRtUQS4nYFjqB6
32+RFU21VRN7cWoQEqSwnEPbh/v/zv/KS5JhQ+swWUo79AOLm1kjnZWCKtcqh1Lc
Ug5Tkpot6luoxTKp52MkchvXDpj0q2B/XpLJ8/pw5cqjv7mH12EDOK2HXllA+WwX
ZpstcEhaA4FqtaHOW/OHnwTX5MUbINXE5YYHVEDR6moVM31/W/3pe9NDUMTDE7Si
lVQnZbXM9NYbzZqlh+WhemDWwnIfGI6rtsfNEiirVEOlAgMBAAGjgeIwgd8wDgYD
VR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNV
HRMBAf8EAjAAMB0GA1UdDgQWBBS8DL+MZfDIy6AKky69Tgry2Vxq5DAfBgNVHSME
GDAWgBRAsFqVenRRKgB1YPzWKzb9bzZ/ozAxBggrBgEFBQcBAQQlMCMwIQYIKwYB
BQUHMAGGFWh0dHA6Ly8xMjcuMC4wLjE6NDAwMjAtBgNVHREEJjAkhxAAAAAAAAAA
AAAAAAAAAAABhxCjvjLzIG7HXQlWDO6YWF7FMA0GCSqGSIb3DQEBCwUAA4IBAQBY
M9UTZ3uaKMQ+He9kWR3p9jh6hTSD0FNi79ZdfkG0lgSzhhduhN7OhzQH2ihUUfa6
rtKTw74fGbszhizCd9UB8YPKlm3si1Xbg6ZUQlA1RtoQo7RUGEa6ZbR68PKGm9Go
hTTFIl/JS8jzxBR8jywZdyqtprUx+nnNUDiNk0hJtFLhw7OJH0AHlAUNqHsfD08m
HXRdaV6q14HXU5g31slBat9H4D6tCU/2uqBURwW0wVdnqh4QeRfAeqiatJS9EmSF
ctbc7n894Idy2Xce7NFoIy5cht3m6Rd42o/LmBsJopBmQcDPZT70/XzRtc2qE0cS
CzBIGQHUJ6BfmBjrCQnp
-----END CERTIFICATE-----

View File

@@ -1,16 +0,0 @@
-----BEGIN CERTIFICATE REQUEST-----
MIICbTCCAVUCAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKT/
CE7Y5EYBvI4p7Frt763upIKHDHO/R5/TWMjG8Jm9qTMui8sbMgyh2Yh+lR/j/5Xd
tQrhgC6wx10MrW2+3JtYS88HP1p6si8zU1dbK34n3NyyklR2RivW0R7dXgnYNy7t
5YcDYLCrbRMIPINV/uHrmzIHWYUDNcZVdAfIM2AHfKYuV6Mepcn///5GR+l4GcAh
Nkf9CW8OdAIuKdbyLCxVr0mUW/vJz1b12uxPsgUdax9sjXgZdT4pfMXADsFd1NeF
atpsXU073inqtHru+2F9ijHTQ75TC+u/rr6eYl3BnBntac0gp/ADtDBii7/Q1JOO
Bhq7xJNqqxIEdiyM7zcCAwEAAaAoMCYGCSqGSIb3DQEJDjEZMBcwFQYDVR0RBA4w
DIcEwAACkYcEywBxATANBgkqhkiG9w0BAQsFAAOCAQEADG5g3zdbSCaXpZhWHkzE
Mek3f442TUE1pB+ITRpthmM4N3zZWETYmbLCIAO624uMrRnbCCMvAoLs/L/9ETg/
XMMFtonQC8u9i9tV8B1ceBh8lpIfa+8b9TMWH3bqnrbWQ+YIl+Yd0gXiCZWJ9vK4
eM1Gddu/2bR6s/k4h/XAWRgEexqk57EHr1z0N+T9OoX939n3mVcNI+u9kfd5VJ0z
VyA3R8WR6T6KlEl5P5pcWe5Kuyhi7xMmLVImXqBtvKq4O1AMfM+gQr/yn9aE8IRq
khP7JrMBLUIub1c/qu2TfvnynNPSM/ZcOX+6PHdHmRkR3nI0Ndpv7Ntv31FTplAm
Dw==
-----END CERTIFICATE REQUEST-----

View File

@@ -1,16 +0,0 @@
-----BEGIN CERTIFICATE REQUEST-----
MIIChTCCAW0CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOIc
UAppcqJfTkSqqOFqGt1v7lIJZPOcF4bcKI3d5cHAGbOuVxbC7uMaDuObwYLzoiED
qnvs1NaEq2phO6KsgGESB7IE2LUjJivO7OnSZjNRpL5si/9egvBiNCn/50lULaWG
gLEuyMfk3awZy2mVAymy7Grhbx069A4TH8TqsHuq2RpKyuDL27e/jUt6yYecb3pu
hWMiWy3segif4tI46pkOW0/I6DpxyYD2OqOvzxm/voS9RMqE2+7YJA327H7bEi3N
lJZEZ1zy7clZ9ga5fBQaetzbg2RyxTrZ7F919NQXSFoXgxb10Eg64wIpz0L3ooCm
GEHehsZZexa3J5ccIvMCAwEAAaBAMD4GCSqGSIb3DQEJDjExMC8wLQYDVR0RBCYw
JIcQAAAAAAAAAAAAAAAAAAAAAYcQo74y8yBux10JVgzumFhexTANBgkqhkiG9w0B
AQsFAAOCAQEALvwVn0A/JPTCiNzcozHFnp5M23C9PXCplWc5u4k34d4XXzpSeFDz
fL4gy7NpYIueme2K2ppw2j3PNQUdR6vQ5a75sriegWYrosL+7Q6Joh51ZyEUZQoD
mNl4M4S4oX85EaChR6NFGBywTfjFarYi32XBTbFE7rK8N8KM+DQkNdwL1MXqaHWz
F1obQKpNXlLedbCBOteV5Eg4zG3565zu/Gw/NhwzzV3mQmgxUcd1sMJxAfHQz4Vl
ImLL+xMcR03nDsH2bgtDbK2tJm7WszSxA9tC+Xp2lRewxrnQloRWPYDz177WGQ5Q
SoGDzTTtA6uWZxG8h7CkNLOGvA8LtU2rNA==
-----END CERTIFICATE REQUEST-----

View File

@@ -1,16 +0,0 @@
-----BEGIN CERTIFICATE REQUEST-----
MIICdjCCAV4CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMXq
v1y8EIcCbaUIzCtOcLkLS0MJ35oS+6DmV5WB1A0cIk6YrjsHIsY2lwMm13BWIvmw
tY+Y6n0rr7eViNx5ZRGHpHEI/TL3Neb+VefTydL5CgvK3dd4ex2kSbTaed3fmpOx
qMajEduwNcZPCcmoEXPkfrCP8w2vKQUkQ+JRPcdX1nTuzticeRP5B7YCmJsmxkEh
Y0tzzZ+NIRDARoYNofefY86h3e5q66gtJxccNchmIM3YQahhg5n3Xoo8hGfM/TIc
R7ncCBCLO6vtqo0QFva/NQODrgOmOsmgvqPkUWQFdZfWM8yIaU826dktx0CPB78t
TudnJ1rBRvGsjHMsZikCAwEAAaAxMC8GCSqGSIb3DQEJDjEiMCAwHgYDVR0RBBcw
FYINYS5leGVtcGxlLmNvbYcEwAACbzANBgkqhkiG9w0BAQsFAAOCAQEAdGMcRCxq
1X09gn1TNdMt64XUv+wdJCKDaJ+AgyIJj7QvVw8H5k7dOnxS4I+a/yo4jE+LDl2/
AuHcBLFEI4ddewdJSMrTNZjuRYuOdr3KP7fL7MffICSBi45vw5EOXg0tnjJCEiKu
6gcJgbLSP5JMMd7Haf33Q/VWsmHofR3VwOMdrnakwAU3Ff5WTuXTNVhL1kT/uLFX
yW1ru6BF4unwNqSR2UeulljpNfRBsiN4zJK11W6n9KT0NkBr9zY5WCM4sW7i8k9V
TeypWGo3jBKzYAGeuxZsB97U77jZ2lrGdBLZKfbcjnTeRVqCvCRrui4El7UGYFmj
7s6OJyWx5DSV8w==
-----END CERTIFICATE REQUEST-----

View File

@@ -1,6 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDArTn0pbFk3xHfKeXte
xJgS4JVdJQ8mqvezhaNpULZPnwb+mcKLlrj6f5SRM52yREGhZANiAAQcrMoPMVqV
rHnDGGz5HUKLNmXfChlNgsrwsruawXF+M283CA6eckAjTXNyiC/ounWmvtoKsZG0
2UQOfQUNSCANId/r986yRGc03W6RJSkcRp86qBYjNsLgbZpber/3+M4=
-----END PRIVATE KEY-----

View File

@@ -3,6 +3,5 @@ include README.rst
recursive-include tests *
recursive-include certbot_apache/_internal/augeas_lens *.aug
recursive-include certbot_apache/_internal/tls_configs *.conf
include certbot_apache/py.typed
global-exclude __pycache__
global-exclude *.py[cod]

View File

@@ -4,11 +4,6 @@ import fnmatch
import logging
import re
import subprocess
from typing import Dict
from typing import Iterable
from typing import List
from typing import Optional
from typing import Tuple
import pkg_resources
@@ -19,7 +14,7 @@ from certbot.compat import os
logger = logging.getLogger(__name__)
def get_mod_deps(mod_name: str) -> List[str]:
def get_mod_deps(mod_name):
"""Get known module dependencies.
.. note:: This does not need to be accurate in order for the client to
@@ -38,7 +33,7 @@ def get_mod_deps(mod_name: str) -> List[str]:
return deps.get(mod_name, [])
def get_file_path(vhost_path: str) -> Optional[str]:
def get_file_path(vhost_path):
"""Get file path from augeas_vhost_path.
Takes in Augeas path and returns the file name
@@ -55,7 +50,7 @@ def get_file_path(vhost_path: str) -> Optional[str]:
return _split_aug_path(vhost_path)[0]
def get_internal_aug_path(vhost_path: str) -> str:
def get_internal_aug_path(vhost_path):
"""Get the Augeas path for a vhost with the file path removed.
:param str vhost_path: Augeas virtual host path
@@ -67,7 +62,7 @@ def get_internal_aug_path(vhost_path: str) -> str:
return _split_aug_path(vhost_path)[1]
def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
def _split_aug_path(vhost_path):
"""Splits an Augeas path into a file path and an internal path.
After removing "/files", this function splits vhost_path into the
@@ -81,7 +76,7 @@ def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
"""
# Strip off /files
file_path = vhost_path[6:]
internal_path: List[str] = []
internal_path = []
# Remove components from the end of file_path until it becomes valid
while not os.path.exists(file_path):
@@ -91,7 +86,7 @@ def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
return file_path, "/".join(reversed(internal_path))
def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
def parse_define_file(filepath, varname):
""" Parses Defines from a variable in configuration file
:param str filepath: Path of file to parse
@@ -101,7 +96,7 @@ def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
:rtype: `dict`
"""
return_vars: Dict[str, str] = {}
return_vars = {}
# Get list of words in the variable
a_opts = util.get_var_from_file(varname, filepath).split()
for i, v in enumerate(a_opts):
@@ -116,27 +111,28 @@ def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
return return_vars
def unique_id() -> str:
def unique_id():
""" Returns an unique id to be used as a VirtualHost identifier"""
return binascii.hexlify(os.urandom(16)).decode("utf-8")
def included_in_paths(filepath: str, paths: Iterable[str]) -> bool:
def included_in_paths(filepath, paths):
"""
Returns true if the filepath is included in the list of paths
that may contain full paths or wildcard paths that need to be
expanded.
:param str filepath: Filepath to check
:param list paths: List of paths to check against
:params list paths: List of paths to check against
:returns: True if included
:rtype: bool
"""
return any(fnmatch.fnmatch(filepath, path) for path in paths)
def parse_defines(apachectl: str) -> Dict[str, str]:
def parse_defines(apachectl):
"""
Gets Defines from httpd process and returns a dictionary of
the defined variables.
@@ -147,7 +143,7 @@ def parse_defines(apachectl: str) -> Dict[str, str]:
:rtype: dict
"""
variables: Dict[str, str] = {}
variables = {}
define_cmd = [apachectl, "-t", "-D",
"DUMP_RUN_CFG"]
matches = parse_from_subprocess(define_cmd, r"Define: ([^ \n]*)")
@@ -157,15 +153,18 @@ def parse_defines(apachectl: str) -> Dict[str, str]:
return {}
for match in matches:
# Value could also contain = so split only once
parts = match.split('=', 1)
value = parts[1] if len(parts) == 2 else ''
variables[parts[0]] = value
if match.count("=") > 1:
logger.error("Unexpected number of equal signs in "
"runtime config dump.")
raise errors.PluginError(
"Error parsing Apache runtime variables")
parts = match.partition("=")
variables[parts[0]] = parts[2]
return variables
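Schematically, the Define parsing above boils down to the following (the sample apachectl output is invented; no subprocess is run in this sketch):

import re

dump_run_cfg = """\
Define: DUMP_RUN_CFG
Define: MODSEC_RULES=/etc/apache2/modsec/rules.conf
"""

variables = {}
for match in re.compile(r"Define: ([^ \n]*)").findall(dump_run_cfg):
    name, _, value = match.partition("=")  # value may be empty
    variables[name] = value
print(variables)
# {'DUMP_RUN_CFG': '', 'MODSEC_RULES': '/etc/apache2/modsec/rules.conf'}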
def parse_includes(apachectl: str) -> List[str]:
def parse_includes(apachectl):
"""
Gets Include directives from httpd process and returns a list of
their values.
@@ -176,11 +175,12 @@ def parse_includes(apachectl: str) -> List[str]:
:rtype: list of str
"""
inc_cmd: List[str] = [apachectl, "-t", "-D", "DUMP_INCLUDES"]
inc_cmd = [apachectl, "-t", "-D",
"DUMP_INCLUDES"]
return parse_from_subprocess(inc_cmd, r"\(.*\) (.*)")
def parse_modules(apachectl: str) -> List[str]:
def parse_modules(apachectl):
"""
Get loaded modules from httpd process, and return the list
of loaded module names.
@@ -191,11 +191,12 @@ def parse_modules(apachectl: str) -> List[str]:
:rtype: list of str
"""
mod_cmd = [apachectl, "-t", "-D", "DUMP_MODULES"]
mod_cmd = [apachectl, "-t", "-D",
"DUMP_MODULES"]
return parse_from_subprocess(mod_cmd, r"(.*)_module")
def parse_from_subprocess(command: List[str], regexp: str) -> List[str]:
def parse_from_subprocess(command, regexp):
"""Get values from stdout of subprocess command
:param list command: Command to run
@@ -209,7 +210,7 @@ def parse_from_subprocess(command: List[str], regexp: str) -> List[str]:
return re.compile(regexp).findall(stdout)
def _get_runtime_cfg(command: List[str]) -> str:
def _get_runtime_cfg(command):
"""
Get runtime configuration info.
@@ -219,14 +220,13 @@ def _get_runtime_cfg(command: List[str]) -> str:
"""
try:
proc = subprocess.run(
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
check=False,
env=util.env_no_snap_for_external_calls())
stdout, stderr = proc.stdout, proc.stderr
stdout, stderr = proc.communicate()
except (OSError, ValueError):
logger.error(
@@ -244,8 +244,7 @@ def _get_runtime_cfg(command: List[str]) -> str:
return stdout
def find_ssl_apache_conf(prefix: str) -> str:
def find_ssl_apache_conf(prefix):
"""
Find a TLS Apache config file in the dedicated storage.
:param str prefix: prefix of the TLS Apache config file to find

View File

@@ -1,14 +1,9 @@
""" apacheconfig implementation of the ParserNode interfaces """
from typing import Any
from typing import Iterable
from typing import List
from typing import Optional
from typing import Tuple
from certbot_apache._internal import assertions
from certbot_apache._internal import interfaces
from certbot_apache._internal import parsernode_util as util
from certbot_apache._internal.interfaces import ParserNode
class ApacheParserNode(interfaces.ParserNode):
@@ -18,20 +13,19 @@ class ApacheParserNode(interfaces.ParserNode):
by parsing the equivalent configuration text using the apacheconfig library.
"""
def __init__(self, **kwargs: Any) -> None:
# pylint: disable=unused-variable
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)
super().__init__(**kwargs)
def __init__(self, **kwargs):
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs) # pylint: disable=unused-variable
super(ApacheParserNode, self).__init__(**kwargs)
self.ancestor = ancestor
self.filepath = filepath
self.dirty = dirty
self.metadata = metadata
self._raw: Any = self.metadata["ac_ast"]
self._raw = self.metadata["ac_ast"]
def save(self, msg: str) -> None:
pass # pragma: no cover
def save(self, msg): # pragma: no cover
pass
def find_ancestors(self, name: str) -> List["ApacheParserNode"]: # pylint: disable=unused-variable
def find_ancestors(self, name): # pylint: disable=unused-variable
"""Find ancestor BlockNodes with a given name"""
return [ApacheBlockNode(name=assertions.PASS,
parameters=assertions.PASS,
@@ -43,33 +37,33 @@ class ApacheParserNode(interfaces.ParserNode):
class ApacheCommentNode(ApacheParserNode):
""" apacheconfig implementation of CommentNode interface """
def __init__(self, **kwargs: Any) -> None:
comment, kwargs = util.commentnode_kwargs(kwargs)
super().__init__(**kwargs)
def __init__(self, **kwargs):
comment, kwargs = util.commentnode_kwargs(kwargs) # pylint: disable=unused-variable
super(ApacheCommentNode, self).__init__(**kwargs)
self.comment = comment
def __eq__(self, other: Any) -> bool:
def __eq__(self, other): # pragma: no cover
if isinstance(other, self.__class__):
return (self.comment == other.comment and
self.dirty == other.dirty and
self.ancestor == other.ancestor and
self.metadata == other.metadata and
self.filepath == other.filepath)
return False # pragma: no cover
return False
class ApacheDirectiveNode(ApacheParserNode):
""" apacheconfig implementation of DirectiveNode interface """
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
super().__init__(**kwargs)
super(ApacheDirectiveNode, self).__init__(**kwargs)
self.name = name
self.parameters = parameters
self.enabled = enabled
self.include: Optional[str] = None
self.include = None
def __eq__(self, other: Any) -> bool:
def __eq__(self, other): # pragma: no cover
if isinstance(other, self.__class__):
return (self.name == other.name and
self.filepath == other.filepath and
@@ -78,21 +72,21 @@ class ApacheDirectiveNode(ApacheParserNode):
self.dirty == other.dirty and
self.ancestor == other.ancestor and
self.metadata == other.metadata)
return False # pragma: no cover
return False
def set_parameters(self, _parameters: Iterable[str]) -> None:
def set_parameters(self, _parameters): # pragma: no cover
"""Sets the parameters for DirectiveNode"""
return # pragma: no cover
return
class ApacheBlockNode(ApacheDirectiveNode):
""" apacheconfig implementation of BlockNode interface """
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
def __init__(self, **kwargs):
super(ApacheBlockNode, self).__init__(**kwargs)
self.children: Tuple[ApacheParserNode, ...] = ()
def __eq__(self, other: Any) -> bool:
def __eq__(self, other): # pragma: no cover
if isinstance(other, self.__class__):
return (self.name == other.name and
self.filepath == other.filepath and
@@ -102,11 +96,10 @@ class ApacheBlockNode(ApacheDirectiveNode):
self.dirty == other.dirty and
self.ancestor == other.ancestor and
self.metadata == other.metadata)
return False # pragma: no cover
return False
# pylint: disable=unused-argument
def add_child_block(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> "ApacheBlockNode": # pragma: no cover
def add_child_block(self, name, parameters=None, position=None): # pragma: no cover
"""Adds a new BlockNode to the sequence of children"""
new_block = ApacheBlockNode(name=assertions.PASS,
parameters=assertions.PASS,
@@ -117,8 +110,7 @@ class ApacheBlockNode(ApacheDirectiveNode):
return new_block
# pylint: disable=unused-argument
def add_child_directive(self, name: str, parameters: Optional[List[str]] = None,
position: int = None) -> ApacheDirectiveNode: # pragma: no cover
def add_child_directive(self, name, parameters=None, position=None): # pragma: no cover
"""Adds a new DirectiveNode to the sequence of children"""
new_dir = ApacheDirectiveNode(name=assertions.PASS,
parameters=assertions.PASS,
@@ -129,9 +121,7 @@ class ApacheBlockNode(ApacheDirectiveNode):
return new_dir
# pylint: disable=unused-argument
def add_child_comment(
self, name: str, parameters: Optional[int] = None, position: Optional[int] = None
) -> ApacheCommentNode: # pragma: no cover
def add_child_comment(self, comment="", position=None): # pragma: no cover
"""Adds a new CommentNode to the sequence of children"""
new_comment = ApacheCommentNode(comment=assertions.PASS,
@@ -141,7 +131,7 @@ class ApacheBlockNode(ApacheDirectiveNode):
self.children += (new_comment,)
return new_comment
def find_blocks(self, name: str, exclude: bool = True) -> List["ApacheBlockNode"]: # pylint: disable=unused-argument
def find_blocks(self, name, exclude=True): # pylint: disable=unused-argument
"""Recursive search of BlockNodes from the sequence of children"""
return [ApacheBlockNode(name=assertions.PASS,
parameters=assertions.PASS,
@@ -149,7 +139,7 @@ class ApacheBlockNode(ApacheDirectiveNode):
filepath=assertions.PASS,
metadata=self.metadata)]
def find_directives(self, name: str, exclude: bool = True) -> List[ApacheDirectiveNode]: # pylint: disable=unused-argument
def find_directives(self, name, exclude=True): # pylint: disable=unused-argument
"""Recursive search of DirectiveNodes from the sequence of children"""
return [ApacheDirectiveNode(name=assertions.PASS,
parameters=assertions.PASS,
@@ -158,22 +148,22 @@ class ApacheBlockNode(ApacheDirectiveNode):
metadata=self.metadata)]
# pylint: disable=unused-argument
def find_comments(self, comment: str, exact: bool = False) -> List[ApacheCommentNode]:
def find_comments(self, comment, exact=False): # pragma: no cover
"""Recursive search of DirectiveNodes from the sequence of children"""
return [ApacheCommentNode(comment=assertions.PASS, # pragma: no cover
return [ApacheCommentNode(comment=assertions.PASS,
ancestor=self,
filepath=assertions.PASS,
metadata=self.metadata)]
def delete_child(self, child: ParserNode) -> None:
def delete_child(self, child): # pragma: no cover
"""Deletes a ParserNode from the sequence of children"""
return # pragma: no cover
return
def unsaved_files(self) -> List[str]:
def unsaved_files(self): # pragma: no cover
"""Returns a list of unsaved filepaths"""
return [assertions.PASS] # pragma: no cover
return [assertions.PASS]
def parsed_paths(self) -> List[str]:
def parsed_paths(self): # pragma: no cover
"""Returns a list of parsed configuration file paths"""
return [assertions.PASS]

View File

@@ -1,21 +1,12 @@
"""Dual parser node assertions"""
import fnmatch
from typing import Any
from typing import Iterable
from typing import List
from typing import Optional
from typing import Union
from certbot_apache._internal import interfaces
from certbot_apache._internal.interfaces import CommentNode
from certbot_apache._internal.interfaces import DirectiveNode
from certbot_apache._internal.interfaces import ParserNode
from certbot_apache._internal.obj import VirtualHost
PASS = "CERTBOT_PASS_ASSERT"
def assertEqual(first: ParserNode, second: ParserNode) -> None:
def assertEqual(first, second):
""" Equality assertion """
if isinstance(first, interfaces.CommentNode):
@@ -38,98 +29,84 @@ def assertEqual(first: ParserNode, second: ParserNode) -> None:
# (but identical) directory structures.
assert first.filepath == second.filepath
def assertEqualComment(first: ParserNode, second: ParserNode) -> None: # pragma: no cover
def assertEqualComment(first, second): # pragma: no cover
""" Equality assertion for CommentNode """
assert isinstance(first, interfaces.CommentNode)
assert isinstance(second, interfaces.CommentNode)
if not isPass(first.comment) and not isPass(second.comment):
assert first.comment == second.comment
if not isPass(first.comment) and not isPass(second.comment): # type: ignore
assert first.comment == second.comment # type: ignore
def _assertEqualDirectiveComponents(first: ParserNode, # pragma: no cover
second: ParserNode) -> None:
def _assertEqualDirectiveComponents(first, second): # pragma: no cover
""" Handles assertion for instance variables for DirectiveNode and BlockNode"""
# Enabled value cannot be asserted, because Augeas implementation
# is unable to figure that out.
# assert first.enabled == second.enabled
assert isinstance(first, DirectiveNode)
assert isinstance(second, DirectiveNode)
if not isPass(first.name) and not isPass(second.name):
assert first.name == second.name
if not isPass(first.parameters) and not isPass(second.parameters):
assert first.parameters == second.parameters
def assertEqualDirective(first: ParserNode, second: ParserNode) -> None:
def assertEqualDirective(first, second):
""" Equality assertion for DirectiveNode """
assert isinstance(first, interfaces.DirectiveNode)
assert isinstance(second, interfaces.DirectiveNode)
_assertEqualDirectiveComponents(first, second)
def isPass(value: Any) -> bool: # pragma: no cover
def isPass(value): # pragma: no cover
"""Checks if the value is set to PASS"""
if isinstance(value, bool):
return True
return PASS in value
def isPassDirective(block: DirectiveNode) -> bool:
def isPassDirective(block):
""" Checks if BlockNode or DirectiveNode should pass the assertion """
if isPass(block.name):
return True
if isPass(block.parameters): # pragma: no cover
if isPass(block.parameters): # pragma: no cover
return True
if isPass(block.filepath): # pragma: no cover
if isPass(block.filepath): # pragma: no cover
return True
return False
def isPassComment(comment: CommentNode) -> bool:
def isPassComment(comment):
""" Checks if CommentNode should pass the assertion """
if isPass(comment.comment):
return True
if isPass(comment.filepath): # pragma: no cover
if isPass(comment.filepath): # pragma: no cover
return True
return False
def isPassNodeList(nodelist: List[Union[DirectiveNode, CommentNode]]) -> bool: # pragma: no cover
def isPassNodeList(nodelist): # pragma: no cover
""" Checks if a ParserNode in the nodelist should pass the assertion,
this function is used for results of find_* methods. Unimplemented find_*
methods should return a sequence containing a single ParserNode instance
with assertion pass string."""
node: Optional[Union[DirectiveNode, CommentNode]]
try:
node = nodelist[0]
except IndexError:
node = None
if not node: # pragma: no cover
if not node: # pragma: no cover
return False
if isinstance(node, interfaces.DirectiveNode):
return isPassDirective(node)
return isPassComment(node)
def assertEqualSimple(first: Any, second: Any) -> None:
def assertEqualSimple(first, second):
""" Simple assertion """
if not isPass(first) and not isPass(second):
assert first == second
def isEqualVirtualHost(first: VirtualHost, second: VirtualHost) -> bool:
def isEqualVirtualHost(first, second):
"""
Checks that two VirtualHost objects are similar. There are some built
in differences with the implementations: VirtualHost created by ParserNode
@@ -149,8 +126,7 @@ def isEqualVirtualHost(first: VirtualHost, second: VirtualHost) -> bool:
first.ancestor == second.ancestor
)
def assertEqualPathsList(first: Iterable[str], second: Iterable[str]) -> None: # pragma: no cover
def assertEqualPathsList(first, second): # pragma: no cover
"""
Checks that the two lists of file paths match. This assertion allows for wildcard
paths.
@@ -160,6 +136,6 @@ def assertEqualPathsList(first: Iterable[str], second: Iterable[str]) -> None:
if any(isPass(path) for path in second):
return
for fpath in first:
assert any(fnmatch.fnmatch(fpath, spath) for spath in second)
assert any([fnmatch.fnmatch(fpath, spath) for spath in second])
for spath in second:
assert any(fnmatch.fnmatch(fpath, spath) for fpath in first)
assert any([fnmatch.fnmatch(fpath, spath) for fpath in first])
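Below is a minimal, self-contained sketch (not part of the diff) of the wildcard-tolerant path comparison that assertEqualPathsList performs above. paths_match is a hypothetical helper written only for illustration, it omits the PASS short-circuit, and the paths are made-up examples.

import fnmatch

def paths_match(first, second):
    # Every concrete path in `first` must match some (possibly wildcarded)
    # pattern in `second`, and every pattern in `second` must match some
    # path in `first` -- mirroring the two assert loops above.
    return (all(any(fnmatch.fnmatch(f, s) for s in second) for f in first)
            and all(any(fnmatch.fnmatch(f, s) for f in first) for s in second))

print(paths_match(["/etc/apache2/sites-enabled/example.conf"],
                  ["/etc/apache2/sites-enabled/*"]))  # True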

View File

@@ -64,39 +64,28 @@ Translates over to:
"/files/etc/apache2/apache2.conf/bLoCk[1]",
]
"""
from typing import Any
from typing import cast
from typing import Dict
from typing import Iterable
from typing import List
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Union
from certbot import errors
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import assertions
from certbot_apache._internal import interfaces
from certbot_apache._internal import parser
from certbot_apache._internal import parsernode_util as util
from certbot import errors
from certbot.compat import os
class AugeasParserNode(interfaces.ParserNode):
""" Augeas implementation of ParserNode interface """
def __init__(self, **kwargs: Any) -> None:
# pylint: disable=unused-variable
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)
super().__init__(**kwargs)
def __init__(self, **kwargs):
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs) # pylint: disable=unused-variable
super(AugeasParserNode, self).__init__(**kwargs)
self.ancestor = ancestor
self.filepath = filepath
self.dirty = dirty
self.metadata = metadata
self.parser = cast(parser.ApacheParser,
self.metadata.get("augeasparser"))
self.parser = self.metadata.get("augeasparser")
try:
if self.metadata["augeaspath"].endswith("/"):
raise errors.PluginError(
@@ -107,20 +96,20 @@ class AugeasParserNode(interfaces.ParserNode):
except KeyError:
raise errors.PluginError("Augeas path is required")
def save(self, msg: Iterable[str]) -> None:
def save(self, msg):
self.parser.save(msg)
def find_ancestors(self, name: str) -> List["AugeasParserNode"]:
def find_ancestors(self, name):
"""
Searches for ancestor BlockNodes with a given name.
:param str name: Name of the BlockNode parent to search for
:returns: List of matching ancestor nodes.
:rtype: list of AugeasParserNode
:rtype: list of AugeasBlockNode
"""
ancestors: List["AugeasParserNode"] = []
ancestors = []
parent = self.metadata["augeaspath"]
while True:
@@ -130,12 +119,12 @@ class AugeasParserNode(interfaces.ParserNode):
if not parent or parent == "/files":
break
anc = self._create_blocknode(parent)
if anc.name is not None and anc.name.lower() == name.lower():
if anc.name.lower() == name.lower():
ancestors.append(anc)
return ancestors
def _create_blocknode(self, path: str) -> "AugeasBlockNode":
def _create_blocknode(self, path):
"""
Helper function to create a BlockNode from Augeas path. This is used by
AugeasParserNode.find_ancestors and AugeasBlockNode.
@@ -143,25 +132,21 @@ class AugeasParserNode(interfaces.ParserNode):
"""
name: str = self._aug_get_name(path)
metadata: Dict[str, Union[parser.ApacheParser, str]] = {
"augeasparser": self.parser, "augeaspath": path
}
name = self._aug_get_name(path)
metadata = {"augeasparser": self.parser, "augeaspath": path}
# Check if the file was included from the root config or initial state
file_path = apache_util.get_file_path(path)
if file_path is None:
raise ValueError(f"No file path found for vhost: {path}.") # pragma: no cover
enabled = self.parser.parsed_in_original(file_path)
enabled = self.parser.parsed_in_original(
apache_util.get_file_path(path)
)
return AugeasBlockNode(name=name,
enabled=enabled,
ancestor=assertions.PASS,
filepath=file_path,
filepath=apache_util.get_file_path(path),
metadata=metadata)
def _aug_get_name(self, path: str) -> str:
def _aug_get_name(self, path):
"""
Helper function to get name of a configuration block or variable from path.
"""
@@ -175,18 +160,20 @@ class AugeasParserNode(interfaces.ParserNode):
# remove [...], it's not allowed in Apache configuration and is used
# for indexing within Augeas
return name.split("[")[0]
name = name.split("[")[0]
return name
class AugeasCommentNode(AugeasParserNode):
""" Augeas implementation of CommentNode interface """
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
comment, kwargs = util.commentnode_kwargs(kwargs) # pylint: disable=unused-variable
super().__init__(**kwargs)
super(AugeasCommentNode, self).__init__(**kwargs)
# self.comment = comment
self.comment = comment
def __eq__(self, other: Any) -> bool:
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.comment == other.comment and
self.filepath == other.filepath and
@@ -199,15 +186,15 @@ class AugeasCommentNode(AugeasParserNode):
class AugeasDirectiveNode(AugeasParserNode):
""" Augeas implementation of DirectiveNode interface """
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
super().__init__(**kwargs)
super(AugeasDirectiveNode, self).__init__(**kwargs)
self.name = name
self.enabled = enabled
if parameters:
self.set_parameters(parameters)
def __eq__(self, other: Any) -> bool:
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.name == other.name and
self.filepath == other.filepath and
@@ -218,7 +205,7 @@ class AugeasDirectiveNode(AugeasParserNode):
self.metadata == other.metadata)
return False
def set_parameters(self, parameters: Iterable[str]) -> None:
def set_parameters(self, parameters):
"""
Sets parameters of a DirectiveNode or BlockNode object.
@@ -237,7 +224,7 @@ class AugeasDirectiveNode(AugeasParserNode):
self.parser.aug.set(param_path, param)
@property
def parameters(self) -> Tuple[str, ...]:
def parameters(self):
"""
Fetches the parameters from Augeas tree, ensuring that the sequence always
represents the current state
@@ -247,22 +234,21 @@ class AugeasDirectiveNode(AugeasParserNode):
"""
return tuple(self._aug_get_params(self.metadata["augeaspath"]))
def _aug_get_params(self, path: str) -> List[str]:
def _aug_get_params(self, path):
"""Helper function to get parameters for DirectiveNodes and BlockNodes"""
arg_paths = self.parser.aug.match(path + "/arg")
args = [self.parser.get_arg(apath) for apath in arg_paths]
return [arg for arg in args if arg is not None]
return [self.parser.get_arg(apath) for apath in arg_paths]
class AugeasBlockNode(AugeasDirectiveNode):
""" Augeas implementation of BlockNode interface """
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
self.children: Tuple["AugeasBlockNode", ...] = ()
def __init__(self, **kwargs):
super(AugeasBlockNode, self).__init__(**kwargs)
self.children = ()
def __eq__(self, other: Any) -> bool:
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.name == other.name and
self.filepath == other.filepath and
@@ -275,39 +261,33 @@ class AugeasBlockNode(AugeasDirectiveNode):
return False
# pylint: disable=unused-argument
def add_child_block(self, name: str, # pragma: no cover
parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> "AugeasBlockNode":
def add_child_block(self, name, parameters=None, position=None): # pragma: no cover
"""Adds a new BlockNode to the sequence of children"""
insertpath, realpath, before = self._aug_resolve_child_position(
name,
position
)
new_metadata: Dict[str, Any] = {"augeasparser": self.parser, "augeaspath": realpath}
new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}
# Create the new block
self.parser.aug.insert(insertpath, name, before)
# Check if the file was included from the root config or initial state
file_path = apache_util.get_file_path(realpath)
if file_path is None:
raise errors.Error(f"No file path found for vhost: {realpath}") # pragma: no cover
enabled = self.parser.parsed_in_original(file_path)
# Parameters will be set at the initialization of the new object
return AugeasBlockNode(
name=name,
parameters=parameters,
enabled=enabled,
ancestor=assertions.PASS,
filepath=file_path,
metadata=new_metadata,
enabled = self.parser.parsed_in_original(
apache_util.get_file_path(realpath)
)
# Parameters will be set at the initialization of the new object
new_block = AugeasBlockNode(name=name,
parameters=parameters,
enabled=enabled,
ancestor=assertions.PASS,
filepath=apache_util.get_file_path(realpath),
metadata=new_metadata)
return new_block
# pylint: disable=unused-argument
def add_child_directive(self, name: str, # pragma: no cover
parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> AugeasDirectiveNode:
def add_child_directive(self, name, parameters=None, position=None): # pragma: no cover
"""Adds a new DirectiveNode to the sequence of children"""
if not parameters:
@@ -324,50 +304,43 @@ class AugeasBlockNode(AugeasDirectiveNode):
# Set the directive key
self.parser.aug.set(realpath, name)
# Check if the file was included from the root config or initial state
file_path = apache_util.get_file_path(realpath)
if file_path is None:
raise errors.Error(f"No file path found for vhost: {realpath}") # pragma: no cover
enabled = self.parser.parsed_in_original(file_path)
return AugeasDirectiveNode(
name=name,
parameters=parameters,
enabled=enabled,
ancestor=assertions.PASS,
filepath=file_path,
metadata=new_metadata,
enabled = self.parser.parsed_in_original(
apache_util.get_file_path(realpath)
)
def add_child_comment(
self, comment: str = "", position: Optional[int] = None
) -> "AugeasCommentNode":
new_dir = AugeasDirectiveNode(name=name,
parameters=parameters,
enabled=enabled,
ancestor=assertions.PASS,
filepath=apache_util.get_file_path(realpath),
metadata=new_metadata)
return new_dir
def add_child_comment(self, comment="", position=None):
"""Adds a new CommentNode to the sequence of children"""
insertpath, realpath, before = self._aug_resolve_child_position(
"#comment",
position
)
new_metadata: Dict[str, Any] = {
"augeasparser": self.parser, "augeaspath": realpath,
}
new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}
# Create the new comment
self.parser.aug.insert(insertpath, "#comment", before)
# Set the comment content
self.parser.aug.set(realpath, comment)
return AugeasCommentNode(
comment=comment,
ancestor=assertions.PASS,
filepath=apache_util.get_file_path(realpath),
metadata=new_metadata,
)
new_comment = AugeasCommentNode(comment=comment,
ancestor=assertions.PASS,
filepath=apache_util.get_file_path(realpath),
metadata=new_metadata)
return new_comment
def find_blocks(self, name: str, exclude: bool = True) -> List["AugeasBlockNode"]:
def find_blocks(self, name, exclude=True):
"""Recursive search of BlockNodes from the sequence of children"""
nodes: List["AugeasBlockNode"] = []
paths: Iterable[str] = self._aug_find_blocks(name)
nodes = []
paths = self._aug_find_blocks(name)
if exclude:
paths = self.parser.exclude_dirs(paths)
for path in paths:
@@ -375,7 +348,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
return nodes
def find_directives(self, name: str, exclude: bool = True) -> List["AugeasDirectiveNode"]:
def find_directives(self, name, exclude=True):
"""Recursive search of DirectiveNodes from the sequence of children"""
nodes = []
@@ -394,14 +367,14 @@ class AugeasBlockNode(AugeasDirectiveNode):
return nodes
def find_comments(self, comment: str) -> List["AugeasCommentNode"]:
def find_comments(self, comment):
"""
Recursive search of DirectiveNodes from the sequence of children.
:param str comment: Comment content to search for.
"""
nodes: List["AugeasCommentNode"] = []
nodes = []
ownpath = self.metadata.get("augeaspath")
comments = self.parser.find_comments(comment, start=ownpath)
@@ -410,11 +383,11 @@ class AugeasBlockNode(AugeasDirectiveNode):
return nodes
def delete_child(self, child: "AugeasParserNode") -> None:
def delete_child(self, child):
"""
Deletes a ParserNode from the sequence of children, and raises an
exception if it's unable to do so.
:param AugeasParserNode child: A node to delete.
:param AugeasParserNode: child: A node to delete.
"""
if not self.parser.aug.remove(child.metadata["augeaspath"]):
@@ -423,11 +396,11 @@ class AugeasBlockNode(AugeasDirectiveNode):
"seem to exist.").format(child.metadata["augeaspath"])
)
def unsaved_files(self) -> Set[str]:
def unsaved_files(self):
"""Returns a list of unsaved filepaths"""
return self.parser.unsaved_files()
def parsed_paths(self) -> List[str]:
def parsed_paths(self):
"""
Returns a list of file paths that have currently been parsed into the parser
tree. The returned list may include paths with wildcard characters, for
@@ -438,7 +411,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
:returns: list of file paths of files that have been parsed
"""
res_paths: List[str] = []
res_paths = []
paths = self.parser.existing_paths
for directory in paths:
@@ -447,7 +420,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
return res_paths
def _create_commentnode(self, path: str) -> "AugeasCommentNode":
def _create_commentnode(self, path):
"""Helper function to create a CommentNode from Augeas path"""
comment = self.parser.aug.get(path)
@@ -460,16 +433,14 @@ class AugeasBlockNode(AugeasDirectiveNode):
filepath=apache_util.get_file_path(path),
metadata=metadata)
def _create_directivenode(self, path: str) -> "AugeasDirectiveNode":
def _create_directivenode(self, path):
"""Helper function to create a DirectiveNode from Augeas path"""
name = self.parser.get_arg(path)
metadata: Dict[str, Union[parser.ApacheParser, str]] = {
"augeasparser": self.parser, "augeaspath": path,
}
metadata = {"augeasparser": self.parser, "augeaspath": path}
# Check if the file was included from the root config or initial state
enabled: bool = self.parser.parsed_in_original(
enabled = self.parser.parsed_in_original(
apache_util.get_file_path(path)
)
return AugeasDirectiveNode(name=name,
@@ -478,12 +449,12 @@ class AugeasBlockNode(AugeasDirectiveNode):
filepath=apache_util.get_file_path(path),
metadata=metadata)
def _aug_find_blocks(self, name: str) -> Set[str]:
def _aug_find_blocks(self, name):
"""Helper function to perform a search to Augeas DOM tree to search
configuration blocks with a given name"""
# The code here is modified from configurator.get_virtual_hosts()
blk_paths: Set[str] = set()
blk_paths = set()
for vhost_path in list(self.parser.parser_paths):
paths = self.parser.aug.match(
("/files%s//*[label()=~regexp('%s')]" %
@@ -492,8 +463,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
name.lower() in os.path.basename(path).lower()])
return blk_paths
def _aug_resolve_child_position(
self, name: str, position: Optional[int]) -> Tuple[str, str, bool]:
def _aug_resolve_child_position(self, name, position):
"""
Helper function that iterates through the immediate children and figures
out the insertion path for a new AugeasParserNode.
@@ -518,16 +488,16 @@ class AugeasBlockNode(AugeasDirectiveNode):
"""
# Default to appending
before: bool = False
before = False
all_children: str = self.parser.aug.match("{}/*".format(
all_children = self.parser.aug.match("{}/*".format(
self.metadata["augeaspath"])
)
# Calculate resulting_path
# Augeas indices start at 1. We use counter to calculate the index to
# be used in resulting_path.
counter: int = 1
counter = 1
for i, child in enumerate(all_children):
if position is not None and i >= position:
# We're not going to insert the new node to an index after this
@@ -536,7 +506,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
if name == childname:
counter += 1
resulting_path: str = "{}/{}[{}]".format(
resulting_path = "{}/{}[{}]".format(
self.metadata["augeaspath"],
name,
counter
@@ -560,7 +530,7 @@ class AugeasBlockNode(AugeasDirectiveNode):
position
)
return insert_path, resulting_path, before
return (insert_path, resulting_path, before)
interfaces.CommentNode.register(AugeasCommentNode)
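As an aside, here is a small stand-alone sketch of how an Augeas-style indexed child path (the name[counter] form built in _aug_resolve_child_position above) comes together. The function and the label list are illustrative stand-ins, not the real parser API.

def resulting_child_path(parent_path, name, existing_labels, position=None):
    # Augeas indices start at 1; count earlier siblings with the same label
    # up to the requested position to find the index of the new node.
    counter = 1
    for i, label in enumerate(existing_labels):
        if position is not None and i >= position:
            break
        if label == name:
            counter += 1
    return "{}/{}[{}]".format(parent_path, name, counter)

print(resulting_child_path("/files/etc/apache2/apache2.conf/VirtualHost[1]",
                           "directive",
                           ["directive", "#comment", "directive"]))
# /files/etc/apache2/apache2.conf/VirtualHost[1]/directive[3]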

File diff suppressed because it is too large


View File

@@ -1,22 +1,17 @@
"""Apache plugin constants."""
from typing import Dict
from typing import List
import pkg_resources
from certbot.compat import os
MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
"""Name of the mod_ssl config file as saved
in `certbot.configuration.NamespaceConfig.config_dir`."""
"""Name of the mod_ssl config file as saved in `IConfig.config_dir`."""
UPDATED_MOD_SSL_CONF_DIGEST = ".updated-options-ssl-apache-conf-digest.txt"
"""Name of the hash of the updated or informed mod_ssl_conf as saved
in `certbot.configuration.NamespaceConfig.config_dir`."""
"""Name of the hash of the updated or informed mod_ssl_conf as saved in `IConfig.config_dir`."""
# NEVER REMOVE A SINGLE HASH FROM THIS LIST UNLESS YOU KNOW EXACTLY WHAT YOU ARE DOING!
ALL_SSL_OPTIONS_HASHES: List[str] = [
ALL_SSL_OPTIONS_HASHES = [
'2086bca02db48daf93468332543c60ac6acdb6f0b58c7bfdf578a5d47092f82a',
'4844d36c9a0f587172d9fa10f4f1c9518e3bcfa1947379f155e16a70a728c21a',
'5a922826719981c0a234b1fbcd495f3213e49d2519e845ea0748ba513044b65b',
@@ -39,39 +34,39 @@ AUGEAS_LENS_DIR = pkg_resources.resource_filename(
"certbot_apache", os.path.join("_internal", "augeas_lens"))
"""Path to the Augeas lens directory"""
REWRITE_HTTPS_ARGS: List[str] = [
REWRITE_HTTPS_ARGS = [
"^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,NE,R=permanent]"]
"""Apache version<2.3.9 rewrite rule arguments used for redirections to
https vhost"""
REWRITE_HTTPS_ARGS_WITH_END: List[str] = [
REWRITE_HTTPS_ARGS_WITH_END = [
"^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,NE,R=permanent]"]
"""Apache version >= 2.3.9 rewrite rule arguments used for redirections to
https vhost"""
OLD_REWRITE_HTTPS_ARGS: List[List[str]] = [
OLD_REWRITE_HTTPS_ARGS = [
["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,QSA,R=permanent]"],
["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,QSA,R=permanent]"]]
HSTS_ARGS: List[str] = ["always", "set", "Strict-Transport-Security",
HSTS_ARGS = ["always", "set", "Strict-Transport-Security",
"\"max-age=31536000\""]
"""Apache header arguments for HSTS"""
UIR_ARGS: List[str] = ["always", "set", "Content-Security-Policy", "upgrade-insecure-requests"]
UIR_ARGS = ["always", "set", "Content-Security-Policy",
"upgrade-insecure-requests"]
HEADER_ARGS: Dict[str, List[str]] = {
"Strict-Transport-Security": HSTS_ARGS, "Upgrade-Insecure-Requests": UIR_ARGS,
}
HEADER_ARGS = {"Strict-Transport-Security": HSTS_ARGS,
"Upgrade-Insecure-Requests": UIR_ARGS}
AUTOHSTS_STEPS: List[int] = [60, 300, 900, 3600, 21600, 43200, 86400]
AUTOHSTS_STEPS = [60, 300, 900, 3600, 21600, 43200, 86400]
"""AutoHSTS increase steps: 1min, 5min, 15min, 1h, 6h, 12h, 24h"""
AUTOHSTS_PERMANENT: int = 31536000
AUTOHSTS_PERMANENT = 31536000
"""Value for the last max-age of HSTS"""
AUTOHSTS_FREQ: int = 172800
AUTOHSTS_FREQ = 172800
"""Minimum time since last increase to perform a new one: 48h"""
MANAGED_COMMENT: str = "DO NOT REMOVE - Managed by Certbot"
MANAGED_COMMENT_ID: str = MANAGED_COMMENT + ", VirtualHost id: {0}"
MANAGED_COMMENT = "DO NOT REMOVE - Managed by Certbot"
MANAGED_COMMENT_ID = MANAGED_COMMENT+", VirtualHost id: {0}"
"""Managed by Certbot comments and the VirtualHost identification template"""

View File

@@ -1,28 +1,24 @@
"""Contains UI methods for Apache operations."""
import logging
from typing import Iterable
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple
from certbot_apache._internal.obj import VirtualHost
import zope.component
from certbot import errors
from certbot import interfaces
from certbot.compat import os
from certbot.display import util as display_util
import certbot.display.util as display_util
logger = logging.getLogger(__name__)
def select_vhost_multiple(vhosts: Optional[List[VirtualHost]]) -> List[VirtualHost]:
def select_vhost_multiple(vhosts):
"""Select multiple Vhosts to install the certificate for
:param vhosts: Available Apache VirtualHosts
:type vhosts: :class:`list` of type `~VirtualHost`
:type vhosts: :class:`list` of type `~obj.Vhost`
:returns: List of VirtualHosts
:rtype: :class:`list`of type `~VirtualHost`
:rtype: :class:`list`of type `~obj.Vhost`
"""
if not vhosts:
return []
@@ -30,7 +26,7 @@ def select_vhost_multiple(vhosts: Optional[List[VirtualHost]]) -> List[VirtualHo
# Remove the extra newline from the last entry
if tags_list:
tags_list[-1] = tags_list[-1][:-1]
code, names = display_util.checklist(
code, names = zope.component.getUtility(interfaces.IDisplay).checklist(
"Which VirtualHosts would you like to install the wildcard certificate for?",
tags=tags_list, force_interactive=True)
if code == display_util.OK:
@@ -38,8 +34,7 @@ def select_vhost_multiple(vhosts: Optional[List[VirtualHost]]) -> List[VirtualHo
return return_vhosts
return []
def _reversemap_vhosts(names: Iterable[str], vhosts: Iterable[VirtualHost]) -> List[VirtualHost]:
def _reversemap_vhosts(names, vhosts):
"""Helper function for select_vhost_multiple for mapping string
representations back to actual vhost objects"""
return_vhosts = []
@@ -50,14 +45,11 @@ def _reversemap_vhosts(names: Iterable[str], vhosts: Iterable[VirtualHost]) -> L
return_vhosts.append(vhost)
return return_vhosts
def select_vhost(domain: str, vhosts: Sequence[VirtualHost]) -> Optional[VirtualHost]:
def select_vhost(domain, vhosts):
"""Select an appropriate Apache Vhost.
:param str domain: Domain for vhost selection
:param vhosts: Available Apache VirtualHosts
:type vhosts: :class:`list` of type `~VirtualHost`
:type vhosts: :class:`list` of type `~obj.Vhost`
:returns: VirtualHost or `None`
:rtype: `~obj.Vhost` or `None`
@@ -70,8 +62,7 @@ def select_vhost(domain: str, vhosts: Sequence[VirtualHost]) -> Optional[Virtual
return vhosts[tag]
return None
def _vhost_menu(domain: str, vhosts: Iterable[VirtualHost]) -> Tuple[str, int]:
def _vhost_menu(domain, vhosts):
"""Select an appropriate Apache Vhost.
:param vhosts: Available Apache Virtual Hosts
@@ -112,23 +103,23 @@ def _vhost_menu(domain: str, vhosts: Iterable[VirtualHost]) -> Tuple[str, int]:
https="HTTPS" if vhost.ssl else "",
active="Enabled" if vhost.enabled else "",
fn_size=filename_size,
name_size=disp_name_size),
name_size=disp_name_size)
)
try:
code, tag = display_util.menu(
f"We were unable to find a vhost with a ServerName "
f"or Address of {domain}.{os.linesep}Which virtual host would you "
f"like to choose?",
code, tag = zope.component.getUtility(interfaces.IDisplay).menu(
"We were unable to find a vhost with a ServerName "
"or Address of {0}.{1}Which virtual host would you "
"like to choose?".format(domain, os.linesep),
choices, force_interactive=True)
except errors.MissingCommandlineFlag:
msg = (
f"Encountered vhost ambiguity when trying to find a vhost for "
f"{domain} but was unable to ask for user "
f"guidance in non-interactive mode. Certbot may need "
f"vhosts to be explicitly labelled with ServerName or "
f"ServerAlias directives.")
logger.error(msg)
"Encountered vhost ambiguity when trying to find a vhost for "
"{0} but was unable to ask for user "
"guidance in non-interactive mode. Certbot may need "
"vhosts to be explicitly labelled with ServerName or "
"ServerAlias directives.".format(domain))
logger.warning(msg)
raise errors.MissingCommandlineFlag(msg)
return code, tag

View File

@@ -1,45 +1,20 @@
""" Dual ParserNode implementation """
from typing import Any
from typing import Generic
from typing import Iterable
from typing import List
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from certbot_apache._internal import apacheparser
from certbot_apache._internal import assertions
from certbot_apache._internal import augeasparser
from certbot_apache._internal import interfaces
if TYPE_CHECKING:
from certbot_apache._internal.apacheparser import ApacheParserNode # pragma: no cover
from certbot_apache._internal.augeasparser import AugeasParserNode # pragma: no cover
GenericAugeasParserNode = TypeVar("GenericAugeasParserNode", bound="AugeasParserNode")
GenericApacheParserNode = TypeVar("GenericApacheParserNode", bound="ApacheParserNode")
GenericDualNode = TypeVar("GenericDualNode", bound="DualNodeBase")
class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):
class DualNodeBase:
""" Dual parser interface for in development testing. This is used as the
base class for dual parser interface classes. This class handles runtime
attribute value assertions."""
def __init__(self, primary: GenericAugeasParserNode,
secondary: GenericApacheParserNode) -> None:
self.primary = primary
self.secondary = secondary
def save(self, msg: str) -> None: # pragma: no cover
def save(self, msg): # pragma: no cover
""" Call save for both parsers """
self.primary.save(msg)
self.secondary.save(msg)
def __getattr__(self, aname: str) -> Any:
def __getattr__(self, aname):
""" Attribute value assertion """
firstval = getattr(self.primary, aname)
secondval = getattr(self.secondary, aname)
@@ -53,12 +28,11 @@ class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):
assertions.assertEqualSimple(firstval, secondval)
return firstval
def find_ancestors(self, name: str) -> List["DualNodeBase"]:
def find_ancestors(self, name):
""" Traverses the ancestor tree and returns ancestors matching name """
return self._find_helper(DualBlockNode, "find_ancestors", name)
def _find_helper(self, nodeclass: Type[GenericDualNode], findfunc: str, search: str,
**kwargs: Any) -> List[GenericDualNode]:
def _find_helper(self, nodeclass, findfunc, search, **kwargs):
"""A helper for find_* functions. The function specific attributes should
be passed as keyword arguments.
@@ -98,11 +72,10 @@ class DualNodeBase(Generic[GenericAugeasParserNode, GenericApacheParserNode]):
return new_nodes
class DualCommentNode(DualNodeBase[augeasparser.AugeasCommentNode,
apacheparser.ApacheCommentNode]):
class DualCommentNode(DualNodeBase):
""" Dual parser implementation of CommentNode interface """
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
""" This initialization implementation allows ordinary initialization
of CommentNode objects as well as creating a DualCommentNode object
using precreated or fetched CommentNode objects if provided as optional
@@ -122,21 +95,19 @@ class DualCommentNode(DualNodeBase[augeasparser.AugeasCommentNode,
if primary or secondary:
assert primary and secondary
super().__init__(primary, secondary)
self.primary = primary
self.secondary = secondary
else:
super().__init__(augeasparser.AugeasCommentNode(**kwargs),
apacheparser.ApacheCommentNode(**kwargs))
self.primary = augeasparser.AugeasCommentNode(**kwargs)
self.secondary = apacheparser.ApacheCommentNode(**kwargs)
assertions.assertEqual(self.primary, self.secondary)
class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
apacheparser.ApacheDirectiveNode]):
class DualDirectiveNode(DualNodeBase):
""" Dual parser implementation of DirectiveNode interface """
parameters: str
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
""" This initialization implementation allows ordinary initialization
of DirectiveNode objects as well as creating a DualDirectiveNode object
using precreated or fetched DirectiveNode objects if provided as optional
@@ -147,6 +118,8 @@ class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
:param DirectiveNode primary: Primary pre-created DirectiveNode, mainly
used when creating new DualParser nodes using add_* methods.
:param DirectiveNode secondary: Secondary pre-created DirectiveNode
"""
kwargs.setdefault("primary", None)
@@ -156,14 +129,15 @@ class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
if primary or secondary:
assert primary and secondary
super().__init__(primary, secondary)
self.primary = primary
self.secondary = secondary
else:
super().__init__(augeasparser.AugeasDirectiveNode(**kwargs),
apacheparser.ApacheDirectiveNode(**kwargs))
self.primary = augeasparser.AugeasDirectiveNode(**kwargs)
self.secondary = apacheparser.ApacheDirectiveNode(**kwargs)
assertions.assertEqual(self.primary, self.secondary)
def set_parameters(self, parameters: Iterable[str]) -> None:
def set_parameters(self, parameters):
""" Sets parameters and asserts that both implementation successfully
set the parameter sequence """
@@ -172,11 +146,10 @@ class DualDirectiveNode(DualNodeBase[augeasparser.AugeasDirectiveNode,
assertions.assertEqual(self.primary, self.secondary)
class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
apacheparser.ApacheBlockNode]):
class DualBlockNode(DualNodeBase):
""" Dual parser implementation of BlockNode interface """
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
""" This initialization implementation allows ordinary initialization
of BlockNode objects as well as creating a DualBlockNode object
using precreated or fetched BlockNode objects if provided as optional
@@ -191,20 +164,20 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
kwargs.setdefault("primary", None)
kwargs.setdefault("secondary", None)
primary: Optional[augeasparser.AugeasBlockNode] = kwargs.pop("primary")
secondary: Optional[apacheparser.ApacheBlockNode] = kwargs.pop("secondary")
primary = kwargs.pop("primary")
secondary = kwargs.pop("secondary")
if primary or secondary:
assert primary and secondary
super().__init__(primary, secondary)
self.primary = primary
self.secondary = secondary
else:
super().__init__(augeasparser.AugeasBlockNode(**kwargs),
apacheparser.ApacheBlockNode(**kwargs))
self.primary = augeasparser.AugeasBlockNode(**kwargs)
self.secondary = apacheparser.ApacheBlockNode(**kwargs)
assertions.assertEqual(self.primary, self.secondary)
def add_child_block(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> "DualBlockNode":
def add_child_block(self, name, parameters=None, position=None):
""" Creates a new child BlockNode, asserts that both implementations
did it in a similar way, and returns a newly created DualBlockNode object
encapsulating both of the newly created objects """
@@ -212,10 +185,10 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
primary_new = self.primary.add_child_block(name, parameters, position)
secondary_new = self.secondary.add_child_block(name, parameters, position)
assertions.assertEqual(primary_new, secondary_new)
return DualBlockNode(primary=primary_new, secondary=secondary_new)
new_block = DualBlockNode(primary=primary_new, secondary=secondary_new)
return new_block
def add_child_directive(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> DualDirectiveNode:
def add_child_directive(self, name, parameters=None, position=None):
""" Creates a new child DirectiveNode, asserts that both implementations
did it in a similar way, and returns a newly created DualDirectiveNode
object encapsulating both of the newly created objects """
@@ -223,22 +196,21 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
primary_new = self.primary.add_child_directive(name, parameters, position)
secondary_new = self.secondary.add_child_directive(name, parameters, position)
assertions.assertEqual(primary_new, secondary_new)
return DualDirectiveNode(primary=primary_new, secondary=secondary_new)
new_dir = DualDirectiveNode(primary=primary_new, secondary=secondary_new)
return new_dir
def add_child_comment(self, comment: str = "",
position: Optional[int] = None) -> DualCommentNode:
def add_child_comment(self, comment="", position=None):
""" Creates a new child CommentNode, asserts that both implementations
did it in a similar way, and returns a newly created DualCommentNode
object encapsulating both of the newly created objects """
primary_new = self.primary.add_child_comment(comment=comment, position=position)
secondary_new = self.secondary.add_child_comment(name=comment, position=position)
primary_new = self.primary.add_child_comment(comment, position)
secondary_new = self.secondary.add_child_comment(comment, position)
assertions.assertEqual(primary_new, secondary_new)
return DualCommentNode(primary=primary_new, secondary=secondary_new)
new_comment = DualCommentNode(primary=primary_new, secondary=secondary_new)
return new_comment
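A condensed sketch of the dual-parser pattern used throughout this file: every mutation is performed by both underlying implementations and the results are asserted to agree. The classes below are made-up stand-ins, not the real Augeas/Apache node types.

class _ImplA:
    def add_child_comment(self, comment="", position=None):
        return ("A", comment, position)

class _ImplB:
    def add_child_comment(self, comment="", position=None):
        return ("B", comment, position)

class DualNode:
    def __init__(self, primary, secondary):
        self.primary, self.secondary = primary, secondary

    def add_child_comment(self, comment="", position=None):
        first = self.primary.add_child_comment(comment=comment, position=position)
        second = self.secondary.add_child_comment(comment=comment, position=position)
        assert first[1:] == second[1:]  # both implementations must agree
        return DualNode(first, second)

dual = DualNode(_ImplA(), _ImplB())
child = dual.add_child_comment("managed by certbot")
print(child.primary, child.secondary)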
def _create_matching_list(self, primary_list: Iterable[interfaces.ParserNode],
secondary_list: Iterable[interfaces.ParserNode]
) -> List[Tuple[interfaces.ParserNode, interfaces.ParserNode]]:
def _create_matching_list(self, primary_list, secondary_list):
""" Matches the list of primary_list to a list of secondary_list and
returns a list of tuples. This is used to create results for find_
methods.
@@ -265,7 +237,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
raise AssertionError("Could not find a matching node.")
return matched
def find_blocks(self, name: str, exclude: bool = True) -> List["DualBlockNode"]:
def find_blocks(self, name, exclude=True):
"""
Performs a search for BlockNodes using both implementations and does simple
checks for results. This is built upon the assumption that unimplemented
@@ -277,7 +249,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
return self._find_helper(DualBlockNode, "find_blocks", name,
exclude=exclude)
def find_directives(self, name: str, exclude: bool = True) -> List[DualDirectiveNode]:
def find_directives(self, name, exclude=True):
"""
Performs a search for DirectiveNodes using both implementations and
checks the results. This is built upon the assumption that unimplemented
@@ -289,7 +261,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
return self._find_helper(DualDirectiveNode, "find_directives", name,
exclude=exclude)
def find_comments(self, comment: str) -> List[DualCommentNode]:
def find_comments(self, comment):
"""
Performs a search for CommentNodes using both implementations and
checks the results. This is built upon the assumption that unimplemented
@@ -300,7 +272,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
return self._find_helper(DualCommentNode, "find_comments", comment)
def delete_child(self, child: "DualBlockNode") -> None:
def delete_child(self, child):
"""Deletes a child from the ParserNode implementations. The actual
ParserNode implementations are used here directly in order to be able
to match a child to the list of children."""
@@ -308,7 +280,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
self.primary.delete_child(child.primary)
self.secondary.delete_child(child.secondary)
def unsaved_files(self) -> Set[str]:
def unsaved_files(self):
""" Fetches the list of unsaved file paths and asserts that the lists
match """
primary_files = self.primary.unsaved_files()
@@ -317,7 +289,7 @@ class DualBlockNode(DualNodeBase[augeasparser.AugeasBlockNode,
return primary_files
def parsed_paths(self) -> List[str]:
def parsed_paths(self):
"""
Returns a list of file paths that have currently been parsed into the parser
tree. The returned list may include paths with wildcard characters, for

View File

@@ -1,7 +1,7 @@
""" Entry point for Apache Plugin """
from typing import Dict
from typing import Type
from distutils.version import LooseVersion
from certbot import util
from certbot_apache._internal import configurator
from certbot_apache._internal import override_arch
from certbot_apache._internal import override_centos
@@ -10,11 +10,8 @@ from certbot_apache._internal import override_debian
from certbot_apache._internal import override_fedora
from certbot_apache._internal import override_gentoo
from certbot_apache._internal import override_suse
from certbot_apache._internal import override_void
from certbot import util
OVERRIDE_CLASSES: Dict[str, Type[configurator.ApacheConfigurator]] = {
OVERRIDE_CLASSES = {
"arch": override_arch.ArchConfigurator,
"cloudlinux": override_centos.CentOSConfigurator,
"darwin": override_darwin.DarwinConfigurator,
@@ -38,19 +35,17 @@ OVERRIDE_CLASSES: Dict[str, Type[configurator.ApacheConfigurator]] = {
"sles": override_suse.OpenSUSEConfigurator,
"scientific": override_centos.CentOSConfigurator,
"scientific linux": override_centos.CentOSConfigurator,
"void": override_void.VoidConfigurator,
}
def get_configurator() -> Type[configurator.ApacheConfigurator]:
def get_configurator():
""" Get correct configurator class based on the OS fingerprint """
os_name, os_version = util.get_os_info()
os_name = os_name.lower()
override_class = None
# Special case for older Fedora versions
min_version = util.parse_loose_version('29')
if os_name == 'fedora' and util.parse_loose_version(os_version) < min_version:
if os_name == 'fedora' and LooseVersion(os_version) < LooseVersion('29'):
os_name = 'fedora_old'
try:
@@ -60,7 +55,8 @@ def get_configurator() -> Type[configurator.ApacheConfigurator]:
os_like = util.get_systemd_os_like()
if os_like:
for os_name in os_like:
override_class = OVERRIDE_CLASSES.get(os_name)
if os_name in OVERRIDE_CLASSES.keys():
override_class = OVERRIDE_CLASSES[os_name]
if not override_class:
# No override class found, return the generic configurator
override_class = configurator.ApacheConfigurator
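A quick, stand-alone illustration of the loose version comparison this entry point relies on for the old-Fedora special case. distutils.version is deprecated on recent Python releases, which is presumably why the newer side of the diff routes through certbot's util.parse_loose_version; the version strings below are arbitrary examples.

from distutils.version import LooseVersion

print(LooseVersion("28") < LooseVersion("29"))    # True  -> handled as "fedora_old"
print(LooseVersion("33.1") < LooseVersion("29"))  # False -> regular Fedora handling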

View File

@@ -3,20 +3,13 @@ import errno
import logging
from typing import List
from typing import Set
from typing import TYPE_CHECKING
from certbot_apache._internal.obj import VirtualHost
from certbot_apache._internal.parser import get_aug_path
from acme.challenges import KeyAuthorizationChallengeResponse
from certbot import errors
from certbot.achallenges import KeyAuthorizationAnnotatedChallenge
from certbot.compat import filesystem
from certbot.compat import os
from certbot.plugins import common
if TYPE_CHECKING:
from certbot_apache._internal.configurator import ApacheConfigurator # pragma: no cover
from certbot_apache._internal.obj import VirtualHost # pylint: disable=unused-import
from certbot_apache._internal.parser import get_aug_path
logger = logging.getLogger(__name__)
@@ -53,9 +46,8 @@ class ApacheHttp01(common.ChallengePerformer):
</Location>
"""
def __init__(self, configurator: "ApacheConfigurator") -> None:
super().__init__(configurator)
self.configurator: "ApacheConfigurator"
def __init__(self, *args, **kwargs):
super(ApacheHttp01, self).__init__(*args, **kwargs)
self.challenge_conf_pre = os.path.join(
self.configurator.conf("challenge-location"),
"le_http_01_challenge_pre.conf")
@@ -67,7 +59,7 @@ class ApacheHttp01(common.ChallengePerformer):
"http_challenges")
self.moded_vhosts: Set[VirtualHost] = set()
def perform(self) -> List[KeyAuthorizationChallengeResponse]:
def perform(self):
"""Perform all HTTP-01 challenges."""
if not self.achalls:
return []
@@ -75,7 +67,8 @@ class ApacheHttp01(common.ChallengePerformer):
# About to make temporary changes to the config
self.configurator.save("Changes before challenge setup", True)
self.configurator.ensure_listen(str(self.configurator.config.http01_port))
self.configurator.ensure_listen(str(
self.configurator.config.http01_port))
self.prepare_http01_modules()
responses = self._set_up_challenges()
@@ -86,7 +79,7 @@ class ApacheHttp01(common.ChallengePerformer):
return responses
def prepare_http01_modules(self) -> None:
def prepare_http01_modules(self):
"""Make sure that we have the needed modules available for http01"""
if self.configurator.conf("handle-modules"):
@@ -99,13 +92,13 @@ class ApacheHttp01(common.ChallengePerformer):
if mod + "_module" not in self.configurator.parser.modules:
self.configurator.enable_mod(mod, temp=True)
def _mod_config(self) -> None:
def _mod_config(self):
selected_vhosts: List[VirtualHost] = []
http_port = str(self.configurator.config.http01_port)
# Search for VirtualHosts matching by name
for chall in self.achalls:
selected_vhosts += self._matching_vhosts(chall.domain)
# Search for matching VirtualHosts
for vh in self._matching_vhosts(chall.domain):
selected_vhosts.append(vh)
# Ensure that we have one or more VirtualHosts that we can continue
# with. (one that listens to port configured with --http-01-port)
@@ -114,13 +107,9 @@ class ApacheHttp01(common.ChallengePerformer):
if any(a.is_wildcard() or a.get_port() == http_port for a in vhost.addrs):
found = True
# If there's at least one eligible VirtualHost, also add all unnamed VirtualHosts
# because they might match at runtime (#8890)
if found:
selected_vhosts += self._unnamed_vhosts()
# Otherwise, add every Virtualhost which listens on the right port
else:
selected_vhosts += self._relevant_vhosts()
if not found:
for vh in self._relevant_vhosts():
selected_vhosts.append(vh)
# Add the challenge configuration
for vh in selected_vhosts:
@@ -148,7 +137,7 @@ class ApacheHttp01(common.ChallengePerformer):
with open(self.challenge_conf_post, "w") as new_conf:
new_conf.write(config_text_post)
def _matching_vhosts(self, domain: str) -> List[VirtualHost]:
def _matching_vhosts(self, domain):
"""Return all VirtualHost objects that have the requested domain name or
a wildcard name that would match the domain in ServerName or ServerAlias
directive.
@@ -162,9 +151,9 @@ class ApacheHttp01(common.ChallengePerformer):
return matching_vhosts
def _relevant_vhosts(self) -> List[VirtualHost]:
def _relevant_vhosts(self):
http01_port = str(self.configurator.config.http01_port)
relevant_vhosts: List[VirtualHost] = []
relevant_vhosts = []
for vhost in self.configurator.vhosts:
if any(a.is_wildcard() or a.get_port() == http01_port for a in vhost.addrs):
if not vhost.ssl:
@@ -178,11 +167,7 @@ class ApacheHttp01(common.ChallengePerformer):
return relevant_vhosts
def _unnamed_vhosts(self) -> List[VirtualHost]:
"""Return all VirtualHost objects with no ServerName"""
return [vh for vh in self.configurator.vhosts if vh.name is None]
def _set_up_challenges(self) -> List[KeyAuthorizationChallengeResponse]:
def _set_up_challenges(self):
if not os.path.isdir(self.challenge_dir):
old_umask = filesystem.umask(0o022)
try:
@@ -200,11 +185,10 @@ class ApacheHttp01(common.ChallengePerformer):
return responses
def _set_up_challenge(self, achall: KeyAuthorizationAnnotatedChallenge
) -> KeyAuthorizationChallengeResponse:
def _set_up_challenge(self, achall):
response, validation = achall.response_and_validation()
name: str = os.path.join(self.challenge_dir, achall.chall.encode("token"))
name = os.path.join(self.challenge_dir, achall.chall.encode("token"))
self.configurator.reverter.register_file_creation(True, name)
with open(name, 'wb') as f:
@@ -213,7 +197,7 @@ class ApacheHttp01(common.ChallengePerformer):
return response
def _set_up_include_directives(self, vhost: VirtualHost) -> None:
def _set_up_include_directives(self, vhost):
"""Includes override configuration to the beginning and to the end of
VirtualHost. Note that this include isn't added to Augeas search tree"""
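To make the VirtualHost selection criterion above concrete, here is a self-contained sketch of the wildcard/port test. Addr and listens_on are simplified stand-ins for the plugin's real address objects, and the addresses are example values.

class Addr:
    def __init__(self, host, port):
        self.host, self.port = host, port

    def is_wildcard(self):
        # Simplified version of the "wildcard or empty port" idea used above.
        return self.port in ("*", "")

    def get_port(self):
        return self.port

def listens_on(addrs, http_port):
    return any(a.is_wildcard() or a.get_port() == http_port for a in addrs)

print(listens_on([Addr("192.0.2.1", "80")], "80"))   # True
print(listens_on([Addr("192.0.2.1", "443")], "80"))  # False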

View File

@@ -98,18 +98,11 @@ names and parameters in case insensitive manner. This does not apply to everythi
however, for example the parameters of a conditional statement may be case sensitive.
For this reason the internal representation of data should not ignore the case.
"""
import abc
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import TypeVar
GenericParserNode = TypeVar("GenericParserNode", bound="ParserNode")
class ParserNode(metaclass=abc.ABCMeta):
class ParserNode(object, metaclass=abc.ABCMeta):
"""
ParserNode is the basic building block of the tree of such nodes,
representing the structure of the configuration. It is largely meant to keep
@@ -151,13 +144,9 @@ class ParserNode(metaclass=abc.ABCMeta):
# for the ParserNode instance.
metadata: Dict[str, Any]
"""
ancestor: Optional["ParserNode"]
dirty: bool
filepath: Optional[str]
metadata: Dict[str, Any]
@abc.abstractmethod
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""
Initializes the ParserNode instance, and sets the ParserNode specific
instance variables. This is not meant to be used directly, but through
@@ -181,7 +170,7 @@ class ParserNode(metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def save(self, msg: str) -> None:
def save(self, msg):
"""
Save traverses the children, and attempts to write the AST to disk for
all the objects that are marked dirty. The actual operation of course
@@ -200,7 +189,7 @@ class ParserNode(metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def find_ancestors(self: GenericParserNode, name: str) -> List[GenericParserNode]:
def find_ancestors(self, name):
"""
Traverses the ancestor tree up, searching for BlockNodes with a specific
name.
@@ -229,10 +218,9 @@ class CommentNode(ParserNode, metaclass=abc.ABCMeta):
comment: str
"""
comment: str
@abc.abstractmethod
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""
Initializes the CommentNode instance and sets its instance variables.
@@ -250,12 +238,10 @@ class CommentNode(ParserNode, metaclass=abc.ABCMeta):
created or changed after the last save. Default: False.
:type dirty: bool
"""
super().__init__( # pragma: no cover
ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {}),
)
super(CommentNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {})) # pragma: no cover
class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
@@ -284,12 +270,9 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
parameters: Tuple[str, ...]
"""
enabled: bool
name: Optional[str]
parameters: Tuple[str, ...]
@abc.abstractmethod
def __init__(self, **kwargs: Any) -> None:
def __init__(self, **kwargs):
"""
Initializes the DirectiveNode instance and sets its instance variables.
@@ -319,19 +302,17 @@ class DirectiveNode(ParserNode, metaclass=abc.ABCMeta):
:type enabled: bool
"""
super().__init__( # pragma: no cover
ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {}),
)
super(DirectiveNode, self).__init__(ancestor=kwargs['ancestor'],
dirty=kwargs.get('dirty', False),
filepath=kwargs['filepath'],
metadata=kwargs.get('metadata', {})) # pragma: no cover
@abc.abstractmethod
def set_parameters(self, parameters: List[str]) -> None:
def set_parameters(self, parameters):
"""
Sets the sequence of parameters for this ParserNode object without
whitespaces. While the whitespaces for parameters are discarded when using
this method, the whitespacing preceding the ParserNode itself should be
this method, the whitespacing preceeding the ParserNode itself should be
kept intact.
:param list parameters: sequence of parameters
@@ -378,14 +359,12 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
children: Tuple[ParserNode, ...]
"""
children: Tuple[ParserNode, ...]
@abc.abstractmethod
def add_child_block(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> "BlockNode":
def add_child_block(self, name, parameters=None, position=None):
"""
Adds a new BlockNode child node with provided values and marks the callee
BlockNode dirty. This is used to add new children to the AST. The preceding
BlockNode dirty. This is used to add new children to the AST. The preceeding
whitespaces should not be added based on the ancestor or siblings for the
newly created object. This is to match the current behavior of the legacy
parser implementation.
@@ -402,12 +381,11 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def add_child_directive(self, name: str, parameters: Optional[List[str]] = None,
position: Optional[int] = None) -> DirectiveNode:
def add_child_directive(self, name, parameters=None, position=None):
"""
Adds a new DirectiveNode child node with provided values and marks the
callee BlockNode dirty. This is used to add new children to the AST. The
preceding whitespaces should not be added based on the ancestor or siblings
preceeding whitespaces should not be added based on the ancestor or siblings
for the newly created object. This is to match the current behavior of the
legacy parser implementation.
@@ -424,11 +402,11 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def add_child_comment(self, comment: str = "", position: Optional[int] = None) -> CommentNode:
def add_child_comment(self, comment="", position=None):
"""
Adds a new CommentNode child node with provided value and marks the
callee BlockNode dirty. This is used to add new children to the AST. The
preceding whitespaces should not be added based on the ancestor or siblings
preceeding whitespaces should not be added based on the ancestor or siblings
for the newly created object. This is to match the current behavior of the
legacy parser implementation.
@@ -444,7 +422,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def find_blocks(self, name: str, exclude: bool = True) -> List["BlockNode"]:
def find_blocks(self, name, exclude=True):
"""
Find a configuration block by name. This method walks the child tree of
ParserNodes under the instance it was called from. This way it is possible
@@ -461,7 +439,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def find_directives(self, name: str, exclude: bool = True) -> List[DirectiveNode]:
def find_directives(self, name, exclude=True):
"""
Find a directive by name. This method walks the child tree of ParserNodes
under the instance it was called from. This way it is possible to search
@@ -479,7 +457,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def find_comments(self, comment: str) -> List[CommentNode]:
def find_comments(self, comment):
"""
Find comments with value containing the search term.
@@ -495,7 +473,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def delete_child(self, child: ParserNode) -> None:
def delete_child(self, child):
"""
Remove a specified child node from the list of children of the called
BlockNode object.
@@ -505,7 +483,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def unsaved_files(self) -> List[str]:
def unsaved_files(self):
"""
Returns a list of file paths that have been changed since the last save
(or the initial configuration parse). The intended use for this method
@@ -518,7 +496,7 @@ class BlockNode(DirectiveNode, metaclass=abc.ABCMeta):
"""
@abc.abstractmethod
def parsed_paths(self) -> List[str]:
def parsed_paths(self):
"""
Returns a list of file paths that have currently been parsed into the parser
tree. The returned list may include paths with wildcard characters, for

View File

@@ -1,15 +1,6 @@
"""Module contains classes used by the Apache Configurator."""
import re
from typing import Any
from typing import Iterable
from typing import Optional
from typing import Pattern
from typing import Set
from typing import Union
from certbot_apache._internal.apacheparser import ApacheBlockNode
from certbot_apache._internal.augeasparser import AugeasBlockNode
from certbot_apache._internal.dualparser import DualBlockNode
from certbot.plugins import common
@@ -17,7 +8,7 @@ from certbot.plugins import common
class Addr(common.Addr):
"""Represents an Apache address."""
def __eq__(self, other: Any) -> bool:
def __eq__(self, other):
"""This is defined as equivalent within Apache.
ip_addr:* == ip_addr
@@ -29,20 +20,20 @@ class Addr(common.Addr):
self.is_wildcard() and other.is_wildcard()))
return False
def __repr__(self) -> str:
return f"certbot_apache._internal.obj.Addr({repr(self.tup)})"
def __repr__(self):
return "certbot_apache._internal.obj.Addr(" + repr(self.tup) + ")"
def __hash__(self) -> int: # pylint: disable=useless-super-delegation
def __hash__(self): # pylint: disable=useless-super-delegation
# Python 3 requires explicit overridden for __hash__ if __eq__ or
# __cmp__ is overridden. See https://bugs.python.org/issue2235
return super().__hash__()
return super(Addr, self).__hash__()
def _addr_less_specific(self, addr: "Addr") -> bool:
def _addr_less_specific(self, addr):
"""Returns if addr.get_addr() is more specific than self.get_addr()."""
# pylint: disable=protected-access
return addr._rank_specific_addr() > self._rank_specific_addr()
def _rank_specific_addr(self) -> int:
def _rank_specific_addr(self):
"""Returns numerical rank for get_addr()
:returns: 2 - FQ, 1 - wildcard, 0 - _default_
@@ -55,7 +46,7 @@ class Addr(common.Addr):
return 1
return 2
def conflicts(self, addr: "Addr") -> bool:
def conflicts(self, addr):
r"""Returns if address could conflict with correct function of self.
Could addr take away service provided by self within Apache?
@@ -83,11 +74,11 @@ class Addr(common.Addr):
return True
return False
def is_wildcard(self) -> bool:
def is_wildcard(self):
"""Returns if address has a wildcard port."""
return self.tup[1] == "*" or not self.tup[1]
def get_sni_addr(self, port: str) -> common.Addr:
def get_sni_addr(self, port):
"""Returns the least specific address that resolves on the port.
Examples:
@@ -127,16 +118,13 @@ class VirtualHost:
"""
# ?: is used for not returning enclosed characters
strip_name: Pattern = re.compile(r"^(?:.+://)?([^ :$]*)")
strip_name = re.compile(r"^(?:.+://)?([^ :$]*)")
def __init__(self, filepath: str, path: str, addrs: Set["Addr"], ssl: bool,
enabled: bool, name: Optional[str] = None, aliases: Optional[Set[str]] = None,
modmacro: bool = False, ancestor: Optional["VirtualHost"] = None,
node: Optional[Union[ApacheBlockNode, AugeasBlockNode, DualBlockNode]] = None
) -> None:
def __init__(self, filep, path, addrs, ssl, enabled, name=None,
aliases=None, modmacro=False, ancestor=None, node=None):
"""Initialize a VH."""
self.filep = filepath
self.filep = filep
self.path = path
self.addrs = addrs
self.name = name
@@ -147,7 +135,7 @@ class VirtualHost:
self.ancestor = ancestor
self.node = node
def get_names(self) -> Set[str]:
def get_names(self):
"""Return a set of all names."""
all_names: Set[str] = set()
all_names.update(self.aliases)
@@ -157,28 +145,39 @@ class VirtualHost:
return all_names
def __str__(self) -> str:
def __str__(self):
return (
f"File: {self.filep}\n"
f"Vhost path: {self.path}\n"
f"Addresses: {', '.join(str(addr) for addr in self.addrs)}\n"
f"Name: {self.name if self.name is not None else ''}\n"
f"Aliases: {', '.join(name for name in self.aliases)}\n"
f"TLS Enabled: {'Yes' if self.ssl else 'No'}\n"
f"Site Enabled: {'Yes' if self.enabled else 'No'}\n"
f"mod_macro Vhost: {'Yes' if self.modmacro else 'No'}"
)
"File: {filename}\n"
"Vhost path: {vhpath}\n"
"Addresses: {addrs}\n"
"Name: {name}\n"
"Aliases: {aliases}\n"
"TLS Enabled: {tls}\n"
"Site Enabled: {active}\n"
"mod_macro Vhost: {modmacro}".format(
filename=self.filep,
vhpath=self.path,
addrs=", ".join(str(addr) for addr in self.addrs),
name=self.name if self.name is not None else "",
aliases=", ".join(name for name in self.aliases),
tls="Yes" if self.ssl else "No",
active="Yes" if self.enabled else "No",
modmacro="Yes" if self.modmacro else "No"))
def display_repr(self) -> str:
def display_repr(self):
"""Return a representation of VHost to be used in dialog"""
return (
f"File: {self.filep}\n"
f"Addresses: {', '.join(str(addr) for addr in self.addrs)}\n"
f"Names: {', '.join(self.get_names())}\n"
f"HTTPS: {'Yes' if self.ssl else 'No'}\n"
)
"File: {filename}\n"
"Addresses: {addrs}\n"
"Names: {names}\n"
"HTTPS: {https}\n".format(
filename=self.filep,
addrs=", ".join(str(addr) for addr in self.addrs),
names=", ".join(self.get_names()),
https="Yes" if self.ssl else "No"))
def __eq__(self, other: Any) -> bool:
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.filep == other.filep and self.path == other.path and
self.addrs == other.addrs and
@@ -189,12 +188,12 @@ class VirtualHost:
return False
def __hash__(self) -> int:
def __hash__(self):
return hash((self.filep, self.path,
tuple(self.addrs), tuple(self.get_names()),
self.ssl, self.enabled, self.modmacro))
def conflicts(self, addrs: Iterable[Addr]) -> bool:
def conflicts(self, addrs):
"""See if vhost conflicts with any of the addrs.
This determines whether or not these addresses would/could overwrite
@@ -213,7 +212,7 @@ class VirtualHost:
return True
return False
def same_server(self, vhost: "VirtualHost", generic: bool = False) -> bool:
def same_server(self, vhost, generic=False):
"""Determines if the vhost is the same 'server'.
Used in redirection - indicates whether or not the two virtual hosts
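A hedged illustration of the address equality rule documented above ("ip_addr:* == ip_addr"); Addr.fromstring is inherited from certbot.plugins.common, and its use here is an assumption:

    from certbot_apache._internal.obj import Addr

    a = Addr.fromstring("192.0.2.1:*")
    b = Addr.fromstring("192.0.2.1")
    assert a == b            # a wildcard (or missing) port matches the bare address
    assert a.is_wildcard()   # "*" or an empty port string counts as a wildcard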

View File

@@ -1,12 +1,15 @@
""" Distribution specific override class for Arch Linux """
import zope.interface
from certbot import interfaces
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
@zope.interface.provider(interfaces.IPluginFactory)
class ArchConfigurator(configurator.ApacheConfigurator):
"""Arch Linux specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/httpd",
vhost_root="/etc/httpd/conf",
vhost_files="*.conf",
@@ -15,5 +18,11 @@ class ArchConfigurator(configurator.ApacheConfigurator):
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf",
bin=None,
)
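A hedged sketch of the practical difference between the two sides of this hunk: the newer code stores these defaults on an OsOptions object (defined in configurator.py, not shown here) and reads them as attributes, while the older code keeps a plain dict read through self.option(). The mini class below is illustrative, not the real OsOptions definition:

    class OsOptionsSketch:
        """Attribute-style stand-in for configurator.OsOptions."""
        def __init__(self, server_root, ctl="apachectl", challenge_location=None):
            self.server_root = server_root
            self.ctl = ctl
            self.challenge_location = challenge_location or server_root

    opts = OsOptionsSketch(server_root="/etc/httpd")
    print(opts.ctl)        # attribute access, as in self.options.ctl

    options = dict(server_root="/etc/httpd", ctl="apachectl")
    print(options["ctl"])  # dict access, as in self.option("ctl")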

View File

@@ -1,25 +1,26 @@
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
import logging
from typing import Any
from typing import cast
from typing import List
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.errors import MisconfigurationError
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
from certbot_apache._internal.configurator import OsOptions
from certbot import errors
from certbot import util
from certbot.errors import MisconfigurationError
logger = logging.getLogger(__name__)
@zope.interface.provider(interfaces.IPluginFactory)
class CentOSConfigurator(configurator.ApacheConfigurator):
"""CentOS specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/httpd",
vhost_root="/etc/httpd/conf.d",
vhost_files="*.conf",
@@ -29,10 +30,16 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
restart_cmd=['apachectl', 'graceful'],
restart_cmd_alt=['apachectl', 'restart'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf.d",
bin=None,
)
def config_test(self) -> None:
def config_test(self):
"""
Override config_test to mitigate configtest error in vanilla installation
of mod_ssl in Fedora. The error is caused by non-existent self-signed
@@ -44,16 +51,16 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
fedora = os_info[0].lower() == "fedora"
try:
super().config_test()
super(CentOSConfigurator, self).config_test()
except errors.MisconfigurationError:
if fedora:
self._try_restart_fedora()
else:
raise
def _try_restart_fedora(self) -> None:
def _try_restart_fedora(self):
"""
Tries to restart httpd using systemctl to generate the self signed key pair.
Tries to restart httpd using systemctl to generate the self signed keypair.
"""
try:
@@ -62,34 +69,33 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
raise errors.MisconfigurationError(str(err))
# Finish with actual config check to see if systemctl restart helped
super().config_test()
super(CentOSConfigurator, self).config_test()
def _prepare_options(self) -> None:
def _prepare_options(self):
"""
Override the options dictionary initialization in order to support
alternative restart cmd used in CentOS.
"""
super()._prepare_options()
if not self.options.restart_cmd_alt: # pragma: no cover
raise ValueError("OS option restart_cmd_alt must be set for CentOS.")
self.options.restart_cmd_alt[0] = self.options.ctl
super(CentOSConfigurator, self)._prepare_options()
cast(List[str], self.options["restart_cmd_alt"])[0] = self.option("ctl")
def get_parser(self) -> "CentOSParser":
def get_parser(self):
"""Initializes the ApacheParser"""
return CentOSParser(
self.options.server_root, self, self.options.vhost_root, self.version)
self.option("server_root"), self.option("vhost_root"),
self.version, configurator=self)
def _deploy_cert(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=arguments-differ
def _deploy_cert(self, *args, **kwargs): # pylint: disable=arguments-differ
"""
Override _deploy_cert in order to ensure that the Apache configuration
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
that was created by Certbot
"""
super()._deploy_cert(*args, **kwargs)
super(CentOSConfigurator, self)._deploy_cert(*args, **kwargs)
if self.version < (2, 4, 0):
self._deploy_loadmodule_ssl_if_needed()
def _deploy_loadmodule_ssl_if_needed(self) -> None:
def _deploy_loadmodule_ssl_if_needed(self):
"""
Add "LoadModule ssl_module <pre-existing path>" to main httpd.conf if
it doesn't exist there already.
@@ -111,13 +117,13 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
"use, and run Certbot again.")
raise MisconfigurationError(msg)
else:
loadmod_args = [arg for arg in path_args if arg]
loadmod_args = path_args
centos_parser: CentOSParser = cast(CentOSParser, self.parser)
if centos_parser.not_modssl_ifmodule(noarg_path):
if centos_parser.loc["default"] in noarg_path:
if self.parser.not_modssl_ifmodule(noarg_path): # pylint: disable=no-member
if self.parser.loc["default"] in noarg_path:
# LoadModule already in the main configuration file
if "ifmodule/" in noarg_path.lower() or "ifmodule[1]" in noarg_path.lower():
if ("ifmodule/" in noarg_path.lower() or
"ifmodule[1]" in noarg_path.lower()):
# It's the first or only IfModule in the file
return
# Populate the list of known !mod_ssl.c IfModules
@@ -148,7 +154,8 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
self.parser.aug.remove(loadmod_path)
# Create a new IfModule !mod_ssl.c if not already found on path
ssl_ifmod = self.parser.get_ifmod(nodir_path, "!mod_ssl.c", beginning=True)[:-1]
ssl_ifmod = self.parser.get_ifmod(nodir_path, "!mod_ssl.c",
beginning=True)[:-1]
if ssl_ifmod not in correct_ifmods:
self.parser.add_dir(ssl_ifmod, "LoadModule", loadmod_args)
correct_ifmods.append(ssl_ifmod)
@@ -158,24 +165,24 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
class CentOSParser(parser.ApacheParser):
"""CentOS specific ApacheParser override class"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
def __init__(self, *args, **kwargs):
# CentOS specific configuration file for Apache
self.sysconfig_filep: str = "/etc/sysconfig/httpd"
super().__init__(*args, **kwargs)
self.sysconfig_filep = "/etc/sysconfig/httpd"
super(CentOSParser, self).__init__(*args, **kwargs)
def update_runtime_variables(self) -> None:
def update_runtime_variables(self):
""" Override for update_runtime_variables for custom parsing """
# Opportunistic, works if SELinux not enforced
super().update_runtime_variables()
super(CentOSParser, self).update_runtime_variables()
self.parse_sysconfig_var()
def parse_sysconfig_var(self) -> None:
def parse_sysconfig_var(self):
""" Parses Apache CLI options from CentOS configuration file """
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
for k, v in defines.items():
self.variables[k] = v
for k in defines:
self.variables[k] = defines[k]
def not_modssl_ifmodule(self, path: str) -> bool:
def not_modssl_ifmodule(self, path):
"""Checks if the provided Augeas path has argument !mod_ssl"""
if "ifmodule" not in path.lower():

View File

@@ -1,17 +1,28 @@
""" Distribution specific override class for macOS """
import zope.interface
from certbot import interfaces
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
@zope.interface.provider(interfaces.IPluginFactory)
class DarwinConfigurator(configurator.ApacheConfigurator):
"""macOS specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/other",
vhost_files="*.conf",
logs_root="/var/log/apache2",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2/other",
bin=None,
)

View File

@@ -1,30 +1,42 @@
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
import logging
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
from certbot_apache._internal.obj import VirtualHost
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.compat import filesystem
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
logger = logging.getLogger(__name__)
@zope.interface.provider(interfaces.IPluginFactory)
class DebianConfigurator(configurator.ApacheConfigurator):
"""Debian specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/sites-available",
vhost_files="*",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
restart_cmd=['apache2ctl', 'graceful'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod="a2enmod",
dismod="a2dismod",
le_vhost_ext="-le-ssl.conf",
handle_modules=True,
handle_sites=True,
challenge_location="/etc/apache2",
bin=None,
)
def enable_site(self, vhost: VirtualHost) -> None:
def enable_site(self, vhost):
"""Enables an available site, Apache reload required.
.. note:: Does not make sure that the site correctly works or that all
@@ -46,7 +58,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
if not os.path.isdir(os.path.dirname(enabled_path)):
# For some reason, sites-enabled / sites-available do not exist
# Call the parent method
return super().enable_site(vhost)
return super(DebianConfigurator, self).enable_site(vhost)
self.reverter.register_file_creation(False, enabled_path)
try:
os.symlink(vhost.filep, enabled_path)
@@ -56,7 +68,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
# Already in shape
vhost.enabled = True
return None
logger.error(
logger.warning(
"Could not symlink %s to %s, got error: %s", enabled_path,
vhost.filep, err.strerror)
errstring = ("Encountered error while trying to enable a " +
@@ -69,7 +81,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
self.save_notes += "Enabled site %s\n" % vhost.filep
return None
def enable_mod(self, mod_name: str, temp: bool = False) -> None:
def enable_mod(self, mod_name, temp=False):
"""Enables module in Apache.
Both enables and reloads Apache so module is active.
@@ -115,16 +127,16 @@ class DebianConfigurator(configurator.ApacheConfigurator):
# Reload is not necessary as DUMP_RUN_CFG uses latest config.
self.parser.update_runtime_variables()
def _enable_mod_debian(self, mod_name: str, temp: bool) -> None:
def _enable_mod_debian(self, mod_name, temp):
"""Assumes mods-available, mods-enabled layout."""
# Generate reversal command.
# Try to be safe here... check that we can probably reverse before
# applying enmod command
if (self.options.dismod is None or self.options.enmod is None
or not util.exe_exists(self.options.dismod)):
if not util.exe_exists(self.option("dismod")):
raise errors.MisconfigurationError(
"Unable to find a2dismod, please make sure a2enmod and "
"a2dismod are configured correctly for certbot.")
self.reverter.register_undo_command(temp, [self.options.dismod, "-f", mod_name])
util.run_script([self.options.enmod, mod_name])
self.reverter.register_undo_command(
temp, [self.option("dismod"), "-f", mod_name])
util.run_script([self.option("enmod"), mod_name])
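A hedged illustration of the sites-available/sites-enabled layout that enable_site() relies on; the paths are assumptions, and the plugin itself goes through certbot.compat.os and the reverter rather than plain os:

    import os

    def enable_site_sketch(available, enabled):
        """Create the sites-enabled symlink that "a2ensite" would have created."""
        if not os.path.islink(enabled):
            os.symlink(available, enabled)

    # e.g. enable_site_sketch("/etc/apache2/sites-available/example-le-ssl.conf",
    #                         "/etc/apache2/sites-enabled/example-le-ssl.conf")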

View File

@@ -1,19 +1,22 @@
""" Distribution specific override class for Fedora 29+ """
from typing import Any
from typing import cast
from typing import List
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
from certbot_apache._internal.configurator import OsOptions
from certbot import errors
from certbot import util
@zope.interface.provider(interfaces.IPluginFactory)
class FedoraConfigurator(configurator.ApacheConfigurator):
"""Fedora 29+ specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/httpd",
vhost_root="/etc/httpd/conf.d",
vhost_files="*.conf",
@@ -23,10 +26,16 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
restart_cmd=['apachectl', 'graceful'],
restart_cmd_alt=['apachectl', 'restart'],
conftest_cmd=['apachectl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf.d",
bin=None,
)
def config_test(self) -> None:
def config_test(self):
"""
Override config_test to mitigate configtest error in vanilla installation
of mod_ssl in Fedora. The error is caused by non-existent self-signed
@@ -34,18 +43,19 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
during the first (re)start of httpd.
"""
try:
super().config_test()
super(FedoraConfigurator, self).config_test()
except errors.MisconfigurationError:
self._try_restart_fedora()
def get_parser(self) -> "FedoraParser":
def get_parser(self):
"""Initializes the ApacheParser"""
return FedoraParser(
self.options.server_root, self, self.options.vhost_root, self.version)
self.option("server_root"), self.option("vhost_root"),
self.version, configurator=self)
def _try_restart_fedora(self) -> None:
def _try_restart_fedora(self):
"""
Tries to restart httpd using systemctl to generate the self signed key pair.
Tries to restart httpd using systemctl to generate the self signed keypair.
"""
try:
util.run_script(['systemctl', 'restart', 'httpd'])
@@ -53,37 +63,35 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
raise errors.MisconfigurationError(str(err))
# Finish with actual config check to see if systemctl restart helped
super().config_test()
super(FedoraConfigurator, self).config_test()
def _prepare_options(self) -> None:
def _prepare_options(self):
"""
Override the options dictionary initialization to keep using apachectl
instead of httpd and so take advantages of this new bash script in newer versions
of Fedora to restart httpd.
"""
super()._prepare_options()
self.options.restart_cmd[0] = 'apachectl'
if not self.options.restart_cmd_alt: # pragma: no cover
raise ValueError("OS option restart_cmd_alt must be set for Fedora.")
self.options.restart_cmd_alt[0] = 'apachectl'
self.options.conftest_cmd[0] = 'apachectl'
super(FedoraConfigurator, self)._prepare_options()
cast(List[str], self.options["restart_cmd"])[0] = 'apachectl'
cast(List[str], self.options["restart_cmd_alt"])[0] = 'apachectl'
cast(List[str], self.options["conftest_cmd"])[0] = 'apachectl'
class FedoraParser(parser.ApacheParser):
"""Fedora 29+ specific ApacheParser override class"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
def __init__(self, *args, **kwargs):
# Fedora 29+ specific configuration file for Apache
self.sysconfig_filep = "/etc/sysconfig/httpd"
super().__init__(*args, **kwargs)
super(FedoraParser, self).__init__(*args, **kwargs)
def update_runtime_variables(self) -> None:
def update_runtime_variables(self):
""" Override for update_runtime_variables for custom parsing """
# Opportunistic, works if SELinux not enforced
super().update_runtime_variables()
super(FedoraParser, self).update_runtime_variables()
self._parse_sysconfig_var()
def _parse_sysconfig_var(self) -> None:
def _parse_sysconfig_var(self):
""" Parses Apache CLI options from Fedora configuration file """
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
for k, v in defines.items():
self.variables[k] = v
for k in defines:
self.variables[k] = defines[k]

View File

@@ -1,61 +1,75 @@
""" Distribution specific override class for Gentoo Linux """
from typing import Any
from typing import cast
from typing import List
import zope.interface
from certbot import interfaces
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
from certbot_apache._internal.configurator import OsOptions
@zope.interface.provider(interfaces.IPluginFactory)
class GentooConfigurator(configurator.ApacheConfigurator):
"""Gentoo specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/vhosts.d",
vhost_files="*.conf",
logs_root="/var/log/apache2",
ctl="apache2ctl",
version_cmd=['apache2ctl', '-v'],
restart_cmd=['apache2ctl', 'graceful'],
restart_cmd_alt=['apache2ctl', 'restart'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod=None,
dismod=None,
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2/vhosts.d",
bin=None,
)
def _prepare_options(self) -> None:
def _prepare_options(self):
"""
Override the options dictionary initialization in order to support
alternative restart cmd used in Gentoo.
"""
super()._prepare_options()
if not self.options.restart_cmd_alt: # pragma: no cover
raise ValueError("OS option restart_cmd_alt must be set for Gentoo.")
self.options.restart_cmd_alt[0] = self.options.ctl
super(GentooConfigurator, self)._prepare_options()
cast(List[str], self.options["restart_cmd_alt"])[0] = self.option("ctl")
def get_parser(self) -> "GentooParser":
def get_parser(self):
"""Initializes the ApacheParser"""
return GentooParser(
self.options.server_root, self, self.options.vhost_root, self.version)
self.option("server_root"), self.option("vhost_root"),
self.version, configurator=self)
class GentooParser(parser.ApacheParser):
"""Gentoo specific ApacheParser override class"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
def __init__(self, *args, **kwargs):
# Gentoo specific configuration file for Apache2
self.apacheconfig_filep = "/etc/conf.d/apache2"
super().__init__(*args, **kwargs)
super(GentooParser, self).__init__(*args, **kwargs)
def update_runtime_variables(self) -> None:
def update_runtime_variables(self):
""" Override for update_runtime_variables for custom parsing """
self.parse_sysconfig_var()
self.update_modules()
def parse_sysconfig_var(self) -> None:
def parse_sysconfig_var(self):
""" Parses Apache CLI options from Gentoo configuration file """
defines = apache_util.parse_define_file(self.apacheconfig_filep,
"APACHE2_OPTS")
for k, v in defines.items():
self.variables[k] = v
for k in defines:
self.variables[k] = defines[k]
def update_modules(self) -> None:
def update_modules(self):
"""Get loaded modules from httpd process, and add them to DOM"""
mod_cmd = [self.configurator.options.ctl, "modules"]
mod_cmd = [self.configurator.option("ctl"), "modules"]
matches = apache_util.parse_from_subprocess(mod_cmd, r"(.*)_module")
for mod in matches:
self.add_mod(mod.strip())
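A hedged illustration of what the r"(.*)_module" capture above pulls out of "apache2ctl modules" output; the sample output is an assumption, and parse_from_subprocess itself runs the command and applies the regex to its stdout:

    import re

    sample_output = (
        "Loaded Modules:\n"
        " core_module (static)\n"
        " ssl_module (shared)\n"
        " rewrite_module (shared)\n"
    )
    mods = [m.strip() for m in re.findall(r"(.*)_module", sample_output)]
    print(mods)   # ['core', 'ssl', 'rewrite']; update_modules() feeds each to add_mod()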

View File

@@ -1,19 +1,28 @@
""" Distribution specific override class for OpenSUSE """
import zope.interface
from certbot import interfaces
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
@zope.interface.provider(interfaces.IPluginFactory)
class OpenSUSEConfigurator(configurator.ApacheConfigurator):
"""OpenSUSE specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
OS_DEFAULTS = dict(
server_root="/etc/apache2",
vhost_root="/etc/apache2/vhosts.d",
vhost_files="*.conf",
logs_root="/var/log/apache2",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
enmod="a2enmod",
dismod="a2dismod",
le_vhost_ext="-le-ssl.conf",
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2/vhosts.d",
bin=None,
)

View File

@@ -1,19 +0,0 @@
""" Distribution specific override class for Void Linux """
from certbot_apache._internal import configurator
from certbot_apache._internal.configurator import OsOptions
class VoidConfigurator(configurator.ApacheConfigurator):
"""Void Linux specific ApacheConfigurator override class"""
OS_DEFAULTS = OsOptions(
server_root="/etc/apache",
vhost_root="/etc/apache/extra",
vhost_files="*.conf",
logs_root="/var/log/httpd",
ctl="apachectl",
version_cmd=['apachectl', '-v'],
restart_cmd=['apachectl', 'graceful'],
conftest_cmd=['apachectl', 'configtest'],
challenge_location="/etc/apache/extra",
)

View File

@@ -3,31 +3,13 @@ import copy
import fnmatch
import logging
import re
from typing import Collection
from typing import Dict
from typing import Iterable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Pattern
from typing import Set
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union
from certbot_apache._internal import apache_util
from certbot_apache._internal import constants
from certbot import errors
from certbot.compat import os
if TYPE_CHECKING:
from certbot_apache._internal.configurator import ApacheConfigurator # pragma: no cover
try:
from augeas import Augeas
except ImportError: # pragma: no cover
Augeas = None
from certbot_apache._internal import apache_util
from certbot_apache._internal import constants
logger = logging.getLogger(__name__)
@@ -44,11 +26,11 @@ class ApacheParser:
default - user config file, name - NameVirtualHost,
"""
arg_var_interpreter: Pattern = re.compile(r"\$\{[^ \}]*}")
fnmatch_chars: Set[str] = {"*", "?", "\\", "[", "]"}
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
fnmatch_chars = {"*", "?", "\\", "[", "]"}
def __init__(self, root: str, configurator: "ApacheConfigurator",
vhostroot: str, version: Tuple[int, ...] = (2, 4)) -> None:
def __init__(self, root, vhostroot=None, version=(2, 4),
configurator=None):
# Note: Order is important here.
# Needed for calling save() with reverter functionality that resides in
@@ -57,7 +39,8 @@ class ApacheParser:
self.configurator = configurator
# Initialize augeas
self.aug: Augeas = init_augeas()
self.aug = None
self.init_augeas()
if not self.check_aug_version():
raise errors.NotSupportedError(
@@ -65,13 +48,13 @@ class ApacheParser:
"version 1.2.0 or higher, please make sure you have you have "
"those installed.")
self.modules: Dict[str, Optional[str]] = {}
self.modules: Dict[str, str] = {}
self.parser_paths: Dict[str, List[str]] = {}
self.variables: Dict[str, str] = {}
# Find configuration root and make sure augeas can parse it.
self.root: str = os.path.abspath(root)
self.loc: Dict[str, str] = {"root": self._find_config_root()}
self.root = os.path.abspath(root)
self.loc = {"root": self._find_config_root()}
self.parse_file(self.loc["root"])
if version >= (2, 4):
@@ -93,14 +76,31 @@ class ApacheParser:
# Must also attempt to parse additional virtual host root
if vhostroot:
self.parse_file(os.path.abspath(vhostroot) + "/" +
self.configurator.options.vhost_files)
self.configurator.option("vhost_files"))
# check to see if there were unparsed define statements
if version < (2, 4):
if self.find_dir("Define", exclude=False):
raise errors.PluginError("Error parsing runtime variables")
def check_parsing_errors(self, lens: str) -> None:
def init_augeas(self):
""" Initialize the actual Augeas instance """
try:
import augeas
except ImportError: # pragma: no cover
raise errors.NoInstallationError("Problem in Augeas installation")
self.aug = augeas.Augeas(
# specify a directory to load our preferred lens from
loadpath=constants.AUGEAS_LENS_DIR,
# Do not save backup (we do it ourselves), do not load
# anything by default
flags=(augeas.Augeas.NONE |
augeas.Augeas.NO_MODL_AUTOLOAD |
augeas.Augeas.ENABLE_SPAN))
def check_parsing_errors(self, lens):
"""Verify Augeas can parse all of the lens files.
:param str lens: lens to check for errors
@@ -126,7 +126,7 @@ class ApacheParser:
self.aug.get(path + "/message")))
raise errors.PluginError(msg)
def check_aug_version(self) -> Union[bool, List[str]]:
def check_aug_version(self):
""" Checks that we have recent enough version of libaugeas.
If augeas version is recent enough, it will support case insensitive
regexp matching"""
@@ -141,7 +141,7 @@ class ApacheParser:
self.aug.remove("/test/path")
return matches
def unsaved_files(self) -> Set[str]:
def unsaved_files(self):
"""Lists files that have modified Augeas DOM but the changes have not
been written to the filesystem yet, used by `self.save()` and
ApacheConfigurator to check the file state.
@@ -180,7 +180,7 @@ class ApacheParser:
save_files.add(self.aug.get(path)[6:])
return save_files
def ensure_augeas_state(self) -> None:
def ensure_augeas_state(self):
"""Makes sure that all Augeas dom changes are written to files to avoid
loss of configuration directives when doing additional augeas parsing,
causing a possible augeas.load() resulting dom reset
@@ -190,7 +190,7 @@ class ApacheParser:
self.configurator.save_notes += "(autosave)"
self.configurator.save()
def save(self, save_files: Iterable[str]) -> None:
def save(self, save_files):
"""Saves all changes to the configuration files.
save() is called from ApacheConfigurator to handle the parser specific
@@ -200,13 +200,7 @@ class ApacheParser:
"""
self.configurator.save_notes = ""
ex_errs = self.aug.match("/augeas//error")
try:
self.aug.save()
except IOError:
self._log_save_errors(ex_errs)
raise
self.aug.save()
# Force reload if files were modified
# This is needed to recalculate augeas directive span
@@ -215,24 +209,21 @@ class ApacheParser:
self.aug.remove("/files/"+sf)
self.aug.load()
def _log_save_errors(self, ex_errs: Iterable[str]) -> None:
def _log_save_errors(self, ex_errs):
"""Log errors due to bad Augeas save.
:param list ex_errs: Existing errors before save
"""
# Check for the root of save problems
new_errs = [e for e in self.aug.match("/augeas//error") if e not in ex_errs]
new_errs = self.aug.match("/augeas//error")
# logger.error("During Save - %s", mod_conf)
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
", ".join(err[13:len(err) - 6] for err in new_errs
# Only new errors caused by recent save
if err not in ex_errs), self.configurator.save_notes)
for err in new_errs:
logger.debug(
"Error %s saving %s: %s", self.aug.get(err), err[13:len(err) - 6],
self.aug.get(f"{err}/message"))
logger.error(
"Unable to save files: %s.%s", ", ".join(err[13:len(err) - 6] for err in new_errs),
f" Save Notes: {self.configurator.save_notes}" if self.configurator.save_notes else "")
def add_include(self, main_config: str, inc_path: str) -> None:
def add_include(self, main_config, inc_path):
"""Add Include for a new configuration file if one does not exist
:param str main_config: file path to main Apache config file
@@ -251,21 +242,21 @@ class ApacheParser:
new_file = os.path.basename(inc_path)
self.existing_paths.setdefault(new_dir, []).append(new_file)
def add_mod(self, mod_name: str) -> None:
def add_mod(self, mod_name):
"""Shortcut for updating parser modules."""
if mod_name + "_module" not in self.modules:
self.modules[mod_name + "_module"] = None
if "mod_" + mod_name + ".c" not in self.modules:
self.modules["mod_" + mod_name + ".c"] = None
def reset_modules(self) -> None:
def reset_modules(self):
"""Reset the loaded modules list. This is called from cleanup to clear
temporarily loaded modules."""
self.modules = {}
self.update_modules()
self.parse_modules()
def parse_modules(self) -> None:
def parse_modules(self):
"""Iterates on the configuration until no new modules are loaded.
..todo:: This should be attempted to be done with a binary to avoid
@@ -293,18 +284,19 @@ class ApacheParser:
match_name[6:])
self.modules.update(mods)
def update_runtime_variables(self) -> None:
def update_runtime_variables(self):
"""Update Includes, Defines and Includes from httpd config dump data"""
self.update_defines()
self.update_includes()
self.update_modules()
def update_defines(self) -> None:
def update_defines(self):
"""Updates the dictionary of known variables in the configuration"""
self.variables = apache_util.parse_defines(self.configurator.options.ctl)
def update_includes(self) -> None:
self.variables = apache_util.parse_defines(self.configurator.option("ctl"))
def update_includes(self):
"""Get includes from httpd process, and add them to DOM if needed"""
# Find_dir iterates over configuration for Include and IncludeOptional
@@ -312,34 +304,34 @@ class ApacheParser:
# configuration files
_ = self.find_dir("Include")
matches = apache_util.parse_includes(self.configurator.options.ctl)
matches = apache_util.parse_includes(self.configurator.option("ctl"))
if matches:
for i in matches:
if not self.parsed_in_current(i):
self.parse_file(i)
def update_modules(self) -> None:
def update_modules(self):
"""Get loaded modules from httpd process, and add them to DOM"""
matches = apache_util.parse_modules(self.configurator.options.ctl)
matches = apache_util.parse_modules(self.configurator.option("ctl"))
for mod in matches:
self.add_mod(mod.strip())
def filter_args_num(self, matches: str, args: int) -> List[str]:
def filter_args_num(self, matches, args):
"""Filter out directives with specific number of arguments.
This function makes the assumption that all related arguments are given
in order. Thus /files/apache/directive[5]/arg[2] must come immediately
after /files/apache/directive[5]/arg[1]. Runs in 1 linear pass.
:param str matches: Matches of all directives with arg nodes
:param string matches: Matches of all directives with arg nodes
:param int args: Number of args you would like to filter
:returns: List of directives that contain # of arguments.
(arg is stripped off)
"""
filtered: List[str] = []
filtered = []
if args == 1:
for i, match in enumerate(matches):
if match.endswith("/arg"):
@@ -356,7 +348,7 @@ class ApacheParser:
return filtered
def add_dir_to_ifmodssl(self, aug_conf_path: str, directive: str, args: List[str]) -> None:
def add_dir_to_ifmodssl(self, aug_conf_path, directive, args):
"""Adds directive and value to IfMod ssl block.
Adds given directive and value along configuration path within
@@ -365,7 +357,7 @@ class ApacheParser:
:param str aug_conf_path: Desired Augeas config path to add directive
:param str directive: Directive you would like to add, e.g. Listen
:param args: Values of the directive; list of str (eg. ["443"])
:param args: Values of the directive; str "443" or list of str
:type args: list
"""
@@ -382,7 +374,7 @@ class ApacheParser:
for i, arg in enumerate(args):
self.aug.set("%s/arg[%d]" % (nvh_path, i + 1), arg)
def get_ifmod(self, aug_conf_path: str, mod: str, beginning: bool = False) -> str:
def get_ifmod(self, aug_conf_path, mod, beginning=False):
"""Returns the path to <IfMod mod> and creates one if it doesn't exist.
:param str aug_conf_path: Augeas configuration path
@@ -404,7 +396,7 @@ class ApacheParser:
# Strip off "arg" at end of first ifmod path
return if_mods[0].rpartition("arg")[0]
def create_ifmod(self, aug_conf_path: str, mod: str, beginning: bool = False) -> str:
def create_ifmod(self, aug_conf_path, mod, beginning=False):
"""Creates a new <IfMod mod> and returns its path.
:param str aug_conf_path: Augeas configuration path
@@ -431,9 +423,7 @@ class ApacheParser:
self.aug.set(c_path_arg, mod)
return retpath
def add_dir(
self, aug_conf_path: Optional[str], directive: Optional[str], args: Union[List[str], str]
) -> None:
def add_dir(self, aug_conf_path, directive, args):
"""Appends directive to the end fo the file given by aug_conf_path.
.. note:: Not added to AugeasConfigurator because it may depend
@@ -445,7 +435,6 @@ class ApacheParser:
:type args: list or str
"""
aug_conf_path = aug_conf_path if aug_conf_path else ""
self.aug.set(aug_conf_path + "/directive[last() + 1]", directive)
if isinstance(args, list):
for i, value in enumerate(args, 1):
@@ -454,8 +443,7 @@ class ApacheParser:
else:
self.aug.set(aug_conf_path + "/directive[last()]/arg", args)
def add_dir_beginning(self, aug_conf_path: Optional[str], dirname: str,
args: Union[List[str], str]) -> None:
def add_dir_beginning(self, aug_conf_path, dirname, args):
"""Adds the directive to the beginning of defined aug_conf_path.
:param str aug_conf_path: Augeas configuration path to add directive
@@ -463,13 +451,8 @@ class ApacheParser:
:param args: Value of the directive. ie. Listen 443, 443 is arg
:type args: list or str
"""
aug_conf_path = aug_conf_path if aug_conf_path else ""
first_dir = aug_conf_path + "/directive[1]"
if self.aug.get(first_dir):
self.aug.insert(first_dir, "directive", True)
else:
self.aug.set(first_dir, "directive")
self.aug.insert(first_dir, "directive", True)
self.aug.set(first_dir, dirname)
if isinstance(args, list):
for i, value in enumerate(args, 1):
@@ -477,7 +460,7 @@ class ApacheParser:
else:
self.aug.set(first_dir + "/arg", args)
def add_comment(self, aug_conf_path: str, comment: str) -> None:
def add_comment(self, aug_conf_path, comment):
"""Adds the comment to the augeas path
:param str aug_conf_path: Augeas configuration path to add directive
@@ -486,7 +469,7 @@ class ApacheParser:
"""
self.aug.set(aug_conf_path + "/#comment[last() + 1]", comment)
def find_comments(self, arg: str, start: Optional[str] = None) -> List[str]:
def find_comments(self, arg, start=None):
"""Finds a comment with specified content from the provided DOM path
:param str arg: Comment content to search
@@ -508,8 +491,7 @@ class ApacheParser:
results.append(comment)
return results
def find_dir(self, directive: str, arg: Optional[str] = None,
start: Optional[str] = None, exclude: bool = True) -> List[str]:
def find_dir(self, directive, arg=None, start=None, exclude=True):
"""Finds directive in the configuration.
Recursively searches through config files to find directives
@@ -537,8 +519,6 @@ class ApacheParser:
:param bool exclude: Whether or not to exclude directives based on
variables and enabled modules
:rtype list
"""
# Cannot place member variable in the definition of the function so...
if not start:
@@ -587,7 +567,7 @@ class ApacheParser:
return ordered_matches
def get_all_args(self, match: str) -> List[Optional[str]]:
def get_all_args(self, match):
"""
Tries to fetch all arguments for a directive. See get_arg.
@@ -597,11 +577,11 @@ class ApacheParser:
"""
if match[-1] != "/":
match = match + "/"
match = match+"/"
allargs = self.aug.match(match + '*')
return [self.get_arg(arg) for arg in allargs]
def get_arg(self, match: str) -> Optional[str]:
def get_arg(self, match):
"""Uses augeas.get to get argument value and interprets result.
This also converts all variables and parameters appropriately.
@@ -616,7 +596,6 @@ class ApacheParser:
# e.g. strip now, not later
if not value:
return None
value = value.strip("'\"")
variables = ApacheParser.arg_var_interpreter.findall(value)
@@ -630,13 +609,13 @@ class ApacheParser:
return value
def get_root_augpath(self) -> str:
def get_root_augpath(self):
"""
Returns the Augeas path of root configuration.
"""
return get_aug_path(self.loc["root"])
def exclude_dirs(self, matches: Iterable[str]) -> List[str]:
def exclude_dirs(self, matches):
"""Exclude directives that are not loaded into the configuration."""
filters = [("ifmodule", self.modules.keys()), ("ifdefine", self.variables)]
@@ -650,7 +629,7 @@ class ApacheParser:
valid_matches.append(match)
return valid_matches
def _pass_filter(self, match: str, filter_: Tuple[str, Collection[str]]) -> bool:
def _pass_filter(self, match, filter_):
"""Determine if directive passes a filter.
:param str match: Augeas path
@@ -679,7 +658,7 @@ class ApacheParser:
return True
def standard_path_from_server_root(self, arg: str) -> str:
def standard_path_from_server_root(self, arg):
"""Ensure paths are consistent and absolute
:param str arg: Argument of directive
@@ -698,7 +677,7 @@ class ApacheParser:
arg = os.path.normpath(arg)
return arg
def _get_include_path(self, arg: Optional[str]) -> Optional[str]:
def _get_include_path(self, arg):
"""Converts an Apache Include directive into Augeas path.
Converts an Apache Include directive argument into an Augeas
@@ -718,8 +697,6 @@ class ApacheParser:
# if matchObj.group() != arg:
# logger.error("Error: Invalid regexp characters in %s", arg)
# return []
if arg is None:
return None # pragma: no cover
arg = self.standard_path_from_server_root(arg)
# Attempts to add a transform to the file if one does not already exist
@@ -744,7 +721,7 @@ class ApacheParser:
return get_aug_path(arg)
def fnmatch_to_re(self, clean_fn_match: str) -> str:
def fnmatch_to_re(self, clean_fn_match):
"""Method converts Apache's basic fnmatch to regular expression.
Assumption - Configs are assumed to be well-formed and only writable by
@@ -761,7 +738,7 @@ class ApacheParser:
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
def parse_file(self, filepath: str) -> None:
def parse_file(self, filepath):
"""Parse file with Augeas
Checks to see if file_path is parsed by Augeas
@@ -788,7 +765,7 @@ class ApacheParser:
self._add_httpd_transform(filepath)
self.aug.load()
def parsed_in_current(self, filep: Optional[str]) -> bool:
def parsed_in_current(self, filep):
"""Checks if the file path is parsed by current Augeas parser config
ie. returns True if the file is found on a path that's found in live
Augeas configuration.
@@ -798,11 +775,9 @@ class ApacheParser:
:returns: True if file is parsed in existing configuration tree
:rtype: bool
"""
if not filep:
return False # pragma: no cover
return self._parsed_by_parser_paths(filep, self.parser_paths)
def parsed_in_original(self, filep: Optional[str]) -> bool:
def parsed_in_original(self, filep):
"""Checks if the file path is parsed by existing Apache config.
ie. returns True if the file is found on a path that matches Include or
IncludeOptional statement in the Apache configuration.
@@ -812,11 +787,9 @@ class ApacheParser:
:returns: True if file is parsed in existing configuration tree
:rtype: bool
"""
if not filep:
return False # pragma: no cover
return self._parsed_by_parser_paths(filep, self.existing_paths)
def _parsed_by_parser_paths(self, filep: str, paths: Mapping[str, List[str]]) -> bool:
def _parsed_by_parser_paths(self, filep, paths):
"""Helper function that searches through provided paths and returns
True if file path is found in the set"""
for directory in paths:
@@ -825,7 +798,7 @@ class ApacheParser:
return True
return False
def _check_path_actions(self, filepath: str) -> Tuple[bool, bool]:
def _check_path_actions(self, filepath):
"""Determine actions to take with a new augeas path
This helper function will return a tuple that defines
@@ -850,7 +823,7 @@ class ApacheParser:
remove_old = False
return use_new, remove_old
def _remove_httpd_transform(self, filepath: str) -> None:
def _remove_httpd_transform(self, filepath):
"""Remove path from Augeas transform
:param str filepath: filepath to remove
@@ -865,7 +838,7 @@ class ApacheParser:
self.aug.remove(remove_inc[0])
self.parser_paths.pop(remove_dirname)
def _add_httpd_transform(self, incl: str) -> None:
def _add_httpd_transform(self, incl):
"""Add a transform to Augeas.
This function will correctly add a transform to augeas
@@ -875,7 +848,7 @@ class ApacheParser:
:param str incl: filepath to include for transform
"""
last_include: str = self.aug.match("/augeas/load/Httpd/incl [last()]")
last_include = self.aug.match("/augeas/load/Httpd/incl [last()]")
if last_include:
# Insert a new node immediately after the last incl
self.aug.insert(last_include[0], "incl", False)
@@ -893,7 +866,7 @@ class ApacheParser:
self.parser_paths[os.path.dirname(incl)] = [
os.path.basename(incl)]
def standardize_excl(self) -> None:
def standardize_excl(self):
"""Standardize the excl arguments for the Httpd lens in Augeas.
Note: Hack!
@@ -925,16 +898,16 @@ class ApacheParser:
self.aug.load()
def _set_locations(self) -> Dict[str, str]:
def _set_locations(self):
"""Set default location for directives.
Locations are given as file_paths
.. todo:: Make sure that files are included
"""
default: str = self.loc["root"]
default = self.loc["root"]
temp: str = os.path.join(self.root, "ports.conf")
temp = os.path.join(self.root, "ports.conf")
if os.path.isfile(temp):
listen = temp
name = temp
@@ -944,7 +917,7 @@ class ApacheParser:
return {"default": default, "listen": listen, "name": name}
def _find_config_root(self) -> str:
def _find_config_root(self):
"""Find the Apache Configuration Root file."""
location = ["apache2.conf", "httpd.conf", "conf/httpd.conf"]
for name in location:
@@ -953,7 +926,7 @@ class ApacheParser:
raise errors.NoInstallationError("Could not find configuration root")
def case_i(string: str) -> str:
def case_i(string):
"""Returns case insensitive regex.
Returns a sloppy, but necessary version of a case insensitive regex.
@@ -969,26 +942,10 @@ def case_i(string: str) -> str:
if c.isalpha() else c for c in re.escape(string))
def get_aug_path(file_path: str) -> str:
def get_aug_path(file_path):
"""Return augeas path for full filepath.
:param str file_path: Full filepath
"""
return "/files%s" % file_path
def init_augeas() -> Augeas:
""" Initialize the actual Augeas instance """
if not Augeas: # pragma: no cover
raise errors.NoInstallationError("Problem in Augeas installation")
return Augeas(
# specify a directory to load our preferred lens from
loadpath=constants.AUGEAS_LENS_DIR,
# Do not save backup (we do it ourselves), do not load
# anything by default
flags=(Augeas.NONE |
Augeas.NO_MODL_AUTOLOAD |
Augeas.ENABLE_SPAN))
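A hedged worked example for the two small helpers above, reconstructed from the fragments shown in this diff; the sample inputs are assumptions:

    import re

    def case_i(string):
        return "".join("[" + c.upper() + c.lower() + "]"
                       if c.isalpha() else c for c in re.escape(string))

    def get_aug_path(file_path):
        return "/files%s" % file_path

    print(case_i("Include"))                          # [Ii][Nn][Cc][Ll][Uu][Dd][Ee]
    print(get_aug_path("/etc/apache2/apache2.conf"))  # /files/etc/apache2/apache2.conf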

View File

@@ -1,14 +1,7 @@
"""ParserNode utils"""
from typing import Any
from typing import Dict
from typing import Iterable
from typing import Optional
from typing import Tuple
from certbot_apache._internal.interfaces import ParserNode
def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Dict[str, Any]:
def validate_kwargs(kwargs, required_names):
"""
Ensures that the kwargs dict has all the expected values. This function modifies
the kwargs dictionary, and hence the returned dictionary should be used instead
@@ -18,7 +11,7 @@ def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Di
:param list required_names: List of required parameter names.
"""
validated_kwargs: Dict[str, Any] = {}
validated_kwargs = {}
for name in required_names:
try:
validated_kwargs[name] = kwargs.pop(name)
@@ -32,8 +25,7 @@ def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Di
return validated_kwargs
def parsernode_kwargs(kwargs: Dict[str, Any]
) -> Tuple[Optional[ParserNode], bool, Optional[str], Dict[str, Any]]:
def parsernode_kwargs(kwargs):
"""
Validates keyword arguments for ParserNode. This function modifies the kwargs
dictionary, and hence the returned dictionary should be used instead in the
@@ -63,7 +55,7 @@ def parsernode_kwargs(kwargs: Dict[str, Any]
return kwargs["ancestor"], kwargs["dirty"], kwargs["filepath"], kwargs["metadata"]
def commentnode_kwargs(kwargs: Dict[str, Any]) -> Tuple[Optional[str], Dict[str, str]]:
def commentnode_kwargs(kwargs):
"""
Validates keyword arguments for CommentNode and sets the default values for
optional kwargs. This function modifies the kwargs dictionary, and hence the
@@ -98,8 +90,7 @@ def commentnode_kwargs(kwargs: Dict[str, Any]) -> Tuple[Optional[str], Dict[str,
return comment, kwargs
def directivenode_kwargs(kwargs: Dict[str, Any]
) -> Tuple[Optional[str], Tuple[str, ...], bool, Dict[str, Any]]:
def directivenode_kwargs(kwargs):
"""
Validates keyword arguments for DirectiveNode and BlockNode and sets the
default values for optional kwargs. This function modifies the kwargs
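A hedged usage sketch for validate_kwargs as described above; the import path and the argument values are assumptions:

    from certbot_apache._internal.parsernode_util import validate_kwargs

    kwargs = {"ancestor": None, "dirty": False,
              "filepath": "/tmp/example.conf", "metadata": {}}
    validated = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "metadata"])
    # validated now holds exactly the required values; a missing required name or
    # an unexpected leftover key makes validate_kwargs raise an error instead.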

View File

@@ -0,0 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
certbot[dev]==1.6.0

certbot-apache/setup.cfg
View File

@@ -0,0 +1,2 @@
[bdist_wheel]
universal = 1

View File

@@ -1,16 +1,17 @@
from setuptools import find_packages
from setuptools import setup
version = '1.25.0.dev0'
version = '1.14.0.dev0'
# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
# We specify the minimum acme and certbot version as the current plugin
# version for simplicity. See
# https://github.com/certbot/certbot/issues/8761 for more info.
f'acme>={version}',
f'certbot>={version}',
'acme>=0.29.0',
'certbot>=1.6.0',
'python-augeas',
'setuptools>=41.6.0',
'setuptools>=39.0.1',
'zope.component',
'zope.interface',
]
dev_extras = [
@@ -23,9 +24,9 @@ setup(
description="Apache plugin for Certbot",
url='https://github.com/letsencrypt/letsencrypt',
author="Certbot Project",
author_email='certbot-dev@eff.org',
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=3.7',
python_requires='>=3.6',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -34,10 +35,10 @@ setup(
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',

View File

@@ -51,9 +51,9 @@ function Cleanup() {
# if our environment asks us to enable modules, do our best!
if [ "$1" = --debian-modules ] ; then
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y apache2
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libapache2-mod-wsgi-py3
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libapache2-mod-macro
sudo apt-get install -y apache2
sudo apt-get install -y libapache2-mod-wsgi-py3
sudo apt-get install -y libapache2-mod-macro
for mod in ssl rewrite macro wsgi deflate userdir version mime setenvif ; do
echo -n enabling $mod

View File

@@ -25,29 +25,24 @@ def _get_augeasnode_mock(filepath):
metadata=metadata)
return augeasnode_mock
class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-methods
"""Test AugeasParserNode using available test configurations"""
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(AugeasParserNodeTest, self).setUp()
with mock.patch(
"certbot_apache._internal.configurator.ApacheConfigurator.get_parsernode_root"
) as mock_parsernode:
with mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.get_parsernode_root") as mock_parsernode:
mock_parsernode.side_effect = _get_augeasnode_mock(
os.path.join(self.config_path, "apache2.conf"))
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
use_parsernode=True,
)
self.config_path, self.vhost_path, self.config_dir, self.work_dir, use_parsernode=True)
self.vh_truth = util.get_vh_truth(
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
def test_save(self):
with mock.patch('certbot_apache._internal.parser.ApacheParser.save') as mock_save:
self.config.parser_root.save("A save message")
self.assertIs(mock_save.called, True)
self.assertTrue(mock_save.called)
self.assertEqual(mock_save.call_args[0][0], "A save message")
def test_unsaved_files(self):
@@ -72,8 +67,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
"/Anything": "Anything",
}
for test in testcases:
# pylint: disable=protected-access
self.assertEqual(block._aug_get_name(test), testcases[test])
self.assertEqual(block._aug_get_name(test), testcases[test]) # pylint: disable=protected-access
def test_find_blocks(self):
blocks = self.config.parser_root.find_blocks("VirtualHost", exclude=False)
@@ -87,7 +81,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
def test_find_directive_found(self):
directives = self.config.parser_root.find_directives("Listen")
self.assertEqual(len(directives), 1)
self.assertIs(directives[0].filepath.endswith("/apache2/ports.conf"), True)
self.assertTrue(directives[0].filepath.endswith("/apache2/ports.conf"))
self.assertEqual(directives[0].parameters, (u'80',))
def test_find_directive_notfound(self):
@@ -102,29 +96,29 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
servername = vh.find_directives("servername")
self.assertEqual(servername[0].parameters[0], "certbot.demo")
found = True
self.assertIs(found, True)
self.assertTrue(found)
def test_find_comments(self):
rootcomment = self.config.parser_root.find_comments(
"This is the main Apache server configuration file. "
)
self.assertEqual(len(rootcomment), 1)
self.assertIs(rootcomment[0].filepath.endswith(
self.assertTrue(rootcomment[0].filepath.endswith(
"debian_apache_2_4/multiple_vhosts/apache2/apache2.conf"
), True)
))
def test_set_parameters(self):
servernames = self.config.parser_root.find_directives("servername")
names: List[str] = []
for servername in servernames:
names += servername.parameters
self.assertNotIn("going_to_set_this", names)
self.assertFalse("going_to_set_this" in names)
servernames[0].set_parameters(["something", "going_to_set_this"])
servernames = self.config.parser_root.find_directives("servername")
names = []
for servername in servernames:
names += servername.parameters
self.assertIn("going_to_set_this", names)
self.assertTrue("going_to_set_this" in names)
def test_set_parameters_atinit(self):
from certbot_apache._internal.augeasparser import AugeasDirectiveNode
@@ -137,7 +131,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
ancestor=assertions.PASS,
metadata=servernames[0].metadata
)
self.assertIs(mock_set.called, True)
self.assertTrue(mock_set.called)
self.assertEqual(
mock_set.call_args_list[0][0][0],
["test", "setting", "these"]
@@ -157,7 +151,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
self.assertEqual(len(servername.parameters), 3)
servername.set_parameters(["thisshouldnotexistpreviously"])
found = True
self.assertIs(found, True)
self.assertTrue(found)
# Verify params
servernames = self.config.parser_root.find_directives("servername")
@@ -167,7 +161,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
self.assertEqual(len(servername.parameters), 1)
servername.set_parameters(["thisshouldnotexistpreviously"])
found = True
self.assertIs(found, True)
self.assertTrue(found)
def test_add_child_comment(self):
newc = self.config.parser_root.add_child_comment("The content")
@@ -207,7 +201,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
rpath,
self.config.parser_root.metadata["augeaspath"]
)
self.assertIs(directive.startswith("NewBlock"), True)
self.assertTrue(directive.startswith("NewBlock"))
def test_add_child_block_beginning(self):
self.config.parser_root.add_child_block(
@@ -218,7 +212,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
root_path = self.config.parser_root.metadata["augeaspath"]
# Get first child
first = parser.aug.match("{}/*[1]".format(root_path))
self.assertIs(first[0].endswith("Beginning"), True)
self.assertTrue(first[0].endswith("Beginning"))
def test_add_child_block_append(self):
self.config.parser_root.add_child_block(
@@ -228,7 +222,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
root_path = self.config.parser_root.metadata["augeaspath"]
# Get last child
last = parser.aug.match("{}/*[last()]".format(root_path))
self.assertIs(last[0].endswith("VeryLast"), True)
self.assertTrue(last[0].endswith("VeryLast"))
def test_add_child_block_append_alt(self):
self.config.parser_root.add_child_block(
@@ -239,7 +233,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
root_path = self.config.parser_root.metadata["augeaspath"]
# Get last child
last = parser.aug.match("{}/*[last()]".format(root_path))
self.assertIs(last[0].endswith("VeryLastAlt"), True)
self.assertTrue(last[0].endswith("VeryLastAlt"))
def test_add_child_block_middle(self):
self.config.parser_root.add_child_block(
@@ -250,7 +244,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
root_path = self.config.parser_root.metadata["augeaspath"]
# Augeas indices start at 1 :(
middle = parser.aug.match("{}/*[6]".format(root_path))
self.assertIs(middle[0].endswith("Middle"), True)
self.assertTrue(middle[0].endswith("Middle"))
def test_add_child_block_existing_name(self):
parser = self.config.parser_root.parser
@@ -263,7 +257,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
)
new_block = parser.aug.match("{}/VirtualHost[2]".format(root_path))
self.assertEqual(len(new_block), 1)
self.assertIs(vh.metadata["augeaspath"].endswith("VirtualHost[2]"), True)
self.assertTrue(vh.metadata["augeaspath"].endswith("VirtualHost[2]"))
def test_node_init_error_bad_augeaspath(self):
from certbot_apache._internal.augeasparser import AugeasBlockNode
@@ -308,7 +302,7 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
self.assertEqual(len(dirs), 1)
self.assertEqual(dirs[0].parameters, ("with", "parameters"))
# The new directive was added to the very first line of the config
self.assertIs(dirs[0].metadata["augeaspath"].endswith("[1]"), True)
self.assertTrue(dirs[0].metadata["augeaspath"].endswith("[1]"))
def test_add_child_directive_exception(self):
self.assertRaises(
@@ -334,8 +328,8 @@ class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-
ancs = vh.find_ancestors("Macro")
self.assertEqual(len(ancs), 0)
nonmacro_test = True
self.assertIs(macro_test, True)
self.assertIs(nonmacro_test, True)
self.assertTrue(macro_test)
self.assertTrue(nonmacro_test)
def test_find_ancestors_bad_path(self):
self.config.parser_root.metadata["augeaspath"] = ""

View File

@@ -18,7 +18,7 @@ class AutoHSTSTest(util.ApacheTest):
# pylint: disable=protected-access
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(AutoHSTSTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
@@ -47,7 +47,7 @@ class AutoHSTSTest(util.ApacheTest):
self.config.parser.modules.pop("headers_module", None)
self.config.parser.modules.pop("mod_header.c", None)
self.config.enable_autohsts(mock.MagicMock(), ["ocspvhost.com"])
self.assertIs(mock_enable.called, True)
self.assertTrue(mock_enable.called)
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
def test_autohsts_deploy_already_exists(self, _restart):
@@ -74,7 +74,7 @@ class AutoHSTSTest(util.ApacheTest):
# Verify increased value
self.assertEqual(self.get_autohsts_value(self.vh_truth[7].path),
inc_val)
self.assertIs(mock_prepare.called, True)
self.assertTrue(mock_prepare.called)
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._autohsts_increase")
@@ -88,7 +88,7 @@ class AutoHSTSTest(util.ApacheTest):
self.config.update_autohsts(mock.MagicMock())
# Freq not patched, so value shouldn't increase
self.assertIs(mock_increase.called, False)
self.assertFalse(mock_increase.called)
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
@@ -135,28 +135,30 @@ class AutoHSTSTest(util.ApacheTest):
# Time mock is used to make sure that the execution does not
# continue when no autohsts entries exist in pluginstorage
self.config.update_autohsts(mock.MagicMock())
self.assertIs(mock_time.called, False)
self.assertFalse(mock_time.called)
def test_autohsts_make_permanent_noop(self):
self.config.storage.put = mock.MagicMock()
self.config.deploy_autohsts(mock.MagicMock())
# Make sure that the execution does not continue when no entries in store
self.assertIs(self.config.storage.put.called, False)
self.assertFalse(self.config.storage.put.called)
@mock.patch("certbot_apache._internal.display_ops.select_vhost")
def test_autohsts_no_ssl_vhost(self, mock_select):
mock_select.return_value = self.vh_truth[0]
with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.assertRaises(errors.PluginError,
self.config.enable_autohsts,
mock.MagicMock(), "invalid.example.com")
self.assertIn("Certbot was not able to find SSL", mock_log.call_args[0][0])
self.assertTrue(
"Certbot was not able to find SSL" in mock_log.call_args[0][0])
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.add_vhost_id")
def test_autohsts_dont_enhance_twice(self, mock_id, _restart):
mock_id.return_value = "1234567"
self.config.enable_autohsts(mock.MagicMock(), ["ocspvhost.com", "ocspvhost.com"])
self.config.enable_autohsts(mock.MagicMock(),
["ocspvhost.com", "ocspvhost.com"])
self.assertEqual(mock_id.call_count, 1)
def test_autohsts_remove_orphaned(self):
@@ -166,7 +168,7 @@ class AutoHSTSTest(util.ApacheTest):
self.config._autohsts_save_state()
self.config.update_autohsts(mock.MagicMock())
self.assertNotIn("orphan_id", self.config._autohsts)
self.assertFalse("orphan_id" in self.config._autohsts)
# Make sure it's removed from the pluginstorage file as well
self.config._autohsts = None
self.config._autohsts_fetch_state()
@@ -177,10 +179,11 @@ class AutoHSTSTest(util.ApacheTest):
self.config._autohsts_fetch_state()
self.config._autohsts["orphan_id"] = {"laststep": 999, "timestamp": 0}
self.config._autohsts_save_state()
with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.config.deploy_autohsts(mock.MagicMock())
self.assertIs(mock_log.called, True)
self.assertIn("VirtualHost with id orphan_id was not", mock_log.call_args[0][0])
self.assertTrue(mock_log.called)
self.assertTrue(
"VirtualHost with id orphan_id was not" in mock_log.call_args[0][0])
if __name__ == "__main__":

View File

@@ -1,6 +1,5 @@
"""Test for certbot_apache._internal.configurator for CentOS 6 overrides"""
import unittest
from unittest import mock
from certbot.compat import os
from certbot.errors import MisconfigurationError
@@ -37,9 +36,9 @@ class CentOS6Tests(util.ApacheTest):
test_dir = "centos6_apache/apache"
config_root = "centos6_apache/apache/httpd"
vhost_root = "centos6_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(CentOS6Tests, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
@@ -48,7 +47,8 @@ class CentOS6Tests(util.ApacheTest):
self.temp_dir, "centos6_apache/apache")
def test_get_parser(self):
self.assertIsInstance(self.config.parser, override_centos.CentOSParser)
self.assertTrue(isinstance(self.config.parser,
override_centos.CentOSParser))
def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found."""
@@ -65,15 +65,14 @@ class CentOS6Tests(util.ApacheTest):
raise Exception("Missed: %s" % vhost) # pragma: no cover
self.assertEqual(found, 2)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_default(self, unused_mock_notify):
def test_loadmod_default(self):
ssl_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
self.assertEqual(len(ssl_loadmods), 1)
# Make sure the LoadModule ssl_module is in ssl.conf (default)
self.assertIn("ssl.conf", ssl_loadmods[0])
self.assertTrue("ssl.conf" in ssl_loadmods[0])
# ...and that it's not inside of <IfModule>
self.assertNotIn("IfModule", ssl_loadmods[0])
self.assertFalse("IfModule" in ssl_loadmods[0])
# Get the example vhost
self.config.assoc["test.example.com"] = self.vh_truth[0]
@@ -94,10 +93,9 @@ class CentOS6Tests(util.ApacheTest):
# ...and both of them should be wrapped in <IfModule !mod_ssl.c>
# lm[:-17] strips off /directive/arg[1] from the path.
ifmod_args = self.config.parser.get_all_args(lm[:-17])
self.assertIn("!mod_ssl.c", ifmod_args)
self.assertTrue("!mod_ssl.c" in ifmod_args)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_multiple(self, unused_mock_notify):
def test_loadmod_multiple(self):
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
# Adds another LoadModule to main httpd.conf in addition to ssl.conf
self.config.parser.add_dir(self.config.parser.loc["default"], "LoadModule",
@@ -106,7 +104,7 @@ class CentOS6Tests(util.ApacheTest):
pre_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
# LoadModules are not within IfModule blocks
self.assertIs(any("ifmodule" in m.lower() for m in pre_loadmods), False)
self.assertFalse(any("ifmodule" in m.lower() for m in pre_loadmods))
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
@@ -115,12 +113,9 @@ class CentOS6Tests(util.ApacheTest):
"LoadModule", "ssl_module", exclude=False)
for mod in post_loadmods:
with self.subTest(mod=mod):
# pylint: disable=no-member
self.assertIs(self.config.parser.not_modssl_ifmodule(mod), True)
self.assertTrue(self.config.parser.not_modssl_ifmodule(mod)) #pylint: disable=no-member
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_rootconf_exists(self, unused_mock_notify):
def test_loadmod_rootconf_exists(self):
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
rootconf_ifmod = self.config.parser.get_ifmod(
parser.get_aug_path(self.config.parser.loc["default"]),
@@ -147,8 +142,7 @@ class CentOS6Tests(util.ApacheTest):
self.config.parser.get_all_args(mods[0][:-7]),
sslmod_args)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_neg_loadmod_already_on_path(self, unused_mock_notify):
def test_neg_loadmod_already_on_path(self):
loadmod_args = ["ssl_module", "modules/mod_ssl.so"]
ifmod = self.config.parser.get_ifmod(
self.vh_truth[1].path, "!mod_ssl.c", beginning=True)
@@ -191,8 +185,7 @@ class CentOS6Tests(util.ApacheTest):
# Make sure that none was changed
self.assertEqual(pre_matches, post_matches)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_not_found(self, unused_mock_notify):
def test_loadmod_not_found(self):
# Remove all existing LoadModule ssl_module... directives
orig_loadmods = self.config.parser.find_dir("LoadModule",
"ssl_module",
@@ -208,20 +201,20 @@ class CentOS6Tests(util.ApacheTest):
post_loadmods = self.config.parser.find_dir("LoadModule",
"ssl_module",
exclude=False)
self.assertEqual(post_loadmods, [])
self.assertFalse(post_loadmods)
def test_no_ifmod_search_false(self):
#pylint: disable=no-member
self.assertIs(self.config.parser.not_modssl_ifmodule(
self.assertFalse(self.config.parser.not_modssl_ifmodule(
"/path/does/not/include/ifmod"
), False)
self.assertIs(self.config.parser.not_modssl_ifmodule(
))
self.assertFalse(self.config.parser.not_modssl_ifmodule(
""
), False)
self.assertIs(self.config.parser.not_modssl_ifmodule(
))
self.assertFalse(self.config.parser.not_modssl_ifmodule(
"/path/includes/IfModule/but/no/arguments"
), False)
))
if __name__ == "__main__":

View File

@@ -34,7 +34,6 @@ def get_vh_truth(temp_dir, config_name):
]
return vh_truth
class FedoraRestartTest(util.ApacheTest):
"""Tests for Fedora specific self-signed certificate override"""
@@ -42,9 +41,9 @@ class FedoraRestartTest(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="fedora_old")
@@ -97,9 +96,9 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(MultipleVhostsTestCentOS, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
@@ -141,8 +140,8 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertIn("TEST2", self.config.parser.variables)
self.assertIn("mod_another.c", self.config.parser.modules)
self.assertTrue("TEST2" in self.config.parser.variables)
self.assertTrue("mod_another.c" in self.config.parser.modules)
def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found."""
@@ -173,11 +172,11 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
mock_osi.return_value = ("centos", "7")
self.config.parser.update_runtime_variables()
self.assertIn("mock_define", self.config.parser.variables)
self.assertIn("mock_define_too", self.config.parser.variables)
self.assertIn("mock_value", self.config.parser.variables)
self.assertTrue("mock_define" in self.config.parser.variables)
self.assertTrue("mock_define_too" in self.config.parser.variables)
self.assertTrue("mock_value" in self.config.parser.variables)
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
self.assertIn("MOCK_NOSEP", self.config.parser.variables)
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
@mock.patch("certbot_apache._internal.configurator.util.run_script")

View File

@@ -11,7 +11,8 @@ class ComplexParserTest(util.ParserTest):
"""Apache Parser Test."""
def setUp(self): # pylint: disable=arguments-differ
super().setUp("complex_parsing", "complex_parsing")
super(ComplexParserTest, self).setUp(
"complex_parsing", "complex_parsing")
self.setup_variables()
# This needs to happen after due to setup_variables not being run
@@ -77,12 +78,12 @@ class ComplexParserTest(util.ParserTest):
def test_load_modules(self):
"""If only first is found, there is bad variable parsing."""
self.assertIn("status_module", self.parser.modules)
self.assertIn("mod_status.c", self.parser.modules)
self.assertTrue("status_module" in self.parser.modules)
self.assertTrue("mod_status.c" in self.parser.modules)
# This is in an IfDefine
self.assertIn("ssl_module", self.parser.modules)
self.assertIn("mod_ssl.c", self.parser.modules)
self.assertTrue("ssl_module" in self.parser.modules)
self.assertTrue("mod_ssl.c" in self.parser.modules)
def verify_fnmatch(self, arg, hit=True):
"""Test if Include was correctly parsed."""

View File

@@ -14,13 +14,15 @@ import util
class ConfiguratorReverterTest(util.ApacheTest):
"""Test for ApacheConfigurator reverter methods"""
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(ConfiguratorReverterTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
self.vh_truth = util.get_vh_truth(self.temp_dir, "debian_apache_2_4/multiple_vhosts")
self.vh_truth = util.get_vh_truth(
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
def tearDown(self):
shutil.rmtree(self.config_dir)
@@ -28,13 +30,17 @@ class ConfiguratorReverterTest(util.ApacheTest):
shutil.rmtree(self.temp_dir)
def test_bad_save_checkpoint(self):
self.config.reverter.add_to_checkpoint = mock.Mock(side_effect=errors.ReverterError)
self.config.parser.add_dir(self.vh_truth[0].path, "Test", "bad_save_ckpt")
self.config.reverter.add_to_checkpoint = mock.Mock(
side_effect=errors.ReverterError)
self.config.parser.add_dir(
self.vh_truth[0].path, "Test", "bad_save_ckpt")
self.assertRaises(errors.PluginError, self.config.save)
def test_bad_save_finalize_checkpoint(self):
self.config.reverter.finalize_checkpoint = mock.Mock(side_effect=errors.ReverterError)
self.config.parser.add_dir(self.vh_truth[0].path, "Test", "bad_save_ckpt")
self.config.reverter.finalize_checkpoint = mock.Mock(
side_effect=errors.ReverterError)
self.config.parser.add_dir(
self.vh_truth[0].path, "Test", "bad_save_ckpt")
self.assertRaises(errors.PluginError, self.config.save, "Title")
def test_finalize_save(self):
@@ -66,7 +72,8 @@ class ConfiguratorReverterTest(util.ApacheTest):
self.assertEqual(mock_load.call_count, 1)
def test_rollback_error(self):
self.config.reverter.rollback_checkpoints = mock.Mock(side_effect=errors.ReverterError)
self.config.reverter.rollback_checkpoints = mock.Mock(
side_effect=errors.ReverterError)
self.assertRaises(errors.PluginError, self.config.rollback_checkpoints)
def test_recovery_routine_reload(self):

View File

@@ -30,7 +30,7 @@ class MultipleVhostsTest(util.ApacheTest):
"""Test two standard well-configured HTTP vhosts."""
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(MultipleVhostsTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
@@ -83,8 +83,8 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.prepare()
except errors.PluginError as err:
err_msg = str(err)
self.assertIn("lock", err_msg)
self.assertIn(self.config.conf("server-root"), err_msg)
self.assertTrue("lock" in err_msg)
self.assertTrue(self.config.conf("server-root") in err_msg)
else: # pragma: no cover
self.fail("Exception wasn't raised!")
@@ -103,9 +103,9 @@ class MultipleVhostsTest(util.ApacheTest):
"handle_modules", "handle_sites", "ctl"]
exp = {}
for k in ApacheConfigurator.OS_DEFAULTS.__dict__.keys():
for k in ApacheConfigurator.OS_DEFAULTS:
if k in parserargs:
exp[k.replace("_", "-")] = getattr(ApacheConfigurator.OS_DEFAULTS, k)
exp[k.replace("_", "-")] = ApacheConfigurator.OS_DEFAULTS[k]
# Special cases
exp["vhost-root"] = None
@@ -116,8 +116,7 @@ class MultipleVhostsTest(util.ApacheTest):
# Make sure that all (and only) the expected values exist
self.assertEqual(len(mock_add.call_args_list), len(found))
for e in exp:
with self.subTest(e=e):
self.assertIn(e, found)
self.assertTrue(e in found)
del os.environ["CERTBOT_DOCS"]
@@ -129,14 +128,16 @@ class MultipleVhostsTest(util.ApacheTest):
def test_all_configurators_defaults_defined(self):
from certbot_apache._internal.entrypoint import OVERRIDE_CLASSES
from certbot_apache._internal.configurator import ApacheConfigurator
parameters = set(ApacheConfigurator.OS_DEFAULTS.__dict__.keys())
parameters = set(ApacheConfigurator.OS_DEFAULTS.keys())
for cls in OVERRIDE_CLASSES.values():
self.assertIs(parameters.issubset(set(cls.OS_DEFAULTS.__dict__.keys())), True)
self.assertTrue(parameters.issubset(set(cls.OS_DEFAULTS.keys())))
def test_constant(self):
self.assertIn("debian_apache_2_4/multiple_vhosts/apache", self.config.options.server_root)
self.assertTrue("debian_apache_2_4/multiple_vhosts/apache" in
self.config.option("server_root"))
self.assertEqual(self.config.option("nonexistent"), None)
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_get_all_names(self, mock_getutility):
mock_utility = mock_getutility()
mock_utility.notification = mock.MagicMock(return_value=True)
@@ -145,7 +146,7 @@ class MultipleVhostsTest(util.ApacheTest):
"nonsym.link", "vhost.in.rootconf", "www.certbot.demo",
"duplicate.example.com"})
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
@mock.patch("certbot_apache._internal.configurator.socket.gethostbyaddr")
def test_get_all_names_addrs(self, mock_gethost, mock_getutility):
mock_gethost.side_effect = [("google.com", "", ""), socket.error]
@@ -162,9 +163,9 @@ class MultipleVhostsTest(util.ApacheTest):
names = self.config.get_all_names()
self.assertEqual(len(names), 9)
self.assertIn("zombo.com", names)
self.assertIn("google.com", names)
self.assertIn("certbot.demo", names)
self.assertTrue("zombo.com" in names)
self.assertTrue("google.com" in names)
self.assertTrue("certbot.demo" in names)
def test_get_bad_path(self):
self.assertEqual(apache_util.get_file_path(None), None)
@@ -188,14 +189,16 @@ class MultipleVhostsTest(util.ApacheTest):
True, False)
# pylint: disable=protected-access
self.config._add_servernames(ssl_vh1)
self.assertIsNone(self.config._add_servername_alias("oy_vey", ssl_vh1))
self.assertTrue(
self.config._add_servername_alias("oy_vey", ssl_vh1) is None)
def test_add_servernames_alias(self):
self.config.parser.add_dir(
self.vh_truth[2].path, "ServerAlias", ["*.le.co"])
# pylint: disable=protected-access
self.config._add_servernames(self.vh_truth[2])
self.assertEqual(self.vh_truth[2].get_names(), {"*.le.co", "ip-172-30-0-17"})
self.assertEqual(
self.vh_truth[2].get_names(), {"*.le.co", "ip-172-30-0-17"})
def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found."""
@@ -244,8 +247,8 @@ class MultipleVhostsTest(util.ApacheTest):
self.vh_truth[0].get_names(), chosen_vhost.get_names())
# Make sure we go from HTTP -> HTTPS
self.assertIs(self.vh_truth[0].ssl, False)
self.assertIs(chosen_vhost.ssl, True)
self.assertFalse(self.vh_truth[0].ssl)
self.assertTrue(chosen_vhost.ssl)
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._find_best_vhost")
@mock.patch("certbot_apache._internal.parser.ApacheParser.add_dir")
@@ -254,7 +257,7 @@ class MultipleVhostsTest(util.ApacheTest):
ret_vh.enabled = False
mock_find.return_value = self.vh_truth[8]
self.config.choose_vhost("whatever.com")
self.assertIs(mock_add.called, True)
self.assertTrue(mock_add.called)
@mock.patch("certbot_apache._internal.display_ops.select_vhost")
def test_choose_vhost_select_vhost_with_temp(self, mock_select):
@@ -289,17 +292,23 @@ class MultipleVhostsTest(util.ApacheTest):
def test_findbest_continues_on_short_domain(self):
# pylint: disable=protected-access
self.assertIsNone(self.config._find_best_vhost("purple.com"))
chosen_vhost = self.config._find_best_vhost("purple.com")
self.assertEqual(None, chosen_vhost)
def test_findbest_continues_on_long_domain(self):
# pylint: disable=protected-access
self.assertIsNone(self.config._find_best_vhost("green.red.purple.com"))
chosen_vhost = self.config._find_best_vhost("green.red.purple.com")
self.assertEqual(None, chosen_vhost)
def test_find_best_vhost(self):
# pylint: disable=protected-access
self.assertEqual(self.vh_truth[3], self.config._find_best_vhost("certbot.demo"))
self.assertEqual(self.vh_truth[0], self.config._find_best_vhost("encryption-example.demo"))
self.assertEqual(self.config._find_best_vhost("does-not-exist.com"), None)
self.assertEqual(
self.vh_truth[3], self.config._find_best_vhost("certbot.demo"))
self.assertEqual(
self.vh_truth[0],
self.config._find_best_vhost("encryption-example.demo"))
self.assertEqual(
self.config._find_best_vhost("does-not-exist.com"), None)
def test_find_best_vhost_variety(self):
# pylint: disable=protected-access
@@ -329,19 +338,18 @@ class MultipleVhostsTest(util.ApacheTest):
vhosts = self.config._non_default_vhosts(self.config.vhosts)
self.assertEqual(len(vhosts), 10)
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
def test_deploy_cert_enable_new_vhost(self, unused_mock_notify):
def test_deploy_cert_enable_new_vhost(self):
# Create
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
self.config.parser.modules["ssl_module"] = None
self.config.parser.modules["mod_ssl.c"] = None
self.config.parser.modules["socache_shmcb_module"] = None
self.assertIs(ssl_vhost.enabled, False)
self.assertFalse(ssl_vhost.enabled)
self.config.deploy_cert(
"encryption-example.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.assertIs(ssl_vhost.enabled, True)
self.assertTrue(ssl_vhost.enabled)
def test_no_duplicate_include(self):
def mock_find_dir(directive, argument, _):
@@ -358,7 +366,7 @@ class MultipleVhostsTest(util.ApacheTest):
if a[0][1] == "Include" and a[0][2] == self.config.mod_ssl_conf:
tried_to_add = True
# Include should be added, find_dir is not patched, and returns falsy
self.assertIs(tried_to_add, True)
self.assertTrue(tried_to_add)
self.config.parser.find_dir = mock_find_dir
mock_add.reset_mock()
@@ -368,8 +376,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.fail("Include shouldn't be added, as patched find_dir 'finds' existing one") \
# pragma: no cover
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
def test_deploy_cert(self, unused_mock_notify):
def test_deploy_cert(self):
self.config.parser.modules["ssl_module"] = None
self.config.parser.modules["mod_ssl.c"] = None
self.config.parser.modules["socache_shmcb_module"] = None
@@ -387,16 +394,20 @@ class MultipleVhostsTest(util.ApacheTest):
f_args.append(self.config.parser.get_arg(d))
return f_args
# Verify that the dummy directives do not exist
self.assertNotIn(
"insert_cert_file_path", find_args(vhostpath, "SSLCertificateFile"))
self.assertNotIn(
"insert_key_file_path", find_args(vhostpath, "SSLCertificateKeyFile"))
self.assertFalse(
"insert_cert_file_path" in find_args(vhostpath,
"SSLCertificateFile"))
self.assertFalse(
"insert_key_file_path" in find_args(vhostpath,
"SSLCertificateKeyFile"))
orig_add_dummy(vhostpath)
# Verify that the dummy directives exist
self.assertIn(
"insert_cert_file_path", find_args(vhostpath, "SSLCertificateFile"))
self.assertIn(
"insert_key_file_path", find_args(vhostpath, "SSLCertificateKeyFile"))
self.assertTrue(
"insert_cert_file_path" in find_args(vhostpath,
"SSLCertificateFile"))
self.assertTrue(
"insert_key_file_path" in find_args(vhostpath,
"SSLCertificateKeyFile"))
# pylint: disable=protected-access
self.config._add_dummy_ssl_directives = mock_add_dummy_ssl
@@ -408,8 +419,8 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.save()
# Verify ssl_module was enabled.
self.assertIs(self.vh_truth[1].enabled, True)
self.assertIn("ssl_module", self.config.parser.modules)
self.assertTrue(self.vh_truth[1].enabled)
self.assertTrue("ssl_module" in self.config.parser.modules)
loc_cert = self.config.parser.find_dir(
"sslcertificatefile", "example/cert.pem", self.vh_truth[1].path)
@@ -445,15 +456,17 @@ class MultipleVhostsTest(util.ApacheTest):
def test_is_name_vhost(self):
addr = obj.Addr.fromstring("*:80")
self.assertIs(self.config.is_name_vhost(addr), True)
self.assertTrue(self.config.is_name_vhost(addr))
self.config.version = (2, 2)
self.assertIs(self.config.is_name_vhost(addr), False)
self.assertFalse(self.config.is_name_vhost(addr))
def test_add_name_vhost(self):
self.config.add_name_vhost(obj.Addr.fromstring("*:443"))
self.config.add_name_vhost(obj.Addr.fromstring("*:80"))
self.assertTrue(self.config.parser.find_dir("NameVirtualHost", "*:443", exclude=False))
self.assertTrue(self.config.parser.find_dir("NameVirtualHost", "*:80"))
self.assertTrue(self.config.parser.find_dir(
"NameVirtualHost", "*:443", exclude=False))
self.assertTrue(self.config.parser.find_dir(
"NameVirtualHost", "*:80"))
def test_add_listen_80(self):
mock_find = mock.Mock()
@@ -462,8 +475,8 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.parser.find_dir = mock_find
self.config.parser.add_dir = mock_add_dir
self.config.ensure_listen("80")
self.assertIs(mock_add_dir.called, True)
self.assertIs(mock_find.called, True)
self.assertTrue(mock_add_dir.called)
self.assertTrue(mock_find.called)
self.assertEqual(mock_add_dir.call_args[0][1], "Listen")
self.assertEqual(mock_add_dir.call_args[0][2], "80")
@@ -488,13 +501,13 @@ class MultipleVhostsTest(util.ApacheTest):
# Test
self.config.ensure_listen("8080")
self.assertEqual(mock_add_dir.call_count, 3)
self.assertIs(mock_add_dir.called, True)
self.assertTrue(mock_add_dir.called)
self.assertEqual(mock_add_dir.call_args[0][1], "Listen")
call_found = False
for mock_call in mock_add_dir.mock_calls:
if mock_call[1][2] == ['1.2.3.4:8080']:
call_found = True
self.assertIs(call_found, True)
self.assertTrue(call_found)
@mock.patch("certbot_apache._internal.parser.ApacheParser.reset_modules")
def test_prepare_server_https(self, mock_reset):
@@ -617,8 +630,8 @@ class MultipleVhostsTest(util.ApacheTest):
def test_make_vhost_ssl_nonsymlink(self):
ssl_vhost_slink = self.config.make_vhost_ssl(self.vh_truth[8])
self.assertIs(ssl_vhost_slink.ssl, True)
self.assertIs(ssl_vhost_slink.enabled, True)
self.assertTrue(ssl_vhost_slink.ssl)
self.assertTrue(ssl_vhost_slink.enabled)
self.assertEqual(ssl_vhost_slink.name, "nonsym.link")
def test_make_vhost_ssl_nonexistent_vhost_path(self):
@@ -639,8 +652,8 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertEqual(len(ssl_vhost.addrs), 1)
self.assertEqual({obj.Addr.fromstring("*:443")}, ssl_vhost.addrs)
self.assertEqual(ssl_vhost.name, "encryption-example.demo")
self.assertIs(ssl_vhost.ssl, True)
self.assertIs(ssl_vhost.enabled, False)
self.assertTrue(ssl_vhost.ssl)
self.assertFalse(ssl_vhost.enabled)
self.assertEqual(self.config.is_name_vhost(self.vh_truth[0]),
self.config.is_name_vhost(ssl_vhost))
@@ -719,14 +732,15 @@ class MultipleVhostsTest(util.ApacheTest):
def test_get_ssl_vhost_path(self):
# pylint: disable=protected-access
self.assertIs(self.config._get_ssl_vhost_path("example_path").endswith(".conf"), True)
self.assertTrue(
self.config._get_ssl_vhost_path("example_path").endswith(".conf"))
def test_add_name_vhost_if_necessary(self):
# pylint: disable=protected-access
self.config.add_name_vhost = mock.Mock()
self.config.version = (2, 2)
self.config._add_name_vhost_if_necessary(self.vh_truth[0])
self.assertIs(self.config.add_name_vhost.called, True)
self.assertTrue(self.config.add_name_vhost.called)
new_addrs = set()
for addr in self.vh_truth[0].addrs:
@@ -765,9 +779,9 @@ class MultipleVhostsTest(util.ApacheTest):
for i, achall in enumerate(achalls):
self.config.cleanup([achall])
if i == len(achalls) - 1:
self.assertIs(mock_restart.called, True)
self.assertTrue(mock_restart.called)
else:
self.assertIs(mock_restart.called, False)
self.assertFalse(mock_restart.called)
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
@mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
@@ -780,10 +794,10 @@ class MultipleVhostsTest(util.ApacheTest):
self.config._chall_out.add(achall) # pylint: disable=protected-access
self.config.cleanup([achalls[-1]])
self.assertIs(mock_restart.called, False)
self.assertFalse(mock_restart.called)
self.config.cleanup(achalls)
self.assertIs(mock_restart.called, True)
self.assertTrue(mock_restart.called)
@mock.patch("certbot.util.run_script")
def test_get_version(self, mock_script):
@@ -832,18 +846,18 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertTrue(self.config.more_info())
def test_get_chall_pref(self):
self.assertIsInstance(self.config.get_chall_pref(""), list)
self.assertTrue(isinstance(self.config.get_chall_pref(""), list))
def test_install_ssl_options_conf(self):
path = os.path.join(self.work_dir, "test_it")
other_path = os.path.join(self.work_dir, "other_test_it")
self.config.install_ssl_options_conf(path, other_path)
self.assertIs(os.path.isfile(path), True)
self.assertIs(os.path.isfile(other_path), True)
self.assertTrue(os.path.isfile(path))
self.assertTrue(os.path.isfile(other_path))
# TEST ENHANCEMENTS
def test_supported_enhancements(self):
self.assertIsInstance(self.config.supported_enhancements(), list)
self.assertTrue(isinstance(self.config.supported_enhancements(), list))
def test_find_http_vhost_without_ancestor(self):
# pylint: disable=protected-access
@@ -878,20 +892,20 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.enhance, "certbot.demo", "unknown_enhancement")
def test_enhance_no_ssl_vhost(self):
with mock.patch("certbot_apache._internal.configurator.logger.error") as mock_log:
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.assertRaises(errors.PluginError, self.config.enhance,
"certbot.demo", "redirect")
# Check that correct logger.warning was printed
self.assertIn("not able to find", mock_log.call_args[0][0])
self.assertIn("\"redirect\"", mock_log.call_args[0][0])
self.assertTrue("not able to find" in mock_log.call_args[0][0])
self.assertTrue("\"redirect\"" in mock_log.call_args[0][0])
mock_log.reset_mock()
self.assertRaises(errors.PluginError, self.config.enhance,
"certbot.demo", "ensure-http-header", "Test")
# Check that correct logger.warning was printed
self.assertIn("not able to find", mock_log.call_args[0][0])
self.assertIn("Test", mock_log.call_args[0][0])
self.assertTrue("not able to find" in mock_log.call_args[0][0])
self.assertTrue("Test" in mock_log.call_args[0][0])
@mock.patch("certbot.util.exe_exists")
def test_ocsp_stapling(self, mock_exe):
@@ -969,7 +983,7 @@ class MultipleVhostsTest(util.ApacheTest):
# pylint: disable=protected-access
http_vh = self.config._get_http_vhost(ssl_vh)
self.assertIs(http_vh.ssl, False)
self.assertFalse(http_vh.ssl)
@mock.patch("certbot.util.run_script")
@mock.patch("certbot.util.exe_exists")
@@ -1024,7 +1038,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.enhance("certbot.demo", "ensure-http-header",
"Upgrade-Insecure-Requests")
self.assertIn("headers_module", self.config.parser.modules)
self.assertTrue("headers_module" in self.config.parser.modules)
# Get the ssl vhost for certbot.demo
ssl_vhost = self.config.assoc["certbot.demo"]
@@ -1076,8 +1090,8 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertEqual(len(rw_rule), 3)
# [:-3] to remove the vhost index number
self.assertIs(rw_engine[0].startswith(self.vh_truth[3].path[:-3]), True)
self.assertIs(rw_rule[0].startswith(self.vh_truth[3].path[:-3]), True)
self.assertTrue(rw_engine[0].startswith(self.vh_truth[3].path[:-3]))
self.assertTrue(rw_rule[0].startswith(self.vh_truth[3].path[:-3]))
def test_rewrite_rule_exists(self):
# Skip the enable mod
@@ -1086,7 +1100,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.parser.add_dir(
self.vh_truth[3].path, "RewriteRule", ["Unknown"])
# pylint: disable=protected-access
self.assertIs(self.config._is_rewrite_exists(self.vh_truth[3]), True)
self.assertTrue(self.config._is_rewrite_exists(self.vh_truth[3]))
def test_rewrite_engine_exists(self):
# Skip the enable mod
@@ -1126,10 +1140,10 @@ class MultipleVhostsTest(util.ApacheTest):
# three args to rw_rule + 1 arg for the pre existing rewrite
self.assertEqual(len(rw_rule), 5)
# [:-3] to remove the vhost index number
self.assertIs(rw_engine[0].startswith(self.vh_truth[3].path[:-3]), True)
self.assertIs(rw_rule[0].startswith(self.vh_truth[3].path[:-3]), True)
self.assertTrue(rw_engine[0].startswith(self.vh_truth[3].path[:-3]))
self.assertTrue(rw_rule[0].startswith(self.vh_truth[3].path[:-3]))
self.assertIn("rewrite_module", self.config.parser.modules)
self.assertTrue("rewrite_module" in self.config.parser.modules)
@mock.patch("certbot.util.run_script")
@mock.patch("certbot.util.exe_exists")
@@ -1187,7 +1201,7 @@ class MultipleVhostsTest(util.ApacheTest):
"ApacheConfigurator._verify_no_certbot_redirect")
with mock.patch(verify_no_redirect) as mock_verify:
self.config.enhance("green.blue.purple.com", "redirect")
self.assertIs(mock_verify.called, False)
self.assertFalse(mock_verify.called)
def test_redirect_from_previous_run(self):
# Skip the enable mod
@@ -1228,16 +1242,16 @@ class MultipleVhostsTest(util.ApacheTest):
def test_sift_rewrite_rule(self):
# pylint: disable=protected-access
small_quoted_target = "RewriteRule ^ \"http://\""
self.assertIs(self.config._sift_rewrite_rule(small_quoted_target), False)
self.assertFalse(self.config._sift_rewrite_rule(small_quoted_target))
https_target = "RewriteRule ^ https://satoshi"
self.assertIs(self.config._sift_rewrite_rule(https_target), True)
self.assertTrue(self.config._sift_rewrite_rule(https_target))
normal_target = "RewriteRule ^/(.*) http://www.a.com:1234/$1 [L,R]"
self.assertIs(self.config._sift_rewrite_rule(normal_target), False)
self.assertFalse(self.config._sift_rewrite_rule(normal_target))
not_rewriterule = "NotRewriteRule ^ ..."
self.assertIs(self.config._sift_rewrite_rule(not_rewriterule), False)
self.assertFalse(self.config._sift_rewrite_rule(not_rewriterule))
def get_key_and_achalls(self):
"""Return testing achallenges."""
@@ -1266,17 +1280,18 @@ class MultipleVhostsTest(util.ApacheTest):
vhost = self.vh_truth[0]
vhost.enabled = False
vhost.filep = inc_path
self.assertEqual(self.config.parser.find_dir("Include", inc_path), [])
self.assertNotIn(os.path.dirname(inc_path), self.config.parser.existing_paths)
self.assertFalse(self.config.parser.find_dir("Include", inc_path))
self.assertFalse(
os.path.dirname(inc_path) in self.config.parser.existing_paths)
self.config.enable_site(vhost)
self.assertGreaterEqual(len(self.config.parser.find_dir("Include", inc_path)), 1)
self.assertIn(os.path.dirname(inc_path), self.config.parser.existing_paths)
self.assertIn(
os.path.basename(inc_path), self.config.parser.existing_paths[
self.assertTrue(self.config.parser.find_dir("Include", inc_path))
self.assertTrue(
os.path.dirname(inc_path) in self.config.parser.existing_paths)
self.assertTrue(
os.path.basename(inc_path) in self.config.parser.existing_paths[
os.path.dirname(inc_path)])
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
def test_deploy_cert_not_parsed_path(self, unused_mock_notify):
def test_deploy_cert_not_parsed_path(self):
# Make sure that we add include to root config for vhosts when
# handle-sites is false
self.config.parser.modules["ssl_module"] = None
@@ -1295,7 +1310,7 @@ class MultipleVhostsTest(util.ApacheTest):
"example/cert.pem", "example/key.pem",
"example/cert_chain.pem")
# Test that we actually called add_include
self.assertIs(mock_add.called, True)
self.assertTrue(mock_add.called)
shutil.rmtree(tmp_path)
def test_deploy_cert_no_mod_ssl(self):
@@ -1314,7 +1329,7 @@ class MultipleVhostsTest(util.ApacheTest):
ret_vh.enabled = True
self.config.enable_site(ret_vh)
# Make sure that we return early
self.assertIs(mock_parsed.called, False)
self.assertFalse(mock_parsed.called)
def test_enable_mod_unsupported(self):
self.assertRaises(errors.MisconfigurationError,
@@ -1335,7 +1350,7 @@ class MultipleVhostsTest(util.ApacheTest):
# And the actual returned values
self.assertEqual(len(vhs), 1)
self.assertEqual(vhs[0].name, "certbot.demo")
self.assertIs(vhs[0].ssl, True)
self.assertTrue(vhs[0].ssl)
self.assertNotEqual(vhs[0], self.vh_truth[3])
@@ -1347,7 +1362,7 @@ class MultipleVhostsTest(util.ApacheTest):
mock_select_vhs.return_value = [self.vh_truth[1]]
vhs = self.config._choose_vhosts_wildcard("*.certbot.demo",
create_ssl=False)
self.assertIs(mock_makessl.called, False)
self.assertFalse(mock_makessl.called)
self.assertEqual(vhs[0], self.vh_truth[1])
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._vhosts_for_wildcard")
@@ -1364,15 +1379,15 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertEqual(mock_select_vhs.call_args[0][0][0], self.vh_truth[7])
self.assertEqual(len(mock_select_vhs.call_args_list), 1)
# Ensure that make_vhost_ssl was not called, vhost.ssl == true
self.assertIs(mock_makessl.called, False)
self.assertFalse(mock_makessl.called)
# And the actual returned values
self.assertEqual(len(vhs), 1)
self.assertIs(vhs[0].ssl, True)
self.assertTrue(vhs[0].ssl)
self.assertEqual(vhs[0], self.vh_truth[7])
@mock.patch('certbot_apache._internal.configurator.display_util.notify')
def test_deploy_cert_wildcard(self, unused_mock_notify):
def test_deploy_cert_wildcard(self):
# pylint: disable=protected-access
mock_choose_vhosts = mock.MagicMock()
mock_choose_vhosts.return_value = [self.vh_truth[7]]
@@ -1381,7 +1396,7 @@ class MultipleVhostsTest(util.ApacheTest):
with mock.patch(mock_d) as mock_dep:
self.config.deploy_cert("*.wildcard.example.org", "/tmp/path",
"/tmp/path", "/tmp/path", "/tmp/path")
self.assertIs(mock_dep.called, True)
self.assertTrue(mock_dep.called)
self.assertEqual(len(mock_dep.call_args_list), 1)
self.assertEqual(self.vh_truth[7], mock_dep.call_args_list[0][0][0])
@@ -1403,7 +1418,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.config._wildcard_vhosts["*.certbot.demo"] = [self.vh_truth[3]]
self.config.enhance("*.certbot.demo", "ensure-http-header",
"Upgrade-Insecure-Requests")
self.assertIs(mock_choose.called, False)
self.assertFalse(mock_choose.called)
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._choose_vhosts_wildcard")
def test_enhance_wildcard_no_install(self, mock_choose):
@@ -1413,7 +1428,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.parser.modules["headers_module"] = None
self.config.enhance("*.certbot.demo", "ensure-http-header",
"Upgrade-Insecure-Requests")
self.assertIs(mock_choose.called, True)
self.assertTrue(mock_choose.called)
def test_add_vhost_id(self):
for vh in [self.vh_truth[0], self.vh_truth[1], self.vh_truth[2]]:
@@ -1462,9 +1477,9 @@ class AugeasVhostsTest(util.ApacheTest):
td = "debian_apache_2_4/augeas_vhosts"
cr = "debian_apache_2_4/augeas_vhosts/apache2"
vr = "debian_apache_2_4/augeas_vhosts/apache2/sites-available"
super().setUp(test_dir=td,
config_root=cr,
vhost_root=vr)
super(AugeasVhostsTest, self).setUp(test_dir=td,
config_root=cr,
vhost_root=vr)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir,
@@ -1492,8 +1507,7 @@ class AugeasVhostsTest(util.ApacheTest):
names = (
"an.example.net", "another.example.net", "an.other.example.net")
for name in names:
with self.subTest(name=name):
self.assertNotIn(name, self.config.choose_vhost(name).aliases)
self.assertFalse(name in self.config.choose_vhost(name).aliases)
@mock.patch("certbot_apache._internal.obj.VirtualHost.conflicts")
def test_choose_vhost_without_matching_wildcard(self, mock_conflicts):
@@ -1501,7 +1515,7 @@ class AugeasVhostsTest(util.ApacheTest):
mock_path = "certbot_apache._internal.display_ops.select_vhost"
with mock.patch(mock_path, lambda _, vhosts: vhosts[0]):
for name in ("a.example.net", "other.example.net"):
self.assertIn(name, self.config.choose_vhost(name).aliases)
self.assertTrue(name in self.config.choose_vhost(name).aliases)
@mock.patch("certbot_apache._internal.obj.VirtualHost.conflicts")
def test_choose_vhost_wildcard_not_found(self, mock_conflicts):
@@ -1534,7 +1548,6 @@ class AugeasVhostsTest(util.ApacheTest):
self.assertRaises(errors.PluginError, self.config.make_vhost_ssl,
broken_vhost)
class MultiVhostsTest(util.ApacheTest):
"""Test configuration with multiple virtualhosts in a single file."""
# pylint: disable=protected-access
@@ -1543,7 +1556,9 @@ class MultiVhostsTest(util.ApacheTest):
td = "debian_apache_2_4/multi_vhosts"
cr = "debian_apache_2_4/multi_vhosts/apache2"
vr = "debian_apache_2_4/multi_vhosts/apache2/sites-available"
super().setUp(test_dir=td, config_root=cr, vhost_root=vr)
super(MultiVhostsTest, self).setUp(test_dir=td,
config_root=cr,
vhost_root=vr)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path,
@@ -1564,8 +1579,9 @@ class MultiVhostsTest(util.ApacheTest):
self.assertEqual(len(ssl_vhost.addrs), 1)
self.assertEqual({obj.Addr.fromstring("*:443")}, ssl_vhost.addrs)
self.assertEqual(ssl_vhost.name, "banana.vomit.com")
self.assertIs(ssl_vhost.ssl, True)
self.assertIs(ssl_vhost.enabled, False)
self.assertTrue(ssl_vhost.ssl)
self.assertFalse(ssl_vhost.enabled)
self.assertEqual(self.config.is_name_vhost(self.vh_truth[1]),
self.config.is_name_vhost(ssl_vhost))
@@ -1591,13 +1607,14 @@ class MultiVhostsTest(util.ApacheTest):
self.assertEqual(self.config._get_new_vh_path(without_index, both),
with_index_2[0])
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_make_vhost_ssl_with_existing_rewrite_rule(self, mock_notify):
@certbot_util.patch_get_utility()
def test_make_vhost_ssl_with_existing_rewrite_rule(self, mock_get_utility):
self.config.parser.modules["rewrite_module"] = None
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[4])
self.assertTrue(self.config.parser.find_dir("RewriteEngine", "on", ssl_vhost.path, False))
self.assertTrue(self.config.parser.find_dir(
"RewriteEngine", "on", ssl_vhost.path, False))
with open(ssl_vhost.filep) as the_file:
conf_text = the_file.read()
@@ -1605,13 +1622,13 @@ class MultiVhostsTest(util.ApacheTest):
"\"https://new.example.com/docs/$1\" [R,L]")
uncommented_rewrite_rule = ("RewriteRule \"^/docs/(.+)\" "
"\"http://new.example.com/docs/$1\" [R,L]")
self.assertIn(commented_rewrite_rule, conf_text)
self.assertIn(uncommented_rewrite_rule, conf_text)
self.assertEqual(mock_notify.call_count, 1)
self.assertIn("Some rewrite rules", mock_notify.call_args[0][0])
self.assertTrue(commented_rewrite_rule in conf_text)
self.assertTrue(uncommented_rewrite_rule in conf_text)
mock_get_utility().add_message.assert_called_once_with(mock.ANY,
mock.ANY)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_make_vhost_ssl_with_existing_rewrite_conds(self, mock_notify):
@certbot_util.patch_get_utility()
def test_make_vhost_ssl_with_existing_rewrite_conds(self, mock_get_utility):
self.config.parser.modules["rewrite_module"] = None
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[3])
@@ -1630,21 +1647,21 @@ class MultiVhostsTest(util.ApacheTest):
"https://%{SERVER_NAME}%{REQUEST_URI} "
"[L,NE,R=permanent]")
self.assertIn(not_commented_cond1, conf_line_set)
self.assertIn(not_commented_rewrite_rule, conf_line_set)
self.assertTrue(not_commented_cond1 in conf_line_set)
self.assertTrue(not_commented_rewrite_rule in conf_line_set)
self.assertIn(commented_cond1, conf_line_set)
self.assertIn(commented_cond2, conf_line_set)
self.assertIn(commented_rewrite_rule, conf_line_set)
self.assertEqual(mock_notify.call_count, 1)
self.assertIn("Some rewrite rules", mock_notify.call_args[0][0])
self.assertTrue(commented_cond1 in conf_line_set)
self.assertTrue(commented_cond2 in conf_line_set)
self.assertTrue(commented_rewrite_rule in conf_line_set)
mock_get_utility().add_message.assert_called_once_with(mock.ANY,
mock.ANY)
class InstallSslOptionsConfTest(util.ApacheTest):
"""Test that the options-ssl-nginx.conf file is installed and updated properly."""
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(InstallSslOptionsConfTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
@@ -1657,7 +1674,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
return crypto_util.sha256sum(self.config.pick_apache_config())
def _assert_current_file(self):
self.assertIs(os.path.isfile(self.config.mod_ssl_conf), True)
self.assertTrue(os.path.isfile(self.config.mod_ssl_conf))
self.assertEqual(crypto_util.sha256sum(self.config.mod_ssl_conf),
self._current_ssl_options_hash())
@@ -1665,7 +1682,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
# prepare should have placed a file there
self._assert_current_file()
os.remove(self.config.mod_ssl_conf)
self.assertIs(os.path.isfile(self.config.mod_ssl_conf), False)
self.assertFalse(os.path.isfile(self.config.mod_ssl_conf))
self._call()
self._assert_current_file()
@@ -1687,8 +1704,8 @@ class InstallSslOptionsConfTest(util.ApacheTest):
mod_ssl_conf.write("a new line for the wrong hash\n")
with mock.patch("certbot.plugins.common.logger") as mock_logger:
self._call()
self.assertIs(mock_logger.warning.called, False)
self.assertIs(os.path.isfile(self.config.mod_ssl_conf), True)
self.assertFalse(mock_logger.warning.called)
self.assertTrue(os.path.isfile(self.config.mod_ssl_conf))
self.assertEqual(crypto_util.sha256sum(
self.config.pick_apache_config()),
self._current_ssl_options_hash())
@@ -1711,7 +1728,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
# only print warning once
with mock.patch("certbot.plugins.common.logger") as mock_logger:
self._call()
self.assertIs(mock_logger.warning.called, False)
self.assertFalse(mock_logger.warning.called)
def test_ssl_config_files_hash_in_all_hashes(self):
"""
@@ -1727,14 +1744,12 @@ class InstallSslOptionsConfTest(util.ApacheTest):
"certbot_apache", os.path.join("_internal", "tls_configs"))
all_files = [os.path.join(tls_configs_dir, name) for name in os.listdir(tls_configs_dir)
if name.endswith('options-ssl-apache.conf')]
self.assertGreaterEqual(len(all_files), 1)
self.assertTrue(all_files)
for one_file in all_files:
file_hash = crypto_util.sha256sum(one_file)
self.assertIn(
file_hash, ALL_SSL_OPTIONS_HASHES,
f"Constants.ALL_SSL_OPTIONS_HASHES must be appended with the sha256 "
f"hash of {one_file} when it is updated."
)
self.assertTrue(file_hash in ALL_SSL_OPTIONS_HASHES,
"Constants.ALL_SSL_OPTIONS_HASHES must be appended with the sha256 "
"hash of {0} when it is updated.".format(one_file))
def test_openssl_version(self):
self.config._openssl_version = None
@@ -1759,7 +1774,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
# ssl_module statically linked
self.config._openssl_version = None
self.config.parser.modules['ssl_module'] = None
self.config.options.bin = '/fake/path/to/httpd'
self.config.options['bin'] = '/fake/path/to/httpd'
with mock.patch("certbot_apache._internal.configurator."
"ApacheConfigurator._open_module_file") as mock_omf:
mock_omf.return_value = some_string_contents
@@ -1768,14 +1783,14 @@ class InstallSslOptionsConfTest(util.ApacheTest):
def test_current_version(self):
self.config.version = (2, 4, 10)
self.config._openssl_version = '1.0.2m'
self.assertIn('old', self.config.pick_apache_config())
self.assertTrue('old' in self.config.pick_apache_config())
self.config.version = (2, 4, 11)
self.config._openssl_version = '1.0.2m'
self.assertIn('current', self.config.pick_apache_config())
self.assertTrue('current' in self.config.pick_apache_config())
self.config._openssl_version = '1.0.2a'
self.assertIn('old', self.config.pick_apache_config())
self.assertTrue('old' in self.config.pick_apache_config())
def test_openssl_version_warns(self):
self.config._openssl_version = '1.0.2a'
@@ -1784,28 +1799,28 @@ class InstallSslOptionsConfTest(util.ApacheTest):
self.config._openssl_version = None
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.assertEqual(self.config.openssl_version(), None)
self.assertIn("Could not find ssl_module", mock_log.call_args[0][0])
self.assertTrue("Could not find ssl_module" in mock_log.call_args[0][0])
# When no ssl_module is present at all
self.config._openssl_version = None
self.assertNotIn("ssl_module", self.config.parser.modules)
self.assertTrue("ssl_module" not in self.config.parser.modules)
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.assertEqual(self.config.openssl_version(), None)
self.assertIn("Could not find ssl_module", mock_log.call_args[0][0])
self.assertTrue("Could not find ssl_module" in mock_log.call_args[0][0])
# When ssl_module is statically linked but --apache-bin not provided
self.config._openssl_version = None
self.config.options.bin = None
self.config.options['bin'] = None
self.config.parser.modules['ssl_module'] = None
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
self.assertEqual(self.config.openssl_version(), None)
self.assertIn("ssl_module is statically linked but", mock_log.call_args[0][0])
self.assertTrue("ssl_module is statically linked but" in mock_log.call_args[0][0])
self.config.parser.modules['ssl_module'] = "/fake/path"
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
# Check that correct logger.warning was printed
self.assertEqual(self.config.openssl_version(), None)
self.assertIn("Unable to read", mock_log.call_args[0][0])
self.assertTrue("Unable to read" in mock_log.call_args[0][0])
contents_missing_openssl = b"these contents won't match the regex"
with mock.patch("certbot_apache._internal.configurator."
@@ -1814,7 +1829,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
# Check that correct logger.warning was printed
self.assertEqual(self.config.openssl_version(), None)
self.assertIn("Could not find OpenSSL", mock_log.call_args[0][0])
self.assertTrue("Could not find OpenSSL" in mock_log.call_args[0][0])
def test_open_module_file(self):
mock_open = mock.mock_open(read_data="testing 12 3")

View File

@@ -9,7 +9,6 @@ except ImportError: # pragma: no cover
from certbot import errors
from certbot.compat import os
from certbot.tests import util as certbot_util
from certbot_apache._internal import apache_util
from certbot_apache._internal import obj
import util
@@ -21,7 +20,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
_multiprocess_can_split_ = True
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(MultipleVhostsTestDebian, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="debian")
@@ -45,41 +44,40 @@ class MultipleVhostsTestDebian(util.ApacheTest):
def test_enable_mod_unsupported_dirs(self):
shutil.rmtree(os.path.join(self.config.parser.root, "mods-enabled"))
self.assertRaises(errors.NotSupportedError, self.config.enable_mod, "ssl")
self.assertRaises(
errors.NotSupportedError, self.config.enable_mod, "ssl")
@mock.patch("certbot.util.run_script")
@mock.patch("certbot.util.exe_exists")
@mock.patch("certbot_apache._internal.apache_util.subprocess.run")
def test_enable_mod(self, mock_run, mock_exe_exists, mock_run_script):
mock_run.return_value.stdout = "Define: DUMP_RUN_CFG"
mock_run.return_value.stderr = ""
mock_run.return_value.returncode = 0
@mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
def test_enable_mod(self, mock_popen, mock_exe_exists, mock_run_script):
mock_popen().communicate.return_value = ("Define: DUMP_RUN_CFG", "")
mock_popen().returncode = 0
mock_exe_exists.return_value = True
self.config.enable_mod("ssl")
self.assertIn("ssl_module", self.config.parser.modules)
self.assertIn("mod_ssl.c", self.config.parser.modules)
self.assertTrue("ssl_module" in self.config.parser.modules)
self.assertTrue("mod_ssl.c" in self.config.parser.modules)
self.assertIs(mock_run_script.called, True)
self.assertTrue(mock_run_script.called)
def test_deploy_cert_enable_new_vhost(self):
# Create
ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
self.config.parser.modules["ssl_module"] = None
self.config.parser.modules["mod_ssl.c"] = None
self.assertIs(ssl_vhost.enabled, False)
with certbot_util.patch_display_util():
self.config.deploy_cert(
"encryption-example.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.assertIs(ssl_vhost.enabled, True)
# Make sure that we don't error out if symlink already exists
ssl_vhost.enabled = False
self.assertIs(ssl_vhost.enabled, False)
self.config.deploy_cert(
"encryption-example.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.assertIs(ssl_vhost.enabled, True)
self.assertFalse(ssl_vhost.enabled)
self.config.deploy_cert(
"encryption-example.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.assertTrue(ssl_vhost.enabled)
# Make sure that we don't error out if symlink already exists
ssl_vhost.enabled = False
self.assertFalse(ssl_vhost.enabled)
self.config.deploy_cert(
"encryption-example.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.assertTrue(ssl_vhost.enabled)
def test_enable_site_failure(self):
self.config.parser.root = "/tmp/nonexistent"
@@ -102,15 +100,14 @@ class MultipleVhostsTestDebian(util.ApacheTest):
# Get the default 443 vhost
self.config.assoc["random.demo"] = self.vh_truth[1]
with certbot_util.patch_display_util():
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.save()
# Verify ssl_module was enabled.
self.assertIs(self.vh_truth[1].enabled, True)
self.assertIn("ssl_module", self.config.parser.modules)
self.assertTrue(self.vh_truth[1].enabled)
self.assertTrue("ssl_module" in self.config.parser.modules)
loc_cert = self.config.parser.find_dir(
"sslcertificatefile", "example/fullchain.pem",
@@ -169,7 +166,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
# This will create an ssl vhost for certbot.demo
self.config.choose_vhost("certbot.demo")
self.config.enhance("certbot.demo", "staple-ocsp")
self.assertIn("socache_shmcb_module", self.config.parser.modules)
self.assertTrue("socache_shmcb_module" in self.config.parser.modules)
@mock.patch("certbot.util.run_script")
@mock.patch("certbot.util.exe_exists")
@@ -182,7 +179,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
self.config.choose_vhost("certbot.demo")
self.config.enhance("certbot.demo", "ensure-http-header",
"Strict-Transport-Security")
self.assertIn("headers_module", self.config.parser.modules)
self.assertTrue("headers_module" in self.config.parser.modules)
@mock.patch("certbot.util.run_script")
@mock.patch("certbot.util.exe_exists")
@@ -193,10 +190,10 @@ class MultipleVhostsTestDebian(util.ApacheTest):
# This will create an ssl vhost for certbot.demo
self.config.choose_vhost("certbot.demo")
self.config.enhance("certbot.demo", "redirect")
self.assertIn("rewrite_module", self.config.parser.modules)
self.assertTrue("rewrite_module" in self.config.parser.modules)
def test_enable_site_already_enabled(self):
self.assertIs(self.vh_truth[1].enabled, True)
self.assertTrue(self.vh_truth[1].enabled)
self.config.enable_site(self.vh_truth[1])
def test_enable_site_call_parent(self):
@@ -206,7 +203,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
vh = self.vh_truth[0]
vh.enabled = False
self.config.enable_site(vh)
self.assertIs(e_s.called, True)
self.assertTrue(e_s.called)
@mock.patch("certbot.util.exe_exists")
def test_enable_mod_no_disable(self, mock_exe_exists):
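
Most hunks in this file simply trade the generic assertTrue(x) / assertTrue(a in b) calls for the more specific assertIs(x, True) / assertIn(a, b) helpers, or back, depending on which side of the compare is read. A minimal standalone sketch (class and values invented for illustration, not taken from the certbot suite) of why the specific helpers are usually preferred — their failure messages report the actual values involved:

# Illustrative only -- not part of the certbot test suite.
import unittest


class AssertionStyleExample(unittest.TestCase):
    def test_membership(self):
        modules = {"ssl_module": None, "headers_module": None}
        # Generic form: a failure only reports "False is not true".
        self.assertTrue("ssl_module" in modules)
        # Specific form: a failure reports the member and the container.
        self.assertIn("ssl_module", modules)

    def test_truthiness_vs_identity(self):
        called = True
        # assertTrue() accepts any truthy value (1, "x", [0], ...).
        self.assertTrue(called)
        # assertIs() additionally pins the value to the singleton True.
        self.assertIs(called, True)


if __name__ == "__main__":
    unittest.main()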

View File

@@ -3,8 +3,8 @@ import unittest
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
from certbot import errors
from certbot.display import util as display_util
@@ -23,9 +23,9 @@ class SelectVhostMultiTest(unittest.TestCase):
self.base_dir, "debian_apache_2_4/multiple_vhosts")
def test_select_no_input(self):
self.assertEqual(len(select_vhost_multiple([])), 0)
self.assertFalse(select_vhost_multiple([]))
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_select_correct(self, mock_util):
mock_util().checklist.return_value = (
display_util.OK, [self.vhosts[3].display_repr(),
@@ -33,16 +33,15 @@ class SelectVhostMultiTest(unittest.TestCase):
vhs = select_vhost_multiple([self.vhosts[3],
self.vhosts[2],
self.vhosts[1]])
self.assertIn(self.vhosts[2], vhs)
self.assertIn(self.vhosts[3], vhs)
self.assertNotIn(self.vhosts[1], vhs)
self.assertTrue(self.vhosts[2] in vhs)
self.assertTrue(self.vhosts[3] in vhs)
self.assertFalse(self.vhosts[1] in vhs)
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_select_cancel(self, mock_util):
mock_util().checklist.return_value = (display_util.CANCEL, "whatever")
vhs = select_vhost_multiple([self.vhosts[2], self.vhosts[3]])
self.assertEqual(vhs, [])
self.assertFalse(vhs)
class SelectVhostTest(unittest.TestCase):
"""Tests for certbot_apache._internal.display_ops.select_vhost."""
@@ -57,40 +56,41 @@ class SelectVhostTest(unittest.TestCase):
from certbot_apache._internal.display_ops import select_vhost
return select_vhost("example.com", vhosts)
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_successful_choice(self, mock_util):
mock_util().menu.return_value = (display_util.OK, 3)
self.assertEqual(self.vhosts[3], self._call(self.vhosts))
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_noninteractive(self, mock_util):
mock_util().menu.side_effect = errors.MissingCommandlineFlag("no vhost default")
try:
self._call(self.vhosts)
except errors.MissingCommandlineFlag as e:
self.assertIn("vhost ambiguity", str(e))
self.assertTrue("vhost ambiguity" in str(e))
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_more_info_cancel(self, mock_util):
mock_util().menu.side_effect = [
(display_util.CANCEL, -1),
]
self.assertIsNone(self._call(self.vhosts))
self.assertEqual(None, self._call(self.vhosts))
def test_no_vhosts(self):
self.assertIsNone(self._call([]))
self.assertEqual(self._call([]), None)
@mock.patch("certbot_apache._internal.display_ops.display_util")
@certbot_util.patch_get_utility()
@mock.patch("certbot_apache._internal.display_ops.logger")
def test_small_display(self, mock_logger, mock_display_util):
def test_small_display(self, mock_logger, mock_util, mock_display_util):
mock_display_util.WIDTH = 20
mock_display_util.menu.return_value = (display_util.OK, 0)
mock_util().menu.return_value = (display_util.OK, 0)
self._call(self.vhosts)
self.assertEqual(mock_logger.debug.call_count, 1)
@certbot_util.patch_display_util()
@certbot_util.patch_get_utility()
def test_multiple_names(self, mock_util):
mock_util().menu.return_value = (display_util.OK, 5)
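
The test_small_display hunk above changes how many mock arguments the test accepts, which follows from how stacked mock.patch-style decorators work: they apply bottom-up, so the decorator closest to the function supplies the first mock argument after self. A short sketch, patching standard-library names purely for illustration:

# Illustrative only: how stacked mock.patch decorators map to test arguments.
import unittest
from unittest import mock


class PatchOrderExample(unittest.TestCase):
    @mock.patch("os.getcwd")       # outermost decorator -> last mock argument
    @mock.patch("os.path.exists")  # innermost decorator -> first mock argument
    def test_argument_order(self, mock_exists, mock_getcwd):
        mock_exists.return_value = True
        mock_getcwd.return_value = "/tmp"
        self.assertTrue(mock_exists("anything"))
        self.assertEqual(mock_getcwd(), "/tmp")


if __name__ == "__main__":
    unittest.main()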

View File

@@ -53,20 +53,20 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
primary=self.block.secondary,
secondary=self.block.primary)
# Switched around
self.assertEqual(cnode.primary, self.comment.secondary)
self.assertEqual(cnode.secondary, self.comment.primary)
self.assertEqual(dnode.primary, self.directive.secondary)
self.assertEqual(dnode.secondary, self.directive.primary)
self.assertEqual(bnode.primary, self.block.secondary)
self.assertEqual(bnode.secondary, self.block.primary)
self.assertTrue(cnode.primary is self.comment.secondary)
self.assertTrue(cnode.secondary is self.comment.primary)
self.assertTrue(dnode.primary is self.directive.secondary)
self.assertTrue(dnode.secondary is self.directive.primary)
self.assertTrue(bnode.primary is self.block.secondary)
self.assertTrue(bnode.secondary is self.block.primary)
def test_set_params(self):
params = ("first", "second")
self.directive.primary.set_parameters = mock.Mock()
self.directive.secondary.set_parameters = mock.Mock()
self.directive.set_parameters(params)
self.assertIs(self.directive.primary.set_parameters.called, True)
self.assertIs(self.directive.secondary.set_parameters.called, True)
self.assertTrue(self.directive.primary.set_parameters.called)
self.assertTrue(self.directive.secondary.set_parameters.called)
def test_set_parameters(self):
pparams = mock.MagicMock()
@@ -76,8 +76,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
self.directive.primary.set_parameters = pparams
self.directive.secondary.set_parameters = sparams
self.directive.set_parameters(("param", "seq"))
self.assertIs(pparams.called, True)
self.assertIs(sparams.called, True)
self.assertTrue(pparams.called)
self.assertTrue(sparams.called)
def test_delete_child(self):
pdel = mock.MagicMock()
@@ -85,8 +85,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
self.block.primary.delete_child = pdel
self.block.secondary.delete_child = sdel
self.block.delete_child(self.comment)
self.assertIs(pdel.called, True)
self.assertIs(sdel.called, True)
self.assertTrue(pdel.called)
self.assertTrue(sdel.called)
def test_unsaved_files(self):
puns = mock.MagicMock()
@@ -96,8 +96,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
self.block.primary.unsaved_files = puns
self.block.secondary.unsaved_files = suns
self.block.unsaved_files()
self.assertIs(puns.called, True)
self.assertIs(suns.called, True)
self.assertTrue(puns.called)
self.assertTrue(suns.called)
def test_getattr_equality(self):
self.directive.primary.variableexception = "value"
@@ -140,8 +140,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
self.block.primary.add_child_block = mock_first
self.block.secondary.add_child_block = mock_second
self.block.add_child_block("Block")
self.assertIs(mock_first.called, True)
self.assertIs(mock_second.called, True)
self.assertTrue(mock_first.called)
self.assertTrue(mock_second.called)
def test_add_child_directive(self):
mock_first = mock.MagicMock(return_value=self.directive.primary)
@@ -149,8 +149,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
self.block.primary.add_child_directive = mock_first
self.block.secondary.add_child_directive = mock_second
self.block.add_child_directive("Directive")
self.assertIs(mock_first.called, True)
self.assertIs(mock_second.called, True)
self.assertTrue(mock_first.called)
self.assertTrue(mock_second.called)
def test_add_child_comment(self):
mock_first = mock.MagicMock(return_value=self.comment.primary)
@@ -158,8 +158,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
self.block.primary.add_child_comment = mock_first
self.block.secondary.add_child_comment = mock_second
self.block.add_child_comment("Comment")
self.assertIs(mock_first.called, True)
self.assertIs(mock_second.called, True)
self.assertTrue(mock_first.called)
self.assertTrue(mock_second.called)
def test_find_comments(self):
pri_comments = [augeasparser.AugeasCommentNode(comment="some comment",
@@ -183,9 +183,9 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
# Check that every comment response is represented in the list of
# DualParserNode instances.
for p in p_dcoms:
self.assertIn(p, p_coms)
self.assertTrue(p in p_coms)
for s in s_dcoms:
self.assertIn(s, s_coms)
self.assertTrue(s in s_coms)
def test_find_blocks_first_passing(self):
youshallnotpass = [augeasparser.AugeasBlockNode(name="notpassing",
@@ -207,8 +207,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
assertions.assertEqual(block.primary, block.secondary)
except AssertionError: # pragma: no cover
self.fail("Assertion should have passed")
self.assertIs(assertions.isPassDirective(block.primary), True)
self.assertIs(assertions.isPassDirective(block.secondary), False)
self.assertTrue(assertions.isPassDirective(block.primary))
self.assertFalse(assertions.isPassDirective(block.secondary))
def test_find_blocks_second_passing(self):
youshallnotpass = [augeasparser.AugeasBlockNode(name="notpassing",
@@ -230,8 +230,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
assertions.assertEqual(block.primary, block.secondary)
except AssertionError: # pragma: no cover
self.fail("Assertion should have passed")
self.assertIs(assertions.isPassDirective(block.primary), False)
self.assertIs(assertions.isPassDirective(block.secondary), True)
self.assertFalse(assertions.isPassDirective(block.primary))
self.assertTrue(assertions.isPassDirective(block.secondary))
def test_find_dirs_first_passing(self):
notpassing = [augeasparser.AugeasDirectiveNode(name="notpassing",
@@ -253,8 +253,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
assertions.assertEqual(directive.primary, directive.secondary)
except AssertionError: # pragma: no cover
self.fail("Assertion should have passed")
self.assertIs(assertions.isPassDirective(directive.primary), True)
self.assertIs(assertions.isPassDirective(directive.secondary), False)
self.assertTrue(assertions.isPassDirective(directive.primary))
self.assertFalse(assertions.isPassDirective(directive.secondary))
def test_find_dirs_second_passing(self):
notpassing = [augeasparser.AugeasDirectiveNode(name="notpassing",
@@ -276,8 +276,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
assertions.assertEqual(directive.primary, directive.secondary)
except AssertionError: # pragma: no cover
self.fail("Assertion should have passed")
self.assertIs(assertions.isPassDirective(directive.primary), False)
self.assertIs(assertions.isPassDirective(directive.secondary), True)
self.assertFalse(assertions.isPassDirective(directive.primary))
self.assertTrue(assertions.isPassDirective(directive.secondary))
def test_find_coms_first_passing(self):
notpassing = [augeasparser.AugeasCommentNode(comment="notpassing",
@@ -299,8 +299,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
assertions.assertEqual(comment.primary, comment.secondary)
except AssertionError: # pragma: no cover
self.fail("Assertion should have passed")
self.assertIs(assertions.isPassComment(comment.primary), True)
self.assertIs(assertions.isPassComment(comment.secondary), False)
self.assertTrue(assertions.isPassComment(comment.primary))
self.assertFalse(assertions.isPassComment(comment.secondary))
def test_find_coms_second_passing(self):
notpassing = [augeasparser.AugeasCommentNode(comment="notpassing",
@@ -322,8 +322,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
assertions.assertEqual(comment.primary, comment.secondary)
except AssertionError: # pragma: no cover
self.fail("Assertion should have passed")
self.assertIs(assertions.isPassComment(comment.primary), False)
self.assertIs(assertions.isPassComment(comment.secondary), True)
self.assertFalse(assertions.isPassComment(comment.primary))
self.assertTrue(assertions.isPassComment(comment.secondary))
def test_find_blocks_no_pass_equal(self):
notpassing1 = [augeasparser.AugeasBlockNode(name="notpassing",
@@ -341,9 +341,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
blocks = self.block.find_blocks("anything")
for block in blocks:
with self.subTest(block=block):
self.assertEqual(block.primary, block.secondary)
self.assertIsNot(block.primary, block.secondary)
self.assertEqual(block.primary, block.secondary)
self.assertTrue(block.primary is not block.secondary)
def test_find_dirs_no_pass_equal(self):
notpassing1 = [augeasparser.AugeasDirectiveNode(name="notpassing",
@@ -361,9 +360,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
directives = self.block.find_directives("anything")
for directive in directives:
with self.subTest(directive=directive):
self.assertEqual(directive.primary, directive.secondary)
self.assertIsNot(directive.primary, directive.secondary)
self.assertEqual(directive.primary, directive.secondary)
self.assertTrue(directive.primary is not directive.secondary)
def test_find_comments_no_pass_equal(self):
notpassing1 = [augeasparser.AugeasCommentNode(comment="notpassing",
@@ -381,9 +379,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
comments = self.block.find_comments("anything")
for comment in comments:
with self.subTest(comment=comment):
self.assertEqual(comment.primary, comment.secondary)
self.assertIsNot(comment.primary, comment.secondary)
self.assertEqual(comment.primary, comment.secondary)
self.assertTrue(comment.primary is not comment.secondary)
def test_find_blocks_no_pass_notequal(self):
notpassing1 = [augeasparser.AugeasBlockNode(name="notpassing",
@@ -427,8 +424,8 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
self.block.primary.parsed_paths = mock_p
self.block.secondary.parsed_paths = mock_s
self.block.parsed_paths()
self.assertIs(mock_p.called, True)
self.assertIs(mock_s.called, True)
self.assertTrue(mock_p.called)
self.assertTrue(mock_s.called)
def test_parsed_paths_error(self):
mock_p = mock.MagicMock(return_value=['/path/file.conf'])
@@ -444,5 +441,5 @@ class DualParserNodeTest(unittest.TestCase): # pylint: disable=too-many-public-
self.block.primary.find_ancestors = primarymock
self.block.secondary.find_ancestors = secondarymock
self.block.find_ancestors("anything")
self.assertIs(primarymock.called, True)
self.assertIs(secondarymock.called, True)
self.assertTrue(primarymock.called)
self.assertTrue(secondarymock.called)
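
These DualParserNode tests repeatedly replace a delegate method on both the primary and secondary parser with a MagicMock and then assert that each was called. A stripped-down sketch of that pattern (DualNode here is a stand-in, not the real certbot_apache class):

# Illustrative sketch of the "assert both delegates were called" pattern.
from unittest import mock


class DualNode:
    def __init__(self, primary, secondary):
        self.primary = primary
        self.secondary = secondary

    def delete_child(self, child):
        # Forward the call to both underlying parser implementations.
        self.primary.delete_child(child)
        self.secondary.delete_child(child)


primary, secondary = mock.MagicMock(), mock.MagicMock()
node = DualNode(primary, secondary)
node.delete_child("child")
assert primary.delete_child.called and secondary.delete_child.called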

View File

@@ -41,7 +41,7 @@ class EntryPointTest(unittest.TestCase):
with mock.patch("certbot.util.get_os_info") as mock_info:
mock_info.return_value = ("nonexistent", "irrelevant")
with mock.patch("certbot.util.get_systemd_os_like") as mock_like:
mock_like.return_value = ["unknown"]
mock_like.return_value = ["unknonwn"]
self.assertEqual(entrypoint.get_configurator(),
configurator.ApacheConfigurator)

View File

@@ -46,9 +46,9 @@ class FedoraRestartTest(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(FedoraRestartTest, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
os_info="fedora")
@@ -90,9 +90,9 @@ class MultipleVhostsTestFedora(util.ApacheTest):
test_dir = "centos7_apache/apache"
config_root = "centos7_apache/apache/httpd"
vhost_root = "centos7_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(MultipleVhostsTestFedora, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
@@ -134,8 +134,8 @@ class MultipleVhostsTestFedora(util.ApacheTest):
self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertIn("TEST2", self.config.parser.variables)
self.assertIn("mod_another.c", self.config.parser.modules)
self.assertTrue("TEST2" in self.config.parser.variables)
self.assertTrue("mod_another.c" in self.config.parser.modules)
@mock.patch("certbot_apache._internal.configurator.util.run_script")
def test_get_version(self, mock_run_script):
@@ -172,11 +172,11 @@ class MultipleVhostsTestFedora(util.ApacheTest):
mock_osi.return_value = ("fedora", "29")
self.config.parser.update_runtime_variables()
self.assertIn("mock_define", self.config.parser.variables)
self.assertIn("mock_define_too", self.config.parser.variables)
self.assertIn("mock_value", self.config.parser.variables)
self.assertTrue("mock_define" in self.config.parser.variables)
self.assertTrue("mock_define_too" in self.config.parser.variables)
self.assertTrue("mock_value" in self.config.parser.variables)
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
self.assertIn("MOCK_NOSEP", self.config.parser.variables)
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])
@mock.patch("certbot_apache._internal.configurator.util.run_script")

View File

@@ -50,9 +50,9 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
test_dir = "gentoo_apache/apache"
config_root = "gentoo_apache/apache/apache2"
vhost_root = "gentoo_apache/apache/apache2/vhosts.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
super(MultipleVhostsTestGentoo, self).setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
# pylint: disable=line-too-long
with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_runtime_variables"):
@@ -63,7 +63,8 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
self.temp_dir, "gentoo_apache/apache")
def test_get_parser(self):
self.assertIsInstance(self.config.parser, override_gentoo.GentooParser)
self.assertTrue(isinstance(self.config.parser,
override_gentoo.GentooParser))
def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found."""
@@ -90,7 +91,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_modules"):
self.config.parser.update_runtime_variables()
for define in defines:
self.assertIn(define, self.config.parser.variables)
self.assertTrue(define in self.config.parser.variables)
@mock.patch("certbot_apache._internal.apache_util.parse_from_subprocess")
def test_no_binary_configdump(self, mock_subprocess):
@@ -100,11 +101,11 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_modules"):
self.config.parser.update_runtime_variables()
self.config.parser.reset_modules()
self.assertIs(mock_subprocess.called, False)
self.assertFalse(mock_subprocess.called)
self.config.parser.update_runtime_variables()
self.config.parser.reset_modules()
self.assertIs(mock_subprocess.called, True)
self.assertTrue(mock_subprocess.called)
@mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
def test_opportunistic_httpd_runtime_parsing(self, mock_get):
@@ -128,7 +129,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
self.assertEqual(mock_get.call_count, 1)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertIn("mod_another.c", self.config.parser.modules)
self.assertTrue("mod_another.c" in self.config.parser.modules)
@mock.patch("certbot_apache._internal.configurator.util.run_script")
def test_alt_restart_works(self, mock_run_script):

View File

@@ -24,7 +24,7 @@ class ApacheHttp01Test(util.ApacheTest):
"""Test for certbot_apache._internal.http_01.ApacheHttp01."""
def setUp(self, *args, **kwargs): # pylint: disable=arguments-differ
super().setUp(*args, **kwargs)
super(ApacheHttp01Test, self).setUp(*args, **kwargs)
self.account_key = self.rsa512jwk
self.achalls: List[achallenges.KeyAuthorizationAnnotatedChallenge] = []
@@ -51,7 +51,7 @@ class ApacheHttp01Test(util.ApacheTest):
self.http = ApacheHttp01(self.config)
def test_empty_perform(self):
self.assertEqual(len(self.http.perform()), 0)
self.assertFalse(self.http.perform())
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.enable_mod")
def test_enable_modules_apache_2_2(self, mock_enmod):
@@ -77,7 +77,7 @@ class ApacheHttp01Test(util.ApacheTest):
self.http.prepare_http01_modules()
self.assertIs(mock_enmod.called, True)
self.assertTrue(mock_enmod.called)
calls = mock_enmod.call_args_list
other_calls = []
for call in calls:
@@ -125,18 +125,6 @@ class ApacheHttp01Test(util.ApacheTest):
domain="duplicate.example.com", account_key=self.account_key)]
self.common_perform_test(achalls, vhosts)
def test_configure_name_and_blank(self):
domain = "certbot.demo"
vhosts = [v for v in self.config.vhosts if v.name == domain or v.name is None]
achalls = [
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(token=((b'a' * 16))),
"pending"),
domain=domain, account_key=self.account_key),
]
self.common_perform_test(achalls, vhosts)
def test_no_vhost(self):
for achall in self.achalls:
self.http.add_chall(achall)
@@ -186,7 +174,7 @@ class ApacheHttp01Test(util.ApacheTest):
def common_perform_test(self, achalls, vhosts):
"""Tests perform with the given achalls."""
challenge_dir = self.http.challenge_dir
self.assertIs(os.path.exists(challenge_dir), False)
self.assertFalse(os.path.exists(challenge_dir))
for achall in achalls:
self.http.add_chall(achall)
@@ -194,8 +182,8 @@ class ApacheHttp01Test(util.ApacheTest):
achall.response(self.account_key) for achall in achalls]
self.assertEqual(self.http.perform(), expected_response)
self.assertIs(os.path.isdir(self.http.challenge_dir), True)
self.assertIs(filesystem.has_min_permissions(self.http.challenge_dir, 0o755), True)
self.assertTrue(os.path.isdir(self.http.challenge_dir))
self.assertTrue(filesystem.has_min_permissions(self.http.challenge_dir, 0o755))
self._test_challenge_conf()
for achall in achalls:
@@ -211,7 +199,7 @@ class ApacheHttp01Test(util.ApacheTest):
vhost.path)
self.assertEqual(len(matches), 1)
self.assertIs(os.path.exists(challenge_dir), True)
self.assertTrue(os.path.exists(challenge_dir))
@mock.patch("certbot_apache._internal.http_01.filesystem.makedirs")
def test_failed_makedirs(self, mock_makedirs):
@@ -226,20 +214,20 @@ class ApacheHttp01Test(util.ApacheTest):
with open(self.http.challenge_conf_post) as f:
post_conf_contents = f.read()
self.assertIn("RewriteEngine on", pre_conf_contents)
self.assertIn("RewriteRule", pre_conf_contents)
self.assertTrue("RewriteEngine on" in pre_conf_contents)
self.assertTrue("RewriteRule" in pre_conf_contents)
self.assertIn(self.http.challenge_dir, post_conf_contents)
self.assertTrue(self.http.challenge_dir in post_conf_contents)
if self.config.version < (2, 4):
self.assertIn("Allow from all", post_conf_contents)
self.assertTrue("Allow from all" in post_conf_contents)
else:
self.assertIn("Require all granted", post_conf_contents)
self.assertTrue("Require all granted" in post_conf_contents)
def _test_challenge_file(self, achall):
name = os.path.join(self.http.challenge_dir, achall.chall.encode("token"))
validation = achall.validation(self.account_key)
self.assertIs(filesystem.has_min_permissions(name, 0o644), True)
self.assertTrue(filesystem.has_min_permissions(name, 0o644))
with open(name, 'rb') as f:
self.assertEqual(f.read(), validation.encode())
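
The perform tests above assert minimum permission bits on the challenge directory and challenge files. A rough standalone equivalent of such a check (this is not certbot's filesystem helper, only an illustration of the idea):

# Illustrative only: "has at least these permission bits" on a path.
import os
import stat


def has_min_permissions(path: str, min_mode: int) -> bool:
    current = stat.S_IMODE(os.stat(path).st_mode)
    return current & min_mode == min_mode


# Example: a file created with mode 0o644 satisfies a 0o644 minimum
# but not a 0o755 minimum.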

View File

@@ -44,14 +44,15 @@ class VirtualHostTest(unittest.TestCase):
"fp", "vhp",
{Addr.fromstring("*:443"), Addr.fromstring("1.2.3.4:443")},
False, False)
self.assertIs(complex_vh.conflicts([self.addr1]), True)
self.assertIs(complex_vh.conflicts([self.addr2]), True)
self.assertIs(complex_vh.conflicts([self.addr_default]), False)
self.assertTrue(complex_vh.conflicts([self.addr1]))
self.assertTrue(complex_vh.conflicts([self.addr2]))
self.assertFalse(complex_vh.conflicts([self.addr_default]))
self.assertIs(self.vhost1.conflicts([self.addr2]), True)
self.assertIs(self.vhost1.conflicts([self.addr_default]), False)
self.assertTrue(self.vhost1.conflicts([self.addr2]))
self.assertFalse(self.vhost1.conflicts([self.addr_default]))
self.assertIs(self.vhost2.conflicts([self.addr1, self.addr_default]), False)
self.assertFalse(self.vhost2.conflicts([self.addr1,
self.addr_default]))
def test_same_server(self):
from certbot_apache._internal.obj import VirtualHost
@@ -66,12 +67,12 @@ class VirtualHostTest(unittest.TestCase):
"fp", "vhp", {self.addr2, self.addr_default},
False, False, None)
self.assertIs(self.vhost1.same_server(self.vhost2), True)
self.assertIs(no_name1.same_server(no_name2), True)
self.assertTrue(self.vhost1.same_server(self.vhost2))
self.assertTrue(no_name1.same_server(no_name2))
self.assertIs(self.vhost1.same_server(no_name1), False)
self.assertIs(no_name1.same_server(no_name3), False)
self.assertIs(no_name1.same_server(no_name4), False)
self.assertFalse(self.vhost1.same_server(no_name1))
self.assertFalse(no_name1.same_server(no_name3))
self.assertFalse(no_name1.same_server(no_name4))
class AddrTest(unittest.TestCase):
@@ -87,9 +88,9 @@ class AddrTest(unittest.TestCase):
self.addr_default = Addr.fromstring("_default_:443")
def test_wildcard(self):
self.assertIs(self.addr.is_wildcard(), False)
self.assertIs(self.addr1.is_wildcard(), True)
self.assertIs(self.addr2.is_wildcard(), True)
self.assertFalse(self.addr.is_wildcard())
self.assertTrue(self.addr1.is_wildcard())
self.assertTrue(self.addr2.is_wildcard())
def test_get_sni_addr(self):
from certbot_apache._internal.obj import Addr
@@ -102,29 +103,29 @@ class AddrTest(unittest.TestCase):
def test_conflicts(self):
# Note: Defined IP is more important than defined port in match
self.assertIs(self.addr.conflicts(self.addr1), True)
self.assertIs(self.addr.conflicts(self.addr2), True)
self.assertIs(self.addr.conflicts(self.addr_defined), True)
self.assertIs(self.addr.conflicts(self.addr_default), False)
self.assertTrue(self.addr.conflicts(self.addr1))
self.assertTrue(self.addr.conflicts(self.addr2))
self.assertTrue(self.addr.conflicts(self.addr_defined))
self.assertFalse(self.addr.conflicts(self.addr_default))
self.assertIs(self.addr1.conflicts(self.addr), False)
self.assertIs(self.addr1.conflicts(self.addr_defined), True)
self.assertIs(self.addr1.conflicts(self.addr_default), False)
self.assertFalse(self.addr1.conflicts(self.addr))
self.assertTrue(self.addr1.conflicts(self.addr_defined))
self.assertFalse(self.addr1.conflicts(self.addr_default))
self.assertIs(self.addr_defined.conflicts(self.addr1), False)
self.assertIs(self.addr_defined.conflicts(self.addr2), False)
self.assertIs(self.addr_defined.conflicts(self.addr), False)
self.assertIs(self.addr_defined.conflicts(self.addr_default), False)
self.assertFalse(self.addr_defined.conflicts(self.addr1))
self.assertFalse(self.addr_defined.conflicts(self.addr2))
self.assertFalse(self.addr_defined.conflicts(self.addr))
self.assertFalse(self.addr_defined.conflicts(self.addr_default))
self.assertIs(self.addr_default.conflicts(self.addr), True)
self.assertIs(self.addr_default.conflicts(self.addr1), True)
self.assertIs(self.addr_default.conflicts(self.addr_defined), True)
self.assertTrue(self.addr_default.conflicts(self.addr))
self.assertTrue(self.addr_default.conflicts(self.addr1))
self.assertTrue(self.addr_default.conflicts(self.addr_defined))
# Self test
self.assertIs(self.addr.conflicts(self.addr), True)
self.assertIs(self.addr1.conflicts(self.addr1), True)
self.assertTrue(self.addr.conflicts(self.addr))
self.assertTrue(self.addr1.conflicts(self.addr1))
# This is a tricky one...
self.assertIs(self.addr1.conflicts(self.addr2), True)
self.assertTrue(self.addr1.conflicts(self.addr2))
def test_equal(self):
self.assertEqual(self.addr1, self.addr2)

View File

@@ -16,7 +16,7 @@ class BasicParserTest(util.ParserTest):
"""Apache Parser Test."""
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(BasicParserTest, self).setUp()
def tearDown(self):
shutil.rmtree(self.temp_dir)
@@ -35,19 +35,6 @@ class BasicParserTest(util.ParserTest):
self.parser.aug.save = mock_save
self.assertRaises(errors.PluginError, self.parser.unsaved_files)
@mock.patch("certbot_apache._internal.parser.logger")
def test_bad_save_errors(self, mock_logger):
nx_path = "/non/existent/path.conf"
self.parser.aug.set("/augeas/load/Httpd/incl[last()]", nx_path)
self.parser.add_dir(f"/files{nx_path}", "AddDirective", "test")
self.assertRaises(IOError, self.parser.save, {})
mock_logger.error.assert_called_with(
'Unable to save files: %s.%s', '/non/existent/path.conf', mock.ANY)
mock_logger.debug.assert_called_with(
"Error %s saving %s: %s", "mk_augtemp",
"/non/existent/path.conf", "No such file or directory")
def test_aug_version(self):
mock_match = mock.Mock(return_value=["something"])
self.parser.aug.match = mock_match
@@ -55,7 +42,7 @@ class BasicParserTest(util.ParserTest):
self.assertEqual(self.parser.check_aug_version(),
["something"])
self.parser.aug.match.side_effect = RuntimeError
self.assertIs(self.parser.check_aug_version(), False)
self.assertFalse(self.parser.check_aug_version())
def test_find_config_root_no_root(self):
# pylint: disable=protected-access
@@ -93,7 +80,8 @@ class BasicParserTest(util.ParserTest):
aug_default = "/files" + self.parser.loc["default"]
self.parser.add_dir(aug_default, "AddDirective", "test")
self.assertTrue(self.parser.find_dir("AddDirective", "test", aug_default))
self.assertTrue(
self.parser.find_dir("AddDirective", "test", aug_default))
self.parser.add_dir(aug_default, "AddList", ["1", "2", "3", "4"])
matches = self.parser.find_dir("AddList", None, aug_default)
@@ -106,24 +94,20 @@ class BasicParserTest(util.ParserTest):
"AddDirectiveBeginning",
"testBegin")
self.assertTrue(self.parser.find_dir("AddDirectiveBeginning", "testBegin", aug_default))
self.assertTrue(
self.parser.find_dir("AddDirectiveBeginning", "testBegin", aug_default))
self.assertEqual(self.parser.aug.get(aug_default+"/directive[1]"), "AddDirectiveBeginning")
self.assertEqual(
self.parser.aug.get(aug_default+"/directive[1]"),
"AddDirectiveBeginning")
self.parser.add_dir_beginning(aug_default, "AddList", ["1", "2", "3", "4"])
matches = self.parser.find_dir("AddList", None, aug_default)
for i, match in enumerate(matches):
self.assertEqual(self.parser.aug.get(match), str(i + 1))
for name in ("empty.conf", "no-directives.conf"):
conf = "/files" + os.path.join(self.parser.root, "sites-available", name)
self.parser.add_dir_beginning(conf, "AddDirectiveBeginning", "testBegin")
self.assertGreater(
len(self.parser.find_dir("AddDirectiveBeginning", "testBegin", conf)),
0
)
def test_empty_arg(self):
self.assertIsNone(self.parser.get_arg("/files/whatever/nonexistent"))
self.assertEqual(None,
self.parser.get_arg("/files/whatever/nonexistent"))
def test_add_dir_to_ifmodssl(self):
"""test add_dir_to_ifmodssl.
@@ -142,7 +126,7 @@ class BasicParserTest(util.ParserTest):
matches = self.parser.find_dir("FakeDirective", "123")
self.assertEqual(len(matches), 1)
self.assertIn("IfModule", matches[0])
self.assertTrue("IfModule" in matches[0])
def test_add_dir_to_ifmodssl_multiple(self):
from certbot_apache._internal.parser import get_aug_path
@@ -156,7 +140,7 @@ class BasicParserTest(util.ParserTest):
matches = self.parser.find_dir("FakeDirective")
self.assertEqual(len(matches), 3)
self.assertIn("IfModule", matches[0])
self.assertTrue("IfModule" in matches[0])
def test_get_aug_path(self):
from certbot_apache._internal.parser import get_aug_path
@@ -181,7 +165,7 @@ class BasicParserTest(util.ParserTest):
with mock.patch("certbot_apache._internal.parser.logger") as mock_logger:
self.parser.parse_modules()
# Make sure that we got None return value and logged the file
self.assertIs(mock_logger.debug.called, True)
self.assertTrue(mock_logger.debug.called)
@mock.patch("certbot_apache._internal.parser.ApacheParser.find_dir")
@mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
@@ -199,8 +183,6 @@ class BasicParserTest(util.ParserTest):
'Define: DUMP_RUN_CFG\n'
'Define: U_MICH\n'
'Define: TLS=443\n'
'Define: WITH_ASSIGNMENT=URL=http://example.com\n'
'Define: EMPTY=\n'
'Define: example_path=Documents/path\n'
'User: name="www-data" id=33 not_used\n'
'Group: name="www-data" id=33 not_used\n'
@@ -279,10 +261,7 @@ class BasicParserTest(util.ParserTest):
mock_cfg.side_effect = mock_get_vars
expected_vars = {"TEST": "", "U_MICH": "", "TLS": "443",
"example_path": "Documents/path",
"WITH_ASSIGNMENT": "URL=http://example.com",
"EMPTY": "",
}
"example_path": "Documents/path"}
self.parser.modules = {}
with mock.patch(
@@ -317,19 +296,28 @@ class BasicParserTest(util.ParserTest):
# path derived from root configuration Include statements
self.assertEqual(mock_parse.call_count, 1)
@mock.patch("certbot_apache._internal.apache_util.subprocess.run")
def test_update_runtime_vars_bad_ctl(self, mock_run):
mock_run.side_effect = OSError
@mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
def test_update_runtime_vars_bad_output(self, mock_cfg):
mock_cfg.return_value = "Define: TLS=443=24"
self.parser.update_runtime_variables()
mock_cfg.return_value = "Define: DUMP_RUN_CFG\nDefine: TLS=443=24"
self.assertRaises(
errors.PluginError, self.parser.update_runtime_variables)
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.option")
@mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
def test_update_runtime_vars_bad_ctl(self, mock_popen, mock_opt):
mock_popen.side_effect = OSError
mock_opt.return_value = "nonexistent"
self.assertRaises(
errors.MisconfigurationError,
self.parser.update_runtime_variables)
@mock.patch("certbot_apache._internal.apache_util.subprocess.run")
def test_update_runtime_vars_bad_exit(self, mock_run):
mock_proc = mock_run.return_value
mock_proc.stdout = ""
mock_proc.stderr = ""
mock_proc.returncode = -1
@mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
def test_update_runtime_vars_bad_exit(self, mock_popen):
mock_popen().communicate.return_value = ("", "")
mock_popen.returncode = -1
self.assertRaises(
errors.MisconfigurationError,
self.parser.update_runtime_variables)
@@ -339,27 +327,27 @@ class BasicParserTest(util.ParserTest):
self.parser.add_comment(get_aug_path(self.parser.loc["name"]), "123456")
comm = self.parser.find_comments("123456")
self.assertEqual(len(comm), 1)
self.assertIn(self.parser.loc["name"], comm[0])
self.assertTrue(self.parser.loc["name"] in comm[0])
class ParserInitTest(util.ApacheTest):
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(ParserInitTest, self).setUp()
def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)
@mock.patch("certbot_apache._internal.parser.init_augeas")
@mock.patch("certbot_apache._internal.parser.ApacheParser.init_augeas")
def test_prepare_no_augeas(self, mock_init_augeas):
from certbot_apache._internal.parser import ApacheParser
mock_init_augeas.side_effect = errors.NoInstallationError
self.config.config_test = mock.Mock()
self.assertRaises(
errors.NoInstallationError, ApacheParser,
os.path.relpath(self.config_path), self.config,
"/dummy/vhostpath", version=(2, 4, 22))
os.path.relpath(self.config_path), "/dummy/vhostpath",
version=(2, 4, 22), configurator=self.config)
def test_init_old_aug(self):
from certbot_apache._internal.parser import ApacheParser
@@ -367,8 +355,8 @@ class ParserInitTest(util.ApacheTest):
mock_c.return_value = False
self.assertRaises(
errors.NotSupportedError,
ApacheParser, os.path.relpath(self.config_path), self.config,
"/dummy/vhostpath", version=(2, 4, 22))
ApacheParser, os.path.relpath(self.config_path),
"/dummy/vhostpath", version=(2, 4, 22), configurator=self.config)
@mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
def test_unparseable(self, mock_cfg):
@@ -376,8 +364,8 @@ class ParserInitTest(util.ApacheTest):
mock_cfg.return_value = ('Define: TEST')
self.assertRaises(
errors.PluginError,
ApacheParser, os.path.relpath(self.config_path), self.config,
"/dummy/vhostpath", version=(2, 2, 22))
ApacheParser, os.path.relpath(self.config_path),
"/dummy/vhostpath", version=(2, 2, 22), configurator=self.config)
def test_root_normalized(self):
from certbot_apache._internal.parser import ApacheParser
@@ -388,7 +376,7 @@ class ParserInitTest(util.ApacheTest):
self.temp_dir,
"debian_apache_2_4/////multiple_vhosts/../multiple_vhosts/apache2")
parser = ApacheParser(path, self.config, "/dummy/vhostpath")
parser = ApacheParser(path, "/dummy/vhostpath", configurator=self.config)
self.assertEqual(parser.root, self.config_path)
@@ -397,7 +385,8 @@ class ParserInitTest(util.ApacheTest):
with mock.patch("certbot_apache._internal.parser.ApacheParser."
"update_runtime_variables"):
parser = ApacheParser(
os.path.relpath(self.config_path), self.config, "/dummy/vhostpath")
os.path.relpath(self.config_path),
"/dummy/vhostpath", configurator=self.config)
self.assertEqual(parser.root, self.config_path)
@@ -406,7 +395,8 @@ class ParserInitTest(util.ApacheTest):
with mock.patch("certbot_apache._internal.parser.ApacheParser."
"update_runtime_variables"):
parser = ApacheParser(
self.config_path + os.path.sep, self.config, "/dummy/vhostpath")
self.config_path + os.path.sep,
"/dummy/vhostpath", configurator=self.config)
self.assertEqual(parser.root, self.config_path)
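
The runtime-variable tests above mock either subprocess.run or subprocess.Popen, depending on which side of the compare is read. For context, a generic sketch of the two idioms; the command and exception type are placeholders rather than certbot's actual code:

# Illustrative only: the two subprocess idioms the mocked tests target.
import subprocess


def dump_config_run(cmd=("apachectl", "-t", "-D", "DUMP_RUN_CFG")):
    proc = subprocess.run(cmd, capture_output=True, text=True, check=False)
    if proc.returncode != 0:
        raise RuntimeError(f"{cmd[0]} exited {proc.returncode}: {proc.stderr}")
    return proc.stdout


def dump_config_popen(cmd=("apachectl", "-t", "-D", "DUMP_RUN_CFG")):
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, text=True)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        raise RuntimeError(f"{cmd[0]} exited {proc.returncode}: {stderr}")
    return stdout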

View File

@@ -20,7 +20,7 @@ class ConfiguratorParserNodeTest(util.ApacheTest): # pylint: disable=too-many-p
"""Test AugeasParserNode using available test configurations"""
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
super(ConfiguratorParserNodeTest, self).setUp()
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir,
@@ -32,7 +32,7 @@ class ConfiguratorParserNodeTest(util.ApacheTest): # pylint: disable=too-many-p
self.config.USE_PARSERNODE = True
vhosts = self.config.get_virtual_hosts()
# Legacy get_virtual_hosts() do not set the node
self.assertIsNotNone(vhosts[0].node)
self.assertTrue(vhosts[0].node is not None)
def test_parsernode_get_vhosts_mismatch(self):
vhosts = self.config.get_virtual_hosts_v2()

View File

@@ -18,7 +18,7 @@ class DummyParserNode(interfaces.ParserNode):
self.dirty = dirty
self.filepath = filepath
self.metadata = metadata
super().__init__(**kwargs)
super(DummyParserNode, self).__init__(**kwargs)
def save(self, msg): # pragma: no cover
"""Save"""
@@ -38,7 +38,7 @@ class DummyCommentNode(DummyParserNode):
"""
comment, kwargs = util.commentnode_kwargs(kwargs)
self.comment = comment
super().__init__(**kwargs)
super(DummyCommentNode, self).__init__(**kwargs)
class DummyDirectiveNode(DummyParserNode):
@@ -54,7 +54,7 @@ class DummyDirectiveNode(DummyParserNode):
self.parameters = parameters
self.enabled = enabled
super().__init__(**kwargs)
super(DummyDirectiveNode, self).__init__(**kwargs)
def set_parameters(self, parameters): # pragma: no cover
"""Set parameters"""

View File

@@ -1,5 +0,0 @@
<VirtualHost *:80>
<Location />
Require all denied
</Location>
</VirtualHost>

View File

@@ -1,16 +1,18 @@
"""Common utilities for certbot_apache."""
import shutil
import sys
import unittest
import augeas
import josepy as jose
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
import zope.component
from certbot.compat import os
from certbot.display import util as display_util
from certbot.plugins import common
from certbot.tests import util as test_util
from certbot_apache._internal import configurator
@@ -65,7 +67,10 @@ class ParserTest(ApacheTest):
def setUp(self, test_dir="debian_apache_2_4/multiple_vhosts",
config_root="debian_apache_2_4/multiple_vhosts/apache2",
vhost_root="debian_apache_2_4/multiple_vhosts/apache2/sites-available"):
super().setUp(test_dir, config_root, vhost_root)
super(ParserTest, self).setUp(test_dir, config_root, vhost_root)
zope.component.provideUtility(display_util.FileDisplay(sys.stdout,
False))
from certbot_apache._internal.parser import ApacheParser
self.aug = augeas.Augeas(
@@ -73,7 +78,7 @@ class ParserTest(ApacheTest):
with mock.patch("certbot_apache._internal.parser.ApacheParser."
"update_runtime_variables"):
self.parser = ApacheParser(
self.config_path, self.config, self.vhost_path)
self.config_path, self.vhost_path, configurator=self.config)
def get_apache_configurator(
@@ -118,11 +123,11 @@ def get_apache_configurator(
version=version, use_parsernode=use_parsernode,
openssl_version=openssl_version)
if not conf_vhost_path:
config_class.OS_DEFAULTS.vhost_root = vhost_path
config_class.OS_DEFAULTS["vhost_root"] = vhost_path
else:
# Custom virtualhost path was requested
config.config.apache_vhost_root = conf_vhost_path
config.config.apache_ctl = config_class.OS_DEFAULTS.ctl
config.config.apache_ctl = config_class.OS_DEFAULTS["ctl"]
config.prepare()
return config
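
The OS_DEFAULTS hunk above alternates between dictionary access (OS_DEFAULTS["vhost_root"]) and attribute access (OS_DEFAULTS.vhost_root). A dataclass is one common way to get the attribute-style container; the field names and defaults below are assumptions made for this sketch, not certbot's definition:

# Illustrative only: dict-style vs attribute-style defaults containers.
from dataclasses import dataclass


@dataclass
class OSDefaults:
    vhost_root: str = "/etc/apache2/sites-available"
    ctl: str = "apache2ctl"


defaults = OSDefaults()
defaults.vhost_root = "/custom/vhosts"   # attribute-style access
legacy = {"vhost_root": "/etc/apache2/sites-available", "ctl": "apache2ctl"}
legacy["vhost_root"] = "/custom/vhosts"  # dict-style access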

Some files were not shown because too many files have changed in this diff.