Compare commits

...

5 Commits

Author SHA1 Message Date
Adrien Ferrand
ddf1544647 Normal timeout to check everything is working as expected 2020-12-11 18:00:03 +01:00
Adrien Ferrand
41857f45e2 Focus on snap jobs + timeout 10s to test 2020-12-11 17:46:51 +01:00
Adrien Ferrand
d27ff05844 Try a new approach 2020-12-11 17:41:26 +01:00
Adrien Ferrand
ff18b86144 Merge branch 'master' into snap-build-timeout 2020-12-11 13:04:40 +01:00
Adrien Ferrand
cee8de69cf Setup a timeout 2020-11-30 19:56:56 +01:00
3 changed files with 160 additions and 149 deletions

View File

@@ -1,118 +1,118 @@
jobs:
- job: docker_build
pool:
vmImage: ubuntu-18.04
strategy:
matrix:
amd64:
DOCKER_ARCH: amd64
# Do not run the heavy non-amd64 builds for test branches
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
arm32v6:
DOCKER_ARCH: arm32v6
arm64v8:
DOCKER_ARCH: arm64v8
steps:
- bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
displayName: Build the Docker images
# We don't filter for the Docker Hub organization to continue to allow
# easy testing of these scripts on forks.
- bash: |
set -e
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
docker save --output images.tar $DOCKER_IMAGES
displayName: Save the Docker images
# If the name of the tar file or artifact changes, the deploy stage will
# also need to be updated.
- bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
displayName: Prepare Docker artifact
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
artifact: docker_$(DOCKER_ARCH)
displayName: Store Docker artifact
- job: docker_run
dependsOn: docker_build
pool:
vmImage: ubuntu-18.04
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: docker_amd64
path: $(Build.SourcesDirectory)
displayName: Retrieve Docker images
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
displayName: Load Docker images
- bash: |
set -ex
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
for DOCKER_IMAGE in ${DOCKER_IMAGES}
do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
done
displayName: Run integration tests for Docker images
- job: installer_build
pool:
vmImage: vs2017-win2016
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: 3.7
architecture: x86
addToPath: true
- script: python windows-installer/construct.py
displayName: Build Certbot installer
- task: CopyFiles@2
inputs:
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
contents: '*.exe'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@1
inputs:
path: $(Build.ArtifactStagingDirectory)
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
artifact: windows-installer
displayName: Publish Windows installer
- job: installer_run
dependsOn: installer_build
strategy:
matrix:
win2019:
imageName: windows-2019
win2016:
imageName: vs2017-win2016
pool:
vmImage: $(imageName)
steps:
- powershell: |
if ($PSVersionTable.PSVersion.Major -ne 5) {
throw "Powershell version is not 5.x"
}
condition: eq(variables['imageName'], 'vs2017-win2016')
displayName: Check Powershell 5.x is used in vs2017-win2016
- task: UsePythonVersion@0
inputs:
versionSpec: 3.8
addToPath: true
- task: DownloadPipelineArtifact@2
inputs:
artifact: windows-installer
path: $(Build.SourcesDirectory)/bin
displayName: Retrieve Windows installer
- script: |
python -m venv venv
venv\Scripts\python tools\pipstrap.py
venv\Scripts\python tools\pip_install.py -e certbot-ci
env:
PIP_NO_BUILD_ISOLATION: no
displayName: Prepare Certbot-CI
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
displayName: Run windows installer integration tests
- script: |
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
displayName: Run certbot integration tests
# - job: docker_build
# pool:
# vmImage: ubuntu-18.04
# strategy:
# matrix:
# amd64:
# DOCKER_ARCH: amd64
# # Do not run the heavy non-amd64 builds for test branches
# ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
# arm32v6:
# DOCKER_ARCH: arm32v6
# arm64v8:
# DOCKER_ARCH: arm64v8
# steps:
# - bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
# displayName: Build the Docker images
# # We don't filter for the Docker Hub organization to continue to allow
# # easy testing of these scripts on forks.
# - bash: |
# set -e
# DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
# docker save --output images.tar $DOCKER_IMAGES
# displayName: Save the Docker images
# # If the name of the tar file or artifact changes, the deploy stage will
# # also need to be updated.
# - bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
# displayName: Prepare Docker artifact
# - task: PublishPipelineArtifact@1
# inputs:
# path: $(Build.ArtifactStagingDirectory)
# artifact: docker_$(DOCKER_ARCH)
# displayName: Store Docker artifact
# - job: docker_run
# dependsOn: docker_build
# pool:
# vmImage: ubuntu-18.04
# steps:
# - task: DownloadPipelineArtifact@2
# inputs:
# artifact: docker_amd64
# path: $(Build.SourcesDirectory)
# displayName: Retrieve Docker images
# - bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
# displayName: Load Docker images
# - bash: |
# set -ex
# DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
# for DOCKER_IMAGE in ${DOCKER_IMAGES}
# do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
# done
# displayName: Run integration tests for Docker images
# - job: installer_build
# pool:
# vmImage: vs2017-win2016
# steps:
# - task: UsePythonVersion@0
# inputs:
# versionSpec: 3.7
# architecture: x86
# addToPath: true
# - script: python windows-installer/construct.py
# displayName: Build Certbot installer
# - task: CopyFiles@2
# inputs:
# sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
# contents: '*.exe'
# targetFolder: $(Build.ArtifactStagingDirectory)
# - task: PublishPipelineArtifact@1
# inputs:
# path: $(Build.ArtifactStagingDirectory)
# # If we change the artifact's name, it should also be changed in tools/create_github_release.py
# artifact: windows-installer
# displayName: Publish Windows installer
# - job: installer_run
# dependsOn: installer_build
# strategy:
# matrix:
# win2019:
# imageName: windows-2019
# win2016:
# imageName: vs2017-win2016
# pool:
# vmImage: $(imageName)
# steps:
# - powershell: |
# if ($PSVersionTable.PSVersion.Major -ne 5) {
# throw "Powershell version is not 5.x"
# }
# condition: eq(variables['imageName'], 'vs2017-win2016')
# displayName: Check Powershell 5.x is used in vs2017-win2016
# - task: UsePythonVersion@0
# inputs:
# versionSpec: 3.8
# addToPath: true
# - task: DownloadPipelineArtifact@2
# inputs:
# artifact: windows-installer
# path: $(Build.SourcesDirectory)/bin
# displayName: Retrieve Windows installer
# - script: |
# python -m venv venv
# venv\Scripts\python tools\pipstrap.py
# venv\Scripts\python tools\pip_install.py -e certbot-ci
# env:
# PIP_NO_BUILD_ISOLATION: no
# displayName: Prepare Certbot-CI
# - script: |
# set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
# venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
# displayName: Run windows installer integration tests
# - script: |
# set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
# venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
# displayName: Run certbot integration tests
- job: snaps_build
pool:
vmImage: ubuntu-18.04
@@ -144,7 +144,7 @@ jobs:
git config --global user.name "$(Build.RequestedFor)"
mkdir -p ~/.local/share/snapcraft/provider/launchpad
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
python3 tools/snap/build_remote.py ALL --archs ${ARCHS}
python3 tools/snap/build_remote.py ALL --archs ${ARCHS} --timeout 19800
displayName: Build snaps
- script: |
set -e

View File

@@ -1,6 +1,6 @@
stages:
- stage: TestAndPackage
jobs:
- template: ../jobs/standard-tests-jobs.yml
- template: ../jobs/extended-tests-jobs.yml
# - template: ../jobs/standard-tests-jobs.yml
# - template: ../jobs/extended-tests-jobs.yml
- template: ../jobs/packaging-jobs.yml

View File

@@ -1,22 +1,22 @@
#!/usr/bin/env python3
import argparse
import glob
import datetime
from multiprocessing import Pool, Process, Manager, Event
import glob
import re
import subprocess
import sys
import tempfile
import time
from multiprocessing import Pool, Process, Manager
from os.path import join, realpath, dirname, basename, exists
CERTBOT_DIR = dirname(dirname(dirname(realpath(__file__))))
PLUGINS = [basename(path) for path in glob.glob(join(CERTBOT_DIR, 'certbot-dns-*'))]
def _execute_build(target, archs, status, workspace):
process = subprocess.Popen([
'snapcraft', 'remote-build', '--launchpad-accept-public-upload', '--recover', '--build-on', ','.join(archs)
'snapcraft', 'remote-build', '--launchpad-accept-public-upload', '--recover',
'--build-on', ','.join(archs)
], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, cwd=workspace)
process_output = []
@@ -27,7 +27,7 @@ def _execute_build(target, archs, status, workspace):
return process.wait(), process_output
def _build_snap(target, archs, status, lock):
def _build_snap(target, archs, status, running, lock):
status[target] = {arch: '...' for arch in archs}
if target == 'certbot':
@@ -39,7 +39,8 @@ def _build_snap(target, archs, status, lock):
while retry:
exit_code, process_output = _execute_build(target, archs, status, workspace)
print(f'Build {target} for {",".join(archs)} (attempt {4-retry}/3) ended with exit code {exit_code}.')
print(f'Build {target} for {",".join(archs)} (attempt {4-retry}/3) ended with '
f'exit code {exit_code}.')
sys.stdout.flush()
with lock:
@@ -49,7 +50,8 @@ def _build_snap(target, archs, status, lock):
# We expect to have all target snaps available, or something bad happened.
snaps_list = glob.glob(join(workspace, '*.snap'))
if not len(snaps_list) == len(archs):
print(f'Some of the expected snaps for a successful build are missing (current list: {snaps_list}).')
print('Some of the expected snaps for a successful build are missing '
f'(current list: {snaps_list}).')
dump_output = True
else:
break
@@ -63,9 +65,12 @@ def _build_snap(target, archs, status, lock):
print(f'Dumping snapcraft remote-build output build for {target}:')
print('\n'.join(process_output))
# Retry the remote build if it has been interrupted (non zero status code) or if some builds have failed.
# Retry the remote build if it has been interrupted (non-zero status code)
# or if some builds have failed.
retry = retry - 1
running[target] = False
return {target: workspace}
@@ -96,15 +101,11 @@ def _dump_status_helper(archs, status):
sys.stdout.flush()
def _dump_status(archs, status, stop_event):
while not stop_event.wait(10):
print('Remote build status at {0}'.format(datetime.datetime.now()))
def _dump_status(archs, status, running):
while any(running.values()):
print(f'Remote build status at {datetime.datetime.now()}')
_dump_status_helper(archs, status)
def _dump_status_final(archs, status):
print('Results for remote build finished at {0}'.format(datetime.datetime.now()))
_dump_status_helper(archs, status)
time.sleep(10)
def _dump_results(targets, archs, status, workspaces):
@@ -120,10 +121,10 @@ def _dump_results(targets, archs, status, workspaces):
if not exists(build_output_path):
build_output = f'No output has been dumped by snapcraft remote-build.'
else:
with open(join(workspaces[target], '{0}_{1}.txt'.format(target, arch))) as file_h:
with open(join(workspaces[target], f'{target}_{arch}.txt')) as file_h:
build_output = file_h.read()
print('Output for failed build target={0} arch={1}'.format(target, arch))
print(f'Output for failed build target={target} arch={arch}')
print('-------------------------------------------')
print(build_output)
print('-------------------------------------------')
@@ -134,6 +135,10 @@ def _dump_results(targets, archs, status, workspaces):
else:
print('Some builds failed.')
print()
print(f'Results for remote build finished at {datetime.datetime.now()}')
_dump_status_helper(archs, status)
return failures
@@ -143,6 +148,8 @@ def main():
help='the list of snaps to build')
parser.add_argument('--archs', nargs='+', choices=['amd64', 'arm64', 'armhf'], default=['amd64'],
help='the architectures for which snaps are built')
parser.add_argument('--timeout', type=int, default=None,
help='build process will fail after the provided timeout (in seconds)')
args = parser.parse_args()
archs = set(args.archs)
@@ -158,7 +165,7 @@ def main():
# If we're building anything other than just Certbot, we need to
# generate the snapcraft files for the DNS plugins.
if targets != set(('certbot',)):
if targets != {'certbot'}:
subprocess.run(['tools/snap/generate_dnsplugins_all.sh'],
check=True, cwd=CERTBOT_DIR)
@@ -169,25 +176,29 @@ def main():
with Manager() as manager, Pool(processes=len(targets)) as pool:
status = manager.dict()
running = manager.dict({target: True for target in targets})
lock = manager.Lock()
stop_event = Event()
state_process = Process(target=_dump_status, args=(archs, status, stop_event))
state_process.start()
async_results = [pool.apply_async(_build_snap, (target, archs, status, running, lock))
for target in targets]
async_results = [pool.apply_async(_build_snap, (target, archs, status, lock)) for target in targets]
process = Process(target=_dump_status, args=(archs, status, running))
process.start()
workspaces = {}
for async_result in async_results:
workspaces.update(async_result.get())
try:
process.join(args.timeout)
stop_event.set()
state_process.join()
if process.is_alive():
raise ValueError(f"Timeout reached ({args.timeout} seconds) during the build!")
failures = _dump_results(targets, archs, status, workspaces)
_dump_status_final(archs, status)
workspaces = {}
for async_result in async_results:
workspaces.update(async_result.get())
return 1 if failures else 0
if _dump_results(targets, archs, status, workspaces):
raise ValueError("There were failures during the build!")
finally:
process.terminate()
if __name__ == '__main__':