Build Darling on a faster private CI machine

This machine is not always available, but when it is,
it's much faster (about 5-10 mins) than both the
GitHub Actions CI (3+ hours) and the old private CI machine (1+ hour).
This commit is contained in:
Ariel Abreu 2023-04-13 10:12:22 -04:00
parent 84a6ae50d4
commit 9797e2d32a
No known key found for this signature in database
GPG Key ID: C06B805216EDEEED
2 changed files with 46 additions and 128 deletions

17
ci/Dockerfile Normal file
View File

@@ -0,0 +1,17 @@
# this Dockerfile must be built using the following command:
# docker build -f ../ci/Dockerfile .
# this command must be run while in the `debian` directory in the root of the repo.
FROM ubuntu:jammy
LABEL name=darling-build-image version=0.1.0

# Build-time only (ARG, not ENV) so noninteractive mode is not baked into the
# runtime environment of containers started from this image.
ARG DEBIAN_FRONTEND="noninteractive"

# Derive a deb-src mirror of the binary sources so mk-build-deps (below) can
# resolve source build-dependencies.
RUN cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list \
    && sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list

# Darling build/toolchain dependencies, plus devscripts/equivs/debhelper for
# mk-build-deps and lsb-release/lsb-core for distro detection in the pipeline.
# `apt-get update` is combined with `install` in the same layer (DL3009) so a
# cached stale package index can never be paired with a fresh install, and the
# apt lists are removed in the same layer so they never persist in the image.
# (`apt clean` was replaced with `apt-get clean` — `apt` is not intended for
# scripted use, DL3027 — and clean alone does not purge /var/lib/apt/lists.)
RUN apt-get update && apt-get -y install \
        bison \
        clang \
        cmake \
        debhelper \
        devscripts \
        equivs \
        flex \
        gcc-multilib \
        git \
        git-lfs \
        libavcodec-dev \
        libavformat-dev \
        libavutil-dev \
        libbsd-dev \
        libc6-dev-i386 \
        libcairo2-dev \
        libdbus-1-dev \
        libegl1-mesa-dev \
        libelf-dev \
        libfontconfig1-dev \
        libfreetype6-dev \
        libfuse-dev \
        libgif-dev \
        libgl1-mesa-dev \
        libglu1-mesa-dev \
        libpulse-dev \
        libssl-dev \
        libswresample-dev \
        libtiff5-dev \
        libudev-dev \
        libvulkan-dev \
        libxcursor-dev \
        libxkbfile-dev \
        libxml2-dev \
        libxrandr-dev \
        llvm-dev \
        lsb-core \
        lsb-release \
        pkg-config \
        python2 \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Unprivileged CI user; fixed UID/GID 1001 to match the Jenkins agent's
# `-u ci:ci` docker args. One layer instead of two.
RUN groupadd -g 1001 ci && useradd -u 1001 -g 1001 -m ci

# Install the Debian build-dependencies declared in the packaging control file.
# update/install/clean and the control-file removal all happen in one layer so
# no apt lists or temp files are left behind in an intermediate layer.
COPY control /control
RUN apt-get update \
    && mk-build-deps -i -r -t "apt-get --no-install-recommends -y" /control \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    && rm /control

# Run the build as the unprivileged CI user.
USER ci

157
ci/Jenkinsfile vendored
View File

@@ -1,5 +1,13 @@
pipeline {
agent any
agent {
docker {
image 'registry.git.facekapow.dev/darling-build-image:latest'
registryUrl 'https://registry.git.facekapow.dev'
alwaysPull true
args '-u ci:ci'
label 'darling'
}
}
triggers {
githubPush()
@@ -8,161 +16,54 @@ pipeline {
environment {
DEB_DISTRO = sh(script: 'lsb_release -cs', returnStdout: true).trim()
ESCAPED_JOB_NAME = sh(script: 'echo "${JOB_NAME}" | sed \'s/\\//-/g\'', returnStdout: true).trim()
REAL_WORKSPACE = "/home/jenkins/workspace/${JOB_BASE_NAME}/${JOB_NAME}"
}
options {
skipDefaultCheckout()
disableConcurrentBuilds abortPrevious: true
}
stages {
stage('Start Job') {
steps {
dir("${REAL_WORKSPACE}") {
scmSkip(deleteBuild: true, skipPattern:'.*\\[ci skip\\].*')
scmSkip(deleteBuild: true, skipPattern:'.*\\[ci skip\\].*')
dir('source') {
checkout scm
}
script {
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) milestone(buildNumber - 1)
milestone(buildNumber)
while (fileExists('.job-running')) {
sleep 1
}
}
touch '.job-running'
}
}
}
stage('Workspace Setup') {
when {
not {
expression {
return fileExists("${REAL_WORKSPACE}/.workspace-setup")
}
}
}
steps {
dir("${REAL_WORKSPACE}") {
sh 'virt-clone --connect qemu:///system --original dtest-base --name dtest-${ESCAPED_JOB_NAME} --auto-clone --mac RANDOM'
touch '.vm-cloned'
sh 'virsh --connect qemu:///system snapshot-create-as --domain dtest-${ESCAPED_JOB_NAME} --name before-darling'
touch '.vm-snapshotted'
touch '.workspace-setup'
dir('source') {
checkout scm
}
}
}
stage('Build') {
steps {
dir("${REAL_WORKSPACE}") {
dir('source') {
sh 'git submodule update --init --recursive'
sh 'cd src/external/swift && git lfs install && git lfs pull'
touch '../.submodules-cloned'
dir('source') {
sh 'git submodule update --init --recursive'
sh 'cd src/external/swift && git lfs install && git lfs pull'
touch '../.submodules-cloned'
//sh 'tools/debian/make-deb --dsc'
sh 'tools/debian/make-deb'
}
sh 'rm -rf out'
sh 'mkdir out'
dir('out') {
sh 'mv ../*.deb ./'
sh 'rm ../*.*'
}
archiveArtifacts artifacts: 'out/**/*', fingerprint: true
//sh 'tools/debian/make-deb --dsc'
sh 'tools/debian/make-deb'
}
}
}
stage('Test') {
steps {
dir("${REAL_WORKSPACE}") {
sh 'virsh --connect qemu:///system start dtest-${ESCAPED_JOB_NAME}'
touch '.vm-running'
sh 'rm -rf out'
sh 'mkdir out'
script {
def vmIP = ""
waitUntil {
vmIP = sh(script: 'virtip dtest-${ESCAPED_JOB_NAME}', returnStdout: true)
return vmIP != ""
}
vmIP = vmIP.trim()
def remote = [:]
remote.name = 'dtest-box'
remote.host = vmIP
remote.user = 'dtest'
remote.password = 'dtest'
remote.allowAnyHosts = true
remote.retryCount = 5
remote.retryWaitSec = 2
sshPut remote: remote, from: 'out', into: '.'
sshCommand remote: remote, command: 'echo dtest | sudo -S apt install -y ./out/darling_*.deb'
// the initial shell must be in a screen because the pty is handed to launchd and ssh will stay connected
// even after the shell exits because launchd is still using the pty.
//
// if the command fails, we'll know when we run our next command (if it did fail, that means Darling is failing to run
// so launchd won't be holding on to the pty)
sshCommand remote: remote, command: 'screen -d -m darling shell'
sleep 5 // give Darling some time to initialize
// if this doesn't report back within 3 minutes, something's definitely broken in Darling, so stop the testing early
timeout(time: 180, unit: 'SECONDS') {
sshCommand remote: remote, command: 'darling shell echo hi'
}
}
dir('out') {
sh 'mv ../*.deb ./'
sh 'rm ../*.*'
}
archiveArtifacts artifacts: 'out/**/*', fingerprint: true
}
}
}
post {
cleanup {
dir("${REAL_WORKSPACE}") {
script {
if (fileExists('.vm-running')) {
// try to shut it down normally
sh 'virsh --connect qemu:///system shutdown dtest-${ESCAPED_JOB_NAME}'
try {
retry(5) {
sleep 1
def isRunning = sh(script: 'virsh --connect qemu:///system list --all | grep " dtest-${ESCAPED_JOB_NAME} " | awk \'{ print $3}\'', returnStdout: true).trim()
if (isRunning != "" && isRunning == "running") {
throw new Exception("VM not shutdown yet")
}
}
} catch (Exception e) {
// force shut it down
sh 'virsh --connect qemu:///system destroy dtest-${ESCAPED_JOB_NAME}'
}
sh 'virsh --connect qemu:///system snapshot-revert --domain dtest-${ESCAPED_JOB_NAME} --snapshotname before-darling'
sh 'rm .vm-running'
}
if (!fileExists('.submodules-cloned')) {
sh 'rm -rf source'
}
script {
if (!fileExists('.submodules-cloned')) {
sh 'rm -rf source'
}
sh 'rm .job-running'
}
}
}