From 756b94b2ae00952abb133680da81b67b924fd225 Mon Sep 17 00:00:00 2001 From: Zack Cerza Date: Mon, 3 Nov 2025 15:07:02 -0700 Subject: [PATCH] build in same file --- ceph-dev-pipeline/build/Jenkinsfile | 577 +++++++++++++++++++++++++++- 1 file changed, 570 insertions(+), 7 deletions(-) diff --git a/ceph-dev-pipeline/build/Jenkinsfile b/ceph-dev-pipeline/build/Jenkinsfile index 0535c7d3..f0836d77 100644 --- a/ceph-dev-pipeline/build/Jenkinsfile +++ b/ceph-dev-pipeline/build/Jenkinsfile @@ -6,7 +6,141 @@ def containerDists = ['centos9', 'rocky10'] def base_node_label = "gigantic" def parallelBuilds = [:] -def buildImpl; +def ceph_build_repo = "https://github.com/ceph/ceph-build" +def ceph_build_branch = "main" +def ubuntu_releases = [ + "noble": "24.04", + "jammy": "22.04", + "focal": "20.04", +] +def debian_releases = [ + "bookworm": "12", + "bullseye": "11", +] + +def ceph_release_spec_template = ''' +Name: ceph-release +Version: 1 +Release: 0%{?dist} +Summary: Ceph Development repository configuration +Group: System Environment/Base +License: GPLv2 +URL: ${project_url} +Source0: ceph.repo +BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) +BuildArch: noarch + +%description +This package contains the Ceph repository GPG key as well as configuration +for yum and up2date. + +%prep + +%setup -q -c -T +install -pm 644 %{SOURCE0} . 
+ +%build + +%install +rm -rf %{buildroot} +%if 0%{defined suse_version} +install -dm 755 %{buildroot}/%{_sysconfdir}/zypp +install -dm 755 %{buildroot}/%{_sysconfdir}/zypp/repos.d +install -pm 644 %{SOURCE0} \ + %{buildroot}/%{_sysconfdir}/zypp/repos.d +%else +install -dm 755 %{buildroot}/%{_sysconfdir}/yum.repos.d +install -pm 644 %{SOURCE0} \ + %{buildroot}/%{_sysconfdir}/yum.repos.d +%endif + +%clean + +%post + +%postun + +%files +%defattr(-,root,root,-) +%if 0%{defined suse_version} +/etc/zypp/repos.d/* +%else +/etc/yum.repos.d/* +%endif + +%changelog +* Mon Apr 28 2025 Zack Cerza 1-1 +''' + +def ceph_release_repo_template = ''' +[Ceph] +name=Ceph packages for \\$basearch +baseurl=${repo_base_url}/\\$basearch +enabled=1 +gpgcheck=0 +type=rpm-md +gpgkey=https://download.ceph.com/keys/autobuild.asc + +[Ceph-noarch] +name=Ceph noarch packages +baseurl=${repo_base_url}/noarch +enabled=1 +gpgcheck=0 +type=rpm-md +gpgkey=https://download.ceph.com/keys/autobuild.asc + +[ceph-source] +name=Ceph source packages +baseurl=${repo_base_url}/SRPMS +enabled=1 +gpgcheck=0 +type=rpm-md +gpgkey=https://download.ceph.com/keys/autobuild.asc +''' + +@NonCPS +def get_ceph_release_spec_text(project_url) { + def engine = new groovy.text.SimpleTemplateEngine() + def template = engine.createTemplate(ceph_release_spec_template) + def text = template.make(["project_url": project_url]) + return text.toString() +} + +@NonCPS +def get_ceph_release_repo_text(base_url) { + def engine = new groovy.text.SimpleTemplateEngine() + def template = engine.createTemplate(ceph_release_repo_template) + def text = template.make(["repo_base_url": base_url]) + return text.toString() +} + +def build_matrix = [:] + +def get_os_info(dist) { + def os = [ + "name": dist, + "version": dist, + "version_name": dist, + "pkg_type": "NONE", + ] + def matcher = dist =~ /^(centos|rhel|rocky|fedora)(\d+)/ + if ( matcher.find() ) { + os.name = matcher.group(1) + os.version = os.version_name = matcher.group(2) + 
+        os.pkg_type = "rpm"
+    } else if ( debian_releases.keySet().contains(dist) ) {
+        os.name = "debian"
+        os.version = debian_releases[dist]
+        os.pkg_type = "deb"
+    } else if ( ubuntu_releases.keySet().contains(dist) ) {
+        os.name = "ubuntu"
+        os.version = ubuntu_releases[dist]
+        os.pkg_type = "deb"
+    }
+    // We need to set matcher to null right after using it to avoid a java.io.NotSerializableException
+    matcher = null
+    return os
+}
 
 pipeline {
     agent any
@@ -53,12 +187,441 @@ pipeline {
                     if ( crimsonFlavors.contains(FLAVOR) && (DIST != 'centos9' || ARCH != 'x86_64') ) continue;
                     if ( env.CI_COMPILE == true || (env.CI_CONTAINER == "true" && containerDists.contains(DIST)) ) {
                         parallelBuilds["${DIST} ${ARCH} ${FLAVOR}"] = {
-                            node {
-                                label "(installed-os-centos9||installed-os-noble)&&${ARCH}&&${base_node_label}"
-                                buildImpl = load 'ceph-dev-pipeline/build/build.groovy'
-                                buildImpl.do_build([
-                                    env: env,
-                                ]);
+                            agent {
+                                label "(installed-os-centos9||installed-os-noble)&&${ARCH}&&${base_node_label}"
                             }
+                            stages {
+                                stage("node") {
+                                    steps {
+                                        script {
+                                            build_matrix["${DIST}_${ARCH}"] = env.CI_COMPILE.toBoolean()
+                                            sh "hostname -f"
+                                            def node_shortname = env.NODE_NAME.split('\\+')[-1]
+                                            def node_url = new URI([env.JENKINS_URL, "computer", env.NODE_NAME].join("/")).normalize()
+                                            println("DIST=${env.DIST} ARCH=${env.ARCH} FLAVOR=${env.FLAVOR}")
+                                            println("${node_shortname}")
+                                            println("${node_url}")
+                                            def os = get_os_info(env.DIST)
+                                            println("OS_NAME=${os.name}")
+                                            println("OS_PKG_TYPE=${os.pkg_type}")
+                                            println("OS_VERSION=${os.version}")
+                                            println("OS_VERSION_NAME=${os.version_name}")
+                                        }
+                                        sh './scripts/setup_container_runtime.sh'
+                                        sh "cat /etc/os-release"
+                                    }
+                                }
+                                stage("checkout ceph-build") {
+                                    steps {
+                                        checkout scmGit(
+                                            branches: [[name: env.CEPH_BUILD_BRANCH]],
+                                            userRemoteConfigs: [[url: ceph_build_repo]],
+                                            extensions: [
+                                                [$class: 'CleanBeforeCheckout']
+                                            ],
+                                        )
+                                    }
+                                }
+                                stage("copy artifacts") {
+                                    steps {
+                                        // COPY
+                                        script {
+                                            def artifact_filter =
"dist/sha1,dist/version,dist/other_envvars,dist/ceph-*.tar.bz2" + def os = get_os_info(env.DIST) + if ( env.CI_COMPILE && os.pkg_type == "deb" ) { + artifact_filter += ",dist/ceph_*.diff.gz,dist/ceph_*.dsc" + } + println artifact_filter + copyArtifacts( + projectName: env.SETUP_JOB, + selector: specific(buildNumber: env.SETUP_BUILD_ID), + filter: artifact_filter, + ) + } + // MISC + script { + sh 'sudo journalctl --show-cursor -n 0 --no-pager | tail -n1 | cut -d\" \" -f3 > $WORKSPACE/cursor' + } + // BUILD DESC + script { + def sha1_trimmed = env.SHA1.trim().toLowerCase() + def sha1_props = readProperties file: "${WORKSPACE}/dist/sha1" + sha1_from_artifact = sha1_props.SHA1.trim().toLowerCase() + if ( env.SHA1 && sha1_from_artifact != sha1_trimmed ) { + error message: "SHA1 from artifact (${sha1_from_artifact}) does not match parameter value (${sha1_trimmed})" + } else if ( ! env.SHA1 ) { + env.SHA1 = sha1_from_artifact + } + println "SHA1=${sha1_trimmed}" + env.VERSION = readFile(file: "${WORKSPACE}/dist/version").trim() + // In a release build, dist/other_envvars contains CEPH_REPO, and chacra_url + // as written during ceph-source-dist but we don't to be able to define + // ceph-releases.git or chacra.ceph.com as parameters for ceph-dev-pipeline. + def props = readProperties file: "${WORKSPACE}/dist/other_envvars" + for (p in props) { + env."${p.key}" = p.value + } + def branch_ui_value = env.BRANCH + def sha1_ui_value = env.SHA1 + if (env.CEPH_REPO?.find(/https?:\/\/github.com\//)) { + // If this is a release build, link to ceph-release.git's $BRANCH-release branch + def suffix = (env.RELEASE_BUILD?.trim() == "true") ? 
"-release" : "" + + def branch_url = "${env.CEPH_REPO}/tree/${env.BRANCH}${suffix}" + branch_ui_value = "${env.BRANCH}${suffix}" + def commit_url = "${env.CEPH_REPO}/commit/${env.SHA1}" + sha1_ui_value = "${env.SHA1}" + } + def shaman_url = "https://shaman.ceph.com/builds/ceph/${env.BRANCH}/${env.SHA1}" + def build_description = """\ + BRANCH=${branch_ui_value}
+ SHA1=${sha1_ui_value}
+ VERSION=${env.VERSION}
+ DISTROS=${env.DISTROS}
+ ARCHS=${env.ARCHS}
+ FLAVORS=${env.FLAVORS}
+ SETUP_BUILD_ID=${env.SETUP_BUILD_ID}
+ shaman builds for this branch+commit + """.stripIndent() + buildDescription build_description + } + // EXTRACT + script { + sh "sha256sum dist/*" + sh "cat dist/sha1 dist/version" + sh '''#!/bin/bash + set -ex + cd dist + mkdir ceph + tar --strip-components=1 -C ceph -xjf ceph-${VERSION}.tar.bz2 ceph-${VERSION}/{container,ceph.spec,ceph.spec.in,debian,Dockerfile.build,do_cmake.sh,install-deps.sh,run-make-check.sh,make-debs.sh,make-dist,make-srpm.sh,src/script} + ''' + } + } + } + stage("check for built packages") { + when { + environment name: 'THROWAWAY', value: 'false' + expression { return build_matrix["${DIST}_${ARCH}"] == true } + } + environment { + CHACRACTL_KEY = credentials('chacractl-key') + SHAMAN_API_KEY = credentials('shaman-api-key') + } + steps { + script { + sh './scripts/setup_chacractl.sh' + def chacra_url = sh( + script: '''grep url ~/.chacractl | cut -d'"' -f2''', + returnStdout: true, + ).trim() + def os = get_os_info(env.DIST) + def chacra_endpoint = "ceph/${env.BRANCH}/${env.SHA1}/${os.name}/${os.version_name}/${env.ARCH}/flavors/${env.FLAVOR}/" + def chacractl_rc = sh( + script: "$HOME/.local/bin/chacractl exists binaries/${chacra_endpoint}", + returnStatus: true, + ) + if ( chacractl_rc == 0 && env.FORCE != "true" ) { + println("Skipping compilation since chacra already has artifacts. 
To override, use THROWAWAY=true (to skip this check) or FORCE=true (to re-upload artifacts).") + build_matrix["${DIST}_${ARCH}"] = false + } + } + } + } + stage("builder container") { + environment { + CONTAINER_REPO_CREDS = credentials('quay-ceph-io-ceph-ci') + DOCKER_HUB_CREDS = credentials('dgalloway-docker-hub') + } + when { + expression { return build_matrix["${DIST}_${ARCH}"] == true } + } + steps { + script { + env.CEPH_BUILDER_IMAGE = "${env.CONTAINER_REPO_HOSTNAME}/${env.CONTAINER_REPO_ORGANIZATION}/ceph-build" + sh '''#!/bin/bash + set -ex + podman login -u ${CONTAINER_REPO_CREDS_USR} -p ${CONTAINER_REPO_CREDS_PSW} ${CONTAINER_REPO_HOSTNAME}/${CONTAINER_REPO_ORGANIZATION} + podman login -u ${DOCKER_HUB_CREDS_USR} -p ${DOCKER_HUB_CREDS_PSW} docker.io + ''' + def ceph_builder_tag_short = "${env.BRANCH}.${env.DIST}.${ARCH}.${FLAVOR}" + def ceph_builder_tag = "${env.SHA1[0..6]}.${ceph_builder_tag_short}" + sh """#!/bin/bash -ex + podman pull ${env.CEPH_BUILDER_IMAGE}:${ceph_builder_tag} || \ + podman pull ${env.CEPH_BUILDER_IMAGE}:${ceph_builder_tag_short} || \ + true + """ + sh """#!/bin/bash + set -ex + echo > .env + [[ $FLAVOR == crimson* ]] && echo "WITH_CRIMSON=true" >> .env || true + cd dist/ceph + python3 src/script/build-with-container.py --env-file=${env.WORKSPACE}/.env --image-repo=${env.CEPH_BUILDER_IMAGE} --tag=${ceph_builder_tag} --image-variant=packages -d ${DIST} -e build-container + podman tag ${env.CEPH_BUILDER_IMAGE}:${ceph_builder_tag} ${env.CEPH_BUILDER_IMAGE}:${ceph_builder_tag_short} + """ + sh """#!/bin/bash -ex + podman push ${env.CEPH_BUILDER_IMAGE}:${ceph_builder_tag_short} + podman push ${env.CEPH_BUILDER_IMAGE}:${ceph_builder_tag} + """ + } + } + } + stage("build") { + environment { + SHAMAN_API_KEY = credentials('shaman-api-key') + SCCACHE_BUCKET_CREDS = credentials('ibm-cloud-sccache-bucket') + } + when { + expression { return build_matrix["${DIST}_${ARCH}"] == true } + } + steps { + script { + def os = get_os_info(env.DIST) + sh 
"./scripts/update_shaman.sh started ceph ${os.name} ${os.version_name} $ARCH" + env.AWS_ACCESS_KEY_ID = env.SCCACHE_BUCKET_CREDS_USR + env.AWS_SECRET_ACCESS_KEY = env.SCCACHE_BUCKET_CREDS_PSW + def ceph_builder_tag = "${env.SHA1[0..6]}.${env.BRANCH}.${env.DIST}.${ARCH}.${FLAVOR}" + def bwc_command_base = "python3 src/script/build-with-container.py --image-repo=${env.CEPH_BUILDER_IMAGE} --tag=${ceph_builder_tag} -d ${DIST} --image-variant=packages --ceph-version ${env.VERSION}" + def bwc_command = bwc_command_base + def bwc_cmd_sccache_flags = "" + if ( env.DWZ == "false" ) { + sh '''#!/bin/bash + echo "DWZ=$DWZ" >> .env + ''' + bwc_cmd_sccache_flags = "--env-file=${env.WORKSPACE}/.env"; + } + if ( env.SCCACHE == "true" ) { + sh '''#!/bin/bash + echo "SCCACHE=$SCCACHE" >> .env + echo "SCCACHE_CONF=/ceph/sccache.conf" >> .env + echo "SCCACHE_ERROR_LOG=/ceph/sccache_log.txt" >> .env + echo "SCCACHE_LOG=debug" >> .env + echo "AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID" >> .env + echo "AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY" >> .env + echo "CEPH_BUILD_NORMALIZE_PATHS=true" >> .env + ''' + // TODO: un-hardcode this + writeFile( + file: "dist/ceph/sccache.conf", + text: """\ + [cache.s3] + bucket = "ceph-sccache" + endpoint = "s3.us-south.cloud-object-storage.appdomain.cloud" + use_ssl = true + key_prefix = "" + server_side_encryption = false + no_credentials = false + region = "auto" + """ + ) + bwc_cmd_sccache_flags = "--env-file=${env.WORKSPACE}/.env"; + } + def ceph_extra_cmake_args = ""; + def deb_build_profiles = ""; + switch (env.FLAVOR) { + case "default": + ceph_extra_cmake_args += " -DALLOCATOR=tcmalloc" + if (env.RELEASE_BUILD?.toBoolean()) { + ceph_extra_cmake_args += " -DWITH_SYSTEM_BOOST=OFF -DWITH_BOOST_VALGRIND=ON" + } + break + case ~/crimson.*/: + deb_build_profiles = "pkg.ceph.crimson"; + break + default: + println "FLAVOR=${env.FLAVOR} is invalid" + assert false + } + bwc_command = "${bwc_command} ${bwc_cmd_sccache_flags}" + if ( os.pkg_type == "deb" ) 
{ + def sccache_flag = "-DWITH_SCCACHE=ON" + if ( env.SCCACHE == "true" && ! ceph_extra_cmake_args.contains(sccache_flag) ) { + ceph_extra_cmake_args += " ${sccache_flag}" + } + if ( deb_build_profiles ) { + sh """#!/bin/bash + echo "DEB_BUILD_PROFILES=${deb_build_profiles}" >> .env + """ + } + bwc_command = "${bwc_command} -e debs" + } else if ( env.DIST =~ /^(centos|rhel|rocky|fedora).*/ ) { + def rpmbuild_args = "" + if ( env.SCCACHE == "true" ) rpmbuild_args += " -R--with=sccache" + if ( env.DWZ == "false" ) rpmbuild_args += " -R--without=dwz" + if ( env.FLAVOR == "default" ) rpmbuild_args += " -R--with=tcmalloc" + if ( env.FLAVOR.startsWith("crimson") ) rpmbuild_args += " -R--with=crimson" + bwc_command = "${bwc_command}${rpmbuild_args} -e rpm" + } else if ( env.DIST =~ /suse|sles/ ) { + throw new Exception("bwc not implemented for ${env.DIST}") + } else if ( env.DIST =~ /windows/ ) { + throw new Exception("bwc not implemented for ${env.DIST}") + } else { + throw new Exception("DIST '${env.DIST}' is invalid!") + } + sh """#!/bin/bash + echo "CEPH_EXTRA_CMAKE_ARGS=${ceph_extra_cmake_args}" >> .env + """ + sh """#!/bin/bash -ex + cd dist/ceph + ln ../ceph-${env.VERSION}.tar.bz2 . 
+ ${bwc_command} + """ + if ( os.pkg_type == "deb" ) { + sh """#!/bin/bash -ex + cd dist/ceph + ${bwc_command_base} -e custom -- "dpkg-deb --fsys-tarfile /ceph/debs/*/pool/main/c/ceph/cephadm_${VERSION}*.deb | tar -x -f - --strip-components=3 ./usr/sbin/cephadm" + ln ./cephadm ../../ + """ + } else if ( env.DIST =~ /^(centos|rhel|rocky|fedora).*/ ) { + sh """#!/bin/bash -ex + cd dist/ceph + ${bwc_command_base} -e custom -- "rpm2cpio /ceph/rpmbuild/RPMS/noarch/cephadm-*.rpm | cpio -i --to-stdout *sbin/cephadm > cephadm" + ln ./cephadm ../../ + """ + } + } + } + post { + always { + script { + if (fileExists('dist/ceph/sccache_log.txt')) { + sh """ + if [ -f "${env.WORKSPACE}/dist/ceph/sccache_log.txt" ]; then + ln dist/ceph/sccache_log.txt sccache_log_${env.DIST}_${env.ARCH}_${env.FLAVOR}.txt + fi + """ + // The below is to work around an issue where Jenkins would recurse into + // dist/ceph/qa and get stuck trying to follow the .qa symlinks. This was + // discovered by seeing many messages about "too many levels of symbolic links" + // in the system journal. 
+ sh "find ${env.WORKSPACE}/dist/ceph/ -name .qa -exec rm {} \\;" + archiveArtifacts( + artifacts: 'sccache_log*.txt', + allowEmptyArchive: true, + fingerprint: true, + ) + } + } + } + unsuccessful { + script { + def os = get_os_info(env.DIST) + sh "./scripts/update_shaman.sh failed ceph ${os.name} ${os.version_name} $ARCH" + } + } + } + } + stage("upload packages") { + environment { + CHACRACTL_KEY = credentials('chacractl-key') + SHAMAN_API_KEY = credentials('shaman-api-key') + } + when { + expression { return build_matrix["${DIST}_${ARCH}"] == true } + } + steps { + script { + def chacra_url = sh( + script: '''grep url ~/.chacractl | cut -d'"' -f2''', + returnStdout: true, + ).trim() + def os = get_os_info(env.DIST) + if ( os.pkg_type == "rpm" ) { + sh """#!/bin/bash + set -ex + cd ./dist/ceph + mkdir -p ./rpmbuild/SRPMS/ + ln ceph-*.src.rpm ./rpmbuild/SRPMS/ + """ + // Push packages to chacra.ceph.com under the 'test' ref if ceph-release-pipeline's TEST=true + env.SHA1 = env.TEST?.toBoolean() ? 
'test' : env.SHA1
+                                                def spec_text = get_ceph_release_spec_text("${chacra_url}r/ceph/${env.BRANCH}/${env.SHA1}/${os.name}/${os.version_name}/flavors/${env.FLAVOR}/")
+                                                writeFile(
+                                                    file: "dist/ceph/rpmbuild/SPECS/ceph-release.spec",
+                                                    text: spec_text,
+                                                )
+                                                def repo_text = get_ceph_release_repo_text("${chacra_url}r/ceph/${env.BRANCH}/${env.SHA1}/${os.name}/${os.version_name}/flavors/${env.FLAVOR}")
+                                                writeFile(
+                                                    file: "dist/ceph/rpmbuild/SOURCES/ceph.repo",
+                                                    text: repo_text,
+                                                )
+                                                def ceph_builder_tag = "${env.SHA1[0..6]}.${env.BRANCH}.${env.DIST}.${ARCH}.${FLAVOR}"
+                                                def bwc_command_base = "python3 src/script/build-with-container.py --image-repo=${env.CEPH_BUILDER_IMAGE} --tag=${ceph_builder_tag} -d ${DIST} --image-variant=packages --ceph-version ${env.VERSION}"
+                                                bwc_command = "${bwc_command_base} -e custom -- rpmbuild -bb --define \\'_topdir /ceph/rpmbuild\\' /ceph/rpmbuild/SPECS/ceph-release.spec"
+                                                sh """#!/bin/bash
+                                                set -ex
+                                                cd $WORKSPACE/dist/ceph
+                                                ${bwc_command}
+                                                """
+                                            }
+                                            sh """#!/bin/bash
+                                            export CHACRA_URL="${chacra_url}"
+                                            export OS_NAME="${os.name}"
+                                            export OS_VERSION="${os.version}"
+                                            export OS_VERSION_NAME="${os.version_name}"
+                                            export OS_PKG_TYPE="${os.pkg_type}"
+                                            if [ "$THROWAWAY" != "true" ]; then ./scripts/chacra_upload.sh; fi
+                                            """
+                                        }
+                                    }
+                                    post {
+                                        success {
+                                            script {
+                                                def os = get_os_info(env.DIST)
+                                                sh "./scripts/update_shaman.sh completed ceph ${os.name} ${os.version_name} $ARCH"
+                                            }
+                                        }
+                                        unsuccessful {
+                                            script {
+                                                def os = get_os_info(env.DIST)
+                                                sh "./scripts/update_shaman.sh failed ceph ${os.name} ${os.version_name} $ARCH"
+                                            }
+                                        }
+                                    }
+                                }
+                                stage("container") {
+                                    when {
+                                        environment name: "CI_CONTAINER", value: "true"
+                                        environment name: "DIST", value: "centos9"
+                                    }
+                                    environment {
+                                        CONTAINER_REPO_CREDS = credentials('quay-ceph-io-ceph-ci')
+                                    }
+                                    steps {
+                                        script {
+                                            env.CONTAINER_REPO_USERNAME = env.CONTAINER_REPO_CREDS_USR
+                                            env.CONTAINER_REPO_PASSWORD = env.CONTAINER_REPO_CREDS_PSW
+                                            distro = sh(
+                                                script: '.
/etc/os-release && echo -n $ID', + returnStdout: true, + ) + release = sh( + script: '. /etc/os-release && echo -n $VERSION_ID', + returnStdout: true, + ) + cephver = env.VERSION.trim() + sh """#!/bin/bash + export DISTRO=${distro} + export RELEASE=${release} + export cephver=${cephver} + ./scripts/build_container + """ + } + } + } + } + post { + success { + sh 'echo success: $DIST $ARCH $FLAVOR' + } + unsuccessful { + sh 'echo failure: $DIST $ARCH $FLAVOR' + } + always { + script { + sh 'hostname' + sh 'sudo journalctl -k -c $(cat ${WORKSPACE}/cursor)' + sh 'podman unshare chown -R 0:0 ${WORKSPACE}/' + } + } } } } -- 2.39.5