Page Menu
Home
Software Heritage
Search
Configure Global Search
Log In
Files
F9124137
No One
Temporary
Actions
View File
Edit File
Delete File
View Transforms
Subscribe
Mute Notifications
Award Token
Flag For Later
Size
9 KB
Subscribers
None
View Options
diff --git a/jobs/templates/debian/gbp-buildpackage.groovy.j2 b/jobs/templates/debian/gbp-buildpackage.groovy.j2
index 43f2932..5f1d335 100644
--- a/jobs/templates/debian/gbp-buildpackage.groovy.j2
+++ b/jobs/templates/debian/gbp-buildpackage.groovy.j2
@@ -1,273 +1,275 @@
// --- Job-level configuration -------------------------------------------
// Name of the source repository to build (templated by the job generator).
def repo_name = '{{repo_name}}'
// Distribution field from debian/changelog; assigned in the Checkout stage
// and used to guard the build/upload/backport stages.
def changelog_distribution
// Host, user and base path of the internal Debian repository.
def repo_host = 'pergamon.internal.softwareheritage.org'
def repo_user = 'swhdebianrepo'
def repo_path = '/srv/softwareheritage/repository'
// Drop-box directory the signed upload is rsynced into.
def upload_target = "${repo_path}/incoming"
// Run on the repository host after upload to ingest the incoming packages.
def repo_command = "umask 002; reprepro -vb ${repo_path} processincoming incoming"
// Path of the sibling 'automatic-backport' job in the same Jenkins folder.
def backport_job = '/' + (currentBuild.fullProjectName.split('/')[0..-2] + ['automatic-backport']).join('/')
pipeline {
agent { label 'debian' }
environment {
PHAB_CONDUIT_URL = 'https://forge.softwareheritage.org/api/'
}
stages {
// Report "work started" back to Phabricator, but only when the build was
// triggered with a PHID parameter (i.e. from a Differential revision).
stage('Notify Phabricator start') {
when {
beforeAgent true
expression { params.PHID }
}
agent { label 'swh-tox' }
steps {
withCredentials([
string(credentialsId: 'swh-public-ci',
variable: 'PHAB_CONDUIT_TOKEN')]) {
// pyarcanist picks up PHAB_CONDUIT_URL (set in 'environment')
// and PHAB_CONDUIT_TOKEN from the environment.
sh '''
python3 -m pyarcanist send-message work $PHID
'''
}
}
}
// Check out the requested revision and prepare the git branches that
// gbp buildpackage expects (packaging branch, pristine-tar, upstream).
stage('Checkout') {
steps {
cleanWs()
checkout([
$class: 'GitSCM',
branches: [[name: params.GIT_REVISION]],
userRemoteConfigs: [[
url: "https://forge.softwareheritage.org/source/${repo_name}.git",
]],
extensions: [
// Clone into a subdirectory named after the repository.
[$class: 'RelativeTargetDirectory', relativeTargetDir: repo_name],
],
])
script {
dir(repo_name) {
if(!fileExists('debian/changelog')) {
error('Trying to build a debian package without a debian/changelog')
}
// Distribution of the topmost changelog entry; 'UNRELEASED'
// makes the later stages skip via their 'when' guards.
changelog_distribution = sh(
script: 'dpkg-parsechangelog -SDistribution',
returnStdout: true,
).trim()
// debian/gbp.conf is close enough to properties syntax for
// readProperties; default the packaging branch to 'master'.
def parsed_gbp_config = readProperties(
file: 'debian/gbp.conf',
defaults: ['debian-branch': 'master'],
)
def debian_branch = parsed_gbp_config['debian-branch']
// (diff hunk) new: also honour the optional pristine-tar and
// upstream-branch settings from gbp.conf.
+ def pristine_tar = parsed_gbp_config['pristine-tar']
+ def upstream_branch = parsed_gbp_config['upstream-branch']
// NOTE(review): Jenkins runs this with `sh -xe`; the failing
// `[ … ]` before '&&' does not abort the script because it is
// not the last command of the AND-list. Presumably gbp.conf
// spells the value exactly "True" when pristine-tar is enabled
// — confirm against the repositories' gbp.conf files.
sh """
git checkout -b ${debian_branch}
- git branch -f pristine-tar origin/pristine-tar
- git branch -f debian/upstream origin/debian/upstream
+ [ ${pristine_tar} = "True" ] && git branch -f pristine-tar origin/pristine-tar
+ git branch -f ${upstream_branch} origin/${upstream_branch}
"""
}
}
}
}
// Build source + binary packages with gbp/sbuild, skipped while the
// changelog entry is still UNRELEASED.
stage('Build package') {
when {
beforeAgent true
expression { changelog_distribution != 'UNRELEASED' }
}
steps {
script {
dir (repo_name) {
// Extra APT repositories requested via job parameter
// (comma-separated names, matched against the literals below).
def wanted_extra_repositories = params.EXTRA_REPOSITORIES.split(',')
def build_dep_resolver
def extra_repositories = []
def extra_repository_keys = []
// e.g. 'buster-backports' -> base 'buster'; detect the
// '-backports' and '-swh' suffixed target distributions.
def base_distribution = changelog_distribution.split('-')[0]
def backports = changelog_distribution.endsWith('-backports')
def swh = changelog_distribution.endsWith('-swh')
// debian-buildd 'incoming' repositories: implied for
// unstable/experimental, or opt-in via EXTRA_REPOSITORIES.
if (base_distribution in ['unstable', 'experimental'] || 'incoming' in wanted_extra_repositories) {
def suites = []
if (base_distribution == 'unstable') {
suites = ['buildd-unstable']
} else if (base_distribution == 'experimental') {
// experimental builds also pull from buildd-unstable.
suites = ['buildd-unstable', 'buildd-experimental']
} else {
suites = ["buildd-${base_distribution}-proposed-updates"]
if (backports || swh) {
suites.add("buildd-${base_distribution}-backports")
}
}
suites.each {suite ->
extra_repositories.add("deb http://incoming.debian.org/debian-buildd/ ${suite} main")
}
}
// Software Heritage's own repository for -swh target suites;
// '<unstable|experimental>-swh' maps onto plain 'unstable'.
if (swh || 'swh' in wanted_extra_repositories) {
def swh_distribution = "${base_distribution}-swh"
if (base_distribution in ['unstable', 'experimental']) {
swh_distribution = 'unstable'
}
extra_repositories.add("deb [trusted=yes] https://debian.softwareheritage.org/ ${swh_distribution} main")
}
// Backports builds switch the resolver to aptitude (see the
// --build-dep-resolver option passed to gbp below).
if ((backports || swh || 'backports' in wanted_extra_repositories) && !(base_distribution in ['unstable', 'experimental'])) {
extra_repositories.add("deb http://deb.debian.org/debian/ ${base_distribution}-backports main")
build_dep_resolver = 'aptitude'
}
// Optional third-party repositories; their signing keys are
// expected under /usr/share/keyrings/extra-repositories/.
if ('ceph' in wanted_extra_repositories && !(base_distribution in ['unstable', 'experimental'])) {
if (base_distribution != 'stretch') {
extra_repositories.add("deb http://download.proxmox.com/debian/ceph-octopus/ ${base_distribution} main")
extra_repository_keys.add('proxmox.asc')
} else {
// stretch only has the older ceph-luminous repository.
extra_repositories.add("deb http://download.proxmox.com/debian/ceph-luminous/ ${base_distribution} main")
extra_repository_keys.add('proxmox.asc')
}
}
if ('pgdg' in wanted_extra_repositories && !(base_distribution in ['unstable', 'experimental'])) {
extra_repositories.add("deb http://apt.postgresql.org/pub/repos/apt/ ${base_distribution}-pgdg main")
extra_repository_keys.add('postgres.asc')
}
if ('elasticsearch' in wanted_extra_repositories) {
extra_repositories.add("deb https://artifacts.elastic.co/packages/7.x/apt stable main")
extra_repository_keys.add('elasticsearch.asc')
}
if ('cassandra' in wanted_extra_repositories) {
extra_repositories.add("deb http://www.apache.org/dist/cassandra/debian 40x main")
extra_repository_keys.add('cassandra.asc')
}
// Explicit job parameter overrides the automatic choice above.
if (params.BUILD_DEP_RESOLVER) {
build_dep_resolver = params.BUILD_DEP_RESOLVER
}
// Uploader identity recorded in the generated .changes files.
def hostname = sh(
script: "hostname --fqdn",
returnStdout: true,
).trim();
def short_hostname = hostname - '.internal.softwareheritage.org';
def uploader = "Software Heritage autobuilder (on ${short_hostname}) <jenkins@${hostname}>"
// Assemble the gbp buildpackage command line piece by piece.
def gbp_buildpackage = [
'gbp buildpackage',
'--git-builder=sbuild',
'--nolog',
'--batch',
'--no-clean-source',
'--no-run-lintian',
'--arch-all',
'--source',
'--force-orig-source',
"--uploader='${uploader}'",
]
if (build_dep_resolver != null) {
gbp_buildpackage.add("--build-dep-resolver=${build_dep_resolver}")
}
extra_repositories.each { repo ->
gbp_buildpackage.add("--extra-repository='${repo}'")
}
extra_repository_keys.each { key ->
gbp_buildpackage.add("--extra-repository-key=/usr/share/keyrings/extra-repositories/${key}")
}
def gbp_buildpackage_cmd = gbp_buildpackage.join(' ')
sh(script: gbp_buildpackage_cmd)
}
// Sign the .changes (and files it references) before upload.
if (params.DO_UPLOAD) {
sh(script: 'debsign *.changes')
}
// Archive every file referenced by the .changes files.
archiveArtifacts(
artifacts: sh(
script: 'dcmd echo *.changes',
returnStdout: true
).split().join(','),
fingerprint: true,
)
}
}
}
// rsync the signed upload to the repository host, then run reprepro
// there to ingest it. Only runs for released builds with DO_UPLOAD set.
stage('Upload package') {
when {
beforeAgent true
expression { changelog_distribution != 'UNRELEASED' }
expression { params.DO_UPLOAD }
}
steps {
// SSH key for the swhdebianrepo account on the repository host.
sshagent (credentials: ['jenkins-debian-repo-ssh']) {
sh """
dcmd rsync -v *.changes ${repo_user}@${repo_host}:${upload_target}
ssh ${repo_user}@${repo_host} '${repo_command}'
"""
}
}
}
// Trigger the 'automatic-backport' sibling job for each configured
// 'source>destination' pair whose source suite matches the distribution
// that was just built. BACKPORT_ON_SUCCESS is a comma-separated list of
// such pairs.
stage('Prepare backport') {
when {
beforeAgent true
expression { changelog_distribution != 'UNRELEASED' }
expression { params.BACKPORT_ON_SUCCESS }
// Skip silently when the sibling job does not exist.
expression { jobExists(backport_job) }
}
steps {
script {
params.BACKPORT_ON_SUCCESS.split(',').each { bpo_pair ->
def (src_suite, dst_suite) = bpo_pair.split('>')
if (src_suite == changelog_distribution) {
// Fire and forget: do not block this build on the backport.
build(
job: backport_job,
parameters: [
string(name: 'GIT_TAG', value: params.GIT_REVISION),
string(name: 'SOURCE', value: src_suite),
string(name: 'DESTINATION', value: dst_suite),
],
wait: false,
)
}
}
}
}
}
}
}
// Always report the final build status back to Phabricator (mirrors the
// 'Notify Phabricator start' stage); the message is only sent when a
// PHID parameter was provided.
post {
always {
node('swh-tox') {
withCredentials([
string(credentialsId: 'swh-public-ci',
variable: 'PHAB_CONDUIT_TOKEN')]) {
// Expose the overall build result to the shell script below.
withEnv(["JOBSTATUS=${currentBuild.currentResult}"]) {
// Fix: the echo previously read "$MGSTYPE" (transposed typo
// for "$MSGTYPE"), so the log line always showed an empty
// message type.
sh '''
if [ "$JOBSTATUS" = "SUCCESS" ]; then
MSGTYPE=pass
else
MSGTYPE=fail
fi
echo "Current job status is $JOBSTATUS -> $MSGTYPE"
if [ -n "$PHID" ]; then
python3 -m pyarcanist send-message $MSGTYPE $PHID
fi
'''
}
}
}
}
}
}
File Metadata
Details
Attached
Mime Type
text/x-diff
Expires
Sat, Jun 21, 6:43 PM (2 w, 6 h ago)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
3251221
Attached To
rCJSWH Jenkins jobs
Event Timeline
Log In to Comment