
diff --git a/jobs/swh-packages.yaml b/jobs/swh-packages.yaml
index 5d88171..037a7df 100644
--- a/jobs/swh-packages.yaml
+++ b/jobs/swh-packages.yaml
@@ -1,349 +1,351 @@
- job-group:
name: "swh-jobs-{name}"
gitlab_url: https://gitlab-staging.swh.network
gitlab_connection_name: gitlab-staging
gitlab_project_name: "swh/devel/{repo_name}"
jobs:
- "{name}"
- "{name}/incoming-tag"
- "{name}/pypi-upload"
- "{name}/tests"
- "{name}/tests-on-diff"
- "{name}/gitlab-tests"
+ - "{name}/gitlab-incoming-tag"
+ - "{name}/gitlab-pypi-upload"
- "debian/packages/{name}"
- "debian/packages/{name}/update-for-release"
- "debian/packages/{name}/automatic-backport"
- "debian/packages/{name}/gbp-buildpackage"
- project:
name: DAUTH
display-name: swh-auth
repo_name: swh-auth
pkg: auth
python_module: swh.auth
jobs:
- "swh-jobs-{name}"
- project:
name: DCORE
display-name: swh-core
repo_name: swh-core
pkg: core
python_module: swh.core
jobs:
- "swh-jobs-{name}"
- project:
name: DCNT
display-name: swh-counters
repo_name: swh-counters
pkg: counters
python_module: swh.counters
jobs:
- "swh-jobs-{name}"
- project:
name: DDATASET
display-name: swh-dataset
repo_name: swh-dataset
pkg: dataset
python_module: swh.dataset
jobs:
- "swh-jobs-{name}"
- project:
name: DDEP
display-name: swh-deposit
repo_name: swh-deposit
pkg: deposit
python_module: swh.deposit
jobs:
- "swh-jobs-{name}"
- project:
name: DGRPH
display-name: swh-graph
repo_name: swh-graph
pkg: graph
python_module: swh.graph
jobs:
- "swh-jobs-{name}"
- project:
name: DGQL
display-name: swh-graphql
repo_name: swh-graphql
pkg: graphql
python_module: swh.graphql
jobs:
- "swh-jobs-{name}"
- project:
name: DCIDX
display-name: swh-indexer
repo_name: swh-indexer
pkg: indexer
python_module: swh.indexer
jobs:
- "swh-jobs-{name}"
- project:
name: DICP
display-name: swh-icinga-plugins
repo_name: swh-icinga-plugins
pkg: icinga-plugins
python_module: swh.icinga_plugins
jobs:
- "swh-jobs-{name}"
- project:
name: DJNL
display-name: swh-journal
repo_name: swh-journal
docker_image: kafka
pkg: journal
python_module: swh.journal
jobs:
- "swh-jobs-{name}"
- project:
name: DLS
display-name: swh-lister
repo_name: swh-lister
pkg: lister
python_module: swh.lister
jobs:
- "swh-jobs-{name}"
- project:
name: DLDBZR
display-name: swh-loader-bzr
repo_name: swh-loader-bzr
pkg: loader.bzr
python_module: swh.loader.bzr
jobs:
- "swh-jobs-{name}"
- project:
name: DLDBASE
display-name: swh-loader-core
repo_name: swh-loader-core
pkg: loader.core
python_module: swh.loader.core
jobs:
- "swh-jobs-{name}"
- project:
name: DLDCVS
display-name: swh-loader-cvs
repo_name: swh-loader-cvs
pkg: loader.cvs
python_module: swh.loader.cvs
include_bdist: false
jobs:
- "swh-jobs-{name}"
- project:
name: DLDG
display-name: swh-loader-git
repo_name: swh-loader-git
pkg: loader.git
python_module: swh.loader.git
jobs:
- "swh-jobs-{name}"
- project:
name: DLDMD
display-name: swh-loader-metadata
repo_name: swh-loader-metadata
pkg: loader.metadata
python_module: swh.loader.metadata
jobs:
- "swh-jobs-{name}"
- project:
name: DLDHG
display-name: swh-loader-mercurial
repo_name: swh-loader-mercurial
pkg: loader.mercurial
python_module: swh.loader.mercurial
jobs:
- "swh-jobs-{name}"
- project:
name: DLDSVN
display-name: swh-loader-svn
repo_name: swh-loader-svn
pkg: loader.svn
python_module: swh.loader.svn
jobs:
- "swh-jobs-{name}"
- project:
name: DMOD
display-name: swh-model
repo_name: swh-model
pkg: model
python_module: swh.model
jobs:
- "swh-jobs-{name}"
- project:
name: DMFCD
display-name: swh-clearlydefined
repo_name: swh-clearlydefined
pkg: clearlydefined
python_module: swh.clearlydefined
jobs:
- "swh-jobs-{name}"
- project:
name: DOBJS
display-name: swh-objstorage
repo_name: swh-objstorage
pkg: objstorage
deb-extra-repositories: ceph
python_module: swh.objstorage
jobs:
- "swh-jobs-{name}"
- project:
name: DOBJSRPL
display-name: swh-objstorage-replayer
repo_name: swh-objstorage-replayer
pkg: objstorage.replayer
python_module: swh.objstorage.replayer
jobs:
- "swh-jobs-{name}"
- project:
name: DOPH
display-name: swh-perfecthash
repo_name: swh-perfecthash
pkg: perfecthash
python_module: swh.perfecthash
include_bdist: false
jobs:
- "swh-jobs-{name}"
- project:
name: DPROV
display-name: swh-provenance
repo_name: swh-provenance
pkg: provenance
python_module: swh.provenance
timeout: 15
jobs:
- "swh-jobs-{name}"
- project:
name: DSEA
display-name: swh-search
repo_name: swh-search
pkg: search
deb-extra-repositories: elasticsearch
python_module: swh.search
jobs:
- "swh-jobs-{name}"
- project:
name: TSBX
display-name: swh-sandbox
repo_name: swh-sandbox
pkg: sandbox
incoming_tag_auto_pypi_host: test.pypi.org
python_module: swh.sandbox
jobs:
- "{name}"
- "{name}/incoming-tag"
- "{name}/pypi-upload"
- "{name}/tests"
- "{name}/tests-on-diff"
- project:
name: DTSCN
display-name: swh-scanner
repo_name: swh-scanner
pkg: scanner
python_module: swh.scanner
jobs:
- "swh-jobs-{name}"
- project:
name: DSCH
display-name: swh-scheduler
repo_name: swh-scheduler
pkg: scheduler
python_module: swh.scheduler
jobs:
- "swh-jobs-{name}"
- project:
name: DSCRUB
display-name: swh-scrubber
repo_name: swh-scrubber
pkg: scrubber
python_module: swh.scrubber
jobs:
- "swh-jobs-{name}"
- project:
name: DSTO
display-name: swh-storage
repo_name: swh-storage
pkg: storage
deb-extra-repositories: cassandra
python_module: swh.storage
timeout: 25
jobs:
- "swh-jobs-{name}"
- project:
name: DVAU
display-name: swh-vault
repo_name: swh-vault
pkg: vault
python_module: swh.vault
jobs:
- "swh-jobs-{name}"
- project:
name: DWAPPS
display-name: swh-web
repo_name: swh-web
pkg: web
python_module: swh.web
do_cypress: true
nb_cypress_runners: 4
timeout: 30
max_concurrent: 3
jobs:
- "swh-jobs-{name}"
- project:
name: DWCLI
display-name: swh-web-client
repo_name: swh-web-client
pkg: web-client
python_module: swh.web.client
jobs:
- "swh-jobs-{name}"
- project:
name: DFUSE
display-name: swh-fuse
repo_name: swh-fuse
pkg: fuse
python_module: swh.fuse
docker_options: --privileged --device /dev/fuse
jobs:
- "swh-jobs-{name}"
- project:
name: DTPL
display-name: swh-py-template
repo_name: swh-py-template
pkg: py-template
python_module: swh.foo
jobs:
- "swh-jobs-{name}"
diff --git a/jobs/templates/incoming-tag.groovy.j2 b/jobs/templates/incoming-tag.groovy.j2
index 7254d21..6fee916 100644
--- a/jobs/templates/incoming-tag.groovy.j2
+++ b/jobs/templates/incoming-tag.groovy.j2
@@ -1,71 +1,102 @@
pipeline {
agent none
stages {
stage('Refresh tag list') {
agent any
steps {
checkout([
$class: 'GitSCM',
+ {%- if gitlab_project %}
+ userRemoteConfigs: [[
+ name:'origin', url: '{{gitlab_url}}/{{gitlab_project_name}}.git',
+ refspec: '+refs/tags/*:refs/remotes/origin/tags*'
+ ]],
+ branches: [[
+ name: "${env.gitlabSourceBranch}"
+ ]],
+ browser: [
+ $class: 'GitLab',
+ repoUrl: '{{gitlab_url}}/{{gitlab_project_name}}'
+ ],
+ extensions: [[$class: 'CloneOption', honorRefspec: true]],
+ {%- else %}
userRemoteConfigs: [[
url: 'https://forge.softwareheritage.org/source/{{repo_name}}.git',
]],
branches: [[
name: params.GIT_TAG,
]],
browser: [
$class: 'Phabricator',
repo: '{{repo_name}}',
repoUrl: 'https://forge.softwareheritage.org/',
],
+ {%- endif %}
])
}
}
stage('Build and upload PyPI package') {
when {
+ {%- if gitlab_project %}
+ expression { "${env.gitlabSourceBranch}" ==~ /refs\/tags\/v\d+(.\d+)+/ }
+ expression { jobExists('/{{name}}/gitlab-pypi-upload') }
+ {%- else %}
expression { params.GIT_TAG ==~ /v\d+(.\d+)+/ }
expression { jobExists('/{{name}}/pypi-upload') }
+ {%- endif %}
+
}
steps {
build(
+ {%- if gitlab_project %}
+ job: '/{{name}}/gitlab-pypi-upload',
+ {%- else %}
job: '/{{name}}/pypi-upload',
+ {%- endif %}
parameters: [
+ {%- if gitlab_project %}
+ string(name: 'GIT_TAG', value: "${env.gitlabSourceBranch}"),
+ {%- else %}
string(name: 'GIT_TAG', value: params.GIT_TAG),
+ {%- endif %}
string(name: 'PYPI_HOST', value: '{{incoming_tag_auto_pypi_host}}'),
],
)
}
}
+ {%- if not gitlab_project %}
stage('Update Debian packaging for new release') {
when {
expression { params.GIT_TAG ==~ /v\d+(.\d+)+/ }
expression { jobExists('/debian/packages/{{name}}/update-for-release') }
}
steps {
build(
job: '/debian/packages/{{name}}/update-for-release',
parameters: [
string(name: 'GIT_TAG', value: params.GIT_TAG),
],
wait: false,
)
}
}
stage('Build Debian package') {
when {
expression { params.GIT_TAG ==~ /debian\/.*/ }
expression { !(params.GIT_TAG ==~ /debian\/upstream\/.*/) }
expression { jobExists('/debian/packages/{{name}}/gbp-buildpackage') }
}
steps {
build(
job: '/debian/packages/{{name}}/gbp-buildpackage',
parameters: [
string(name: 'GIT_REVISION', value: params.GIT_TAG),
booleanParam(name: 'DO_UPLOAD', value: true),
],
wait: false,
)
}
}
+ {%- endif %}
}
}
diff --git a/jobs/templates/incoming-tag.yaml b/jobs/templates/incoming-tag.yaml
index 1fe1950..ca5c944 100644
--- a/jobs/templates/incoming-tag.yaml
+++ b/jobs/templates/incoming-tag.yaml
@@ -1,25 +1,39 @@
- job-template: &incoming_tag
name: "{name}/incoming-tag"
display-name: Incoming tag
project-type: pipeline
- auth-token: 'ph4br1cat0r'
+ auth-token: "ph4br1cat0r"
incoming_tag_auto_pypi_host: pypi.org
sandbox: true
+ gitlab_project: false
properties:
- build-discarder:
num-to-keep: 20
parameters:
- git-parameter:
name: GIT_TAG
description: git tag to process
type: PT_TAG
sortMode: DESCENDING_SMART
selectedValue: TOP
- dsl:
- !include-jinja2: incoming-tag.groovy.j2
+ dsl: !include-jinja2: incoming-tag.groovy.j2
- job-template:
name: "debian/deps/{name}/incoming-tag"
- dsl:
- !include-jinja2: dependency-incoming-tag.groovy.j2
+ dsl: !include-jinja2: dependency-incoming-tag.groovy.j2
+ <<: *incoming_tag
+
+- job-template:
+ name: "{name}/gitlab-incoming-tag"
+ display-name: Incoming tag (GitLab)
+ gitlab_project: true
+ triggers:
+ - gitlab:
+ trigger-push: true
+ trigger-merge-request: false
+ ci-skip: false
+ set-build-description: true
+ add-ci-message: true
+ # secret jenkins token is generated when executing tox
+ secret-token: !include-raw: jenkins-token
<<: *incoming_tag
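
Note: the &incoming_tag anchor and the <<: *incoming_tag merge keys above are plain YAML. The new "{name}/gitlab-incoming-tag" template inherits every setting from the anchored template, and keys written explicitly in the new mapping take precedence over merged ones, which is what lets it flip gitlab_project to true and swap in the gitlab trigger. A minimal generic sketch of the pattern, with hypothetical names:

  - job-template: &base
      name: "{name}/thing"
      gitlab_project: false
      sandbox: true

  - job-template:
      name: "{name}/gitlab-thing"
      gitlab_project: true    # explicit keys win over keys pulled in by the merge
      <<: *base               # everything not redefined (sandbox, ...) comes from &base
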
diff --git a/jobs/templates/swh-pipeline.groovy.j2 b/jobs/templates/swh-pipeline.groovy.j2
index d549a6e..63bc0c6 100644
--- a/jobs/templates/swh-pipeline.groovy.j2
+++ b/jobs/templates/swh-pipeline.groovy.j2
@@ -1,190 +1,192 @@
pipeline {
{% filter indent(width=2) %}
{%- include 'includes/agent-docker.groovy.j2' -%}
{% endfilter %}
options {
// require "Throttle Concurrent Builds" Jenkins plugin
throttleJobProperty(
categories: [],
limitOneJobWithMatchingParams: false,
maxConcurrentPerNode: {{ max_concurrent }},
maxConcurrentTotal: {{ max_concurrent }},
paramsToUseForLimit: '',
throttleEnabled: true,
throttleOption: 'project',
)
{%- if gitlab_project %}
gitLabConnection("{{gitlab_connection_name}}")
{%- endif %}
}
{%- if not gitlab_project %}
environment {
PHAB_CONDUIT_URL = 'https://forge.softwareheritage.org/api/'
}
{%- endif %}
stages {
stage('Checkout') {
steps {
{%- if not gitlab_project %}
{% filter indent(width=8) %}
{%- include 'includes/create-phabricator-artifacts.groovy.j2' -%}
{% endfilter %}
{%- endif %}
checkout([
$class: 'GitSCM',
{%- if gitlab_project %}
userRemoteConfigs: [[
name:'origin', url: '{{gitlab_url}}/{{gitlab_project_name}}.git',
- refspec: '+refs/heads/*:refs/remotes/origin/* +refs/merge-requests/*/head:refs/remotes/origin/merge-requests/*',
+ refspec: '+refs/heads/*:refs/remotes/origin/* \
+ +refs/merge-requests/*/head:refs/remotes/origin/merge-requests/* \
+ +refs/tags/*:refs/remotes/origin/tags*',
]],
branches: [[
- name: "origin/${env.gitlabSourceBranch}"
+ name: "${params.REVISION}" ?: "${origin}/${env.gitlabSourceBranch}"
]],
browser: [
$class: 'GitLab',
repoUrl: '{{gitlab_url}}/{{gitlab_project_name}}'
],
{%- else %}
userRemoteConfigs: [[
url: 'https://forge.softwareheritage.org/source/{{repo_name}}.git',
refspec: '+refs/heads/master:refs/remotes/origin/master',
]],
branches: [[
name: "${params.REVISION}"
]],
browser: [
$class: 'Phabricator',
repoUrl: 'https://forge.softwareheritage.org', repo: '{{name}}'
],
{%- endif %}
doGenerateSubmoduleConfigurations: false,
extensions: [[$class: 'CloneOption', honorRefspec: true]],
gitTool: 'Default',
submoduleCfg: [],
])
}
}
{%- if not gitlab_project and phabricator_diff %}
stage('Apply phabricator diff') {
steps {
{% filter indent(width=8) %}
{%- include 'includes/script-apply-phabricator-diff.groovy.j2' -%}
{% endfilter %}
}
}
{%- endif %}
stage ('flake8') {
steps {
sh '''python3 -m tox -e flake8'''
}
}
stage ('mypy') {
steps {
sh '''python3 -m tox -e mypy'''
}
}
stage ('radon') {
steps {
sh '''
mkdir -p reports
python3 -m radon raw --json swh/ > reports/raw_report.json
python3 -m radon cc --json swh/ > reports/cc_report.json
python3 -m radon mi --json swh/ > reports/mi_report.json
python3 -m radon hal --json swh/ > reports/hal_report.json
python3 -m radon cc --xml swh/ > reports/cc_report.xml
'''
}
post {
always {
// Archive a few report files
archiveArtifacts(
allowEmptyArchive: true,
artifacts: 'reports/*',
fingerprint: true,
)
// Warnings NG
recordIssues(
enabledForFailure: true,
tools: [
ccm(pattern: '**/reports/cc_report.xml'),
],
)
}
}
}
stage('Tests') {
options {
timeout(time: {{ timeout }}, unit: 'MINUTES')
}
parallel {
{% filter indent(width=8) %}
{%- include 'includes/stage-python-tests.groovy.j2' -%}
{% endfilter %}
{%- if do_cypress %}
{% filter indent(width=8) %}
{%- include 'includes/stage-cypress-tests.groovy.j2' -%}
{% endfilter %}
{%- endif %}
stage('Sphinx documentation') {
{% filter indent(width=10) %}
{%- include 'includes/agent-docker-sphinx.groovy.j2' -%}
{% endfilter %}
steps {
sh '''
if tox -a | grep -x sphinx >/dev/null
then
tox -e sphinx
else
echo WARNING: no sphinx environment in tox.ini
fi
'''
}
} // sphinx doc
} // parallel
} // Tests stage
} // stages
post {
{%- if gitlab_project %}
failure {
updateGitlabCommitStatus name: 'jenkins', state: 'failed'
}
success {
updateGitlabCommitStatus name: 'jenkins', state: 'success'
}
aborted {
updateGitlabCommitStatus name: 'jenkins', state: 'canceled'
}
{%- else %}
always {
step([$class: 'PhabricatorNotifier',
commentOnSuccess: true,
commentWithConsoleLinkOnFailure: true,
commentFile: '.phabricator-comment',
commentSize: '1000000',
preserveFormatting: true,
processLint: true,
lintFile: '.phabricator-lint',
lintFileSize: '1000000',
])
} // always
{%- endif %}
cleanup {
cleanWs()
}
} // post
} // pipeline
diff --git a/jobs/templates/swh-pipeline.yaml b/jobs/templates/swh-pipeline.yaml
index b8ae5ea..7981b41 100644
--- a/jobs/templates/swh-pipeline.yaml
+++ b/jobs/templates/swh-pipeline.yaml
@@ -1,65 +1,70 @@
- job-template: &master_tests
name: "{name}/tests"
display-name: "master branch"
project-type: pipeline
docker_image: tox
cypress_docker_image: cypress
sphinx_docker_image: sphinx
default-tox-environment: py3
concurrent: true
triggers:
- timed: "@daily"
sandbox: true
auth-token: "ph4br1cat0r"
properties:
- build-discarder:
days-to-keep: 90
artifact-num-to-keep: 20
phabricator_diff: false
gitlab_project: false
do_cypress: false
timeout: 10
max_concurrent: 0
parameters:
- git-parameter:
name: REVISION
type: PT_REVISION
defaultValue: master
description: Git revision to build.
- string:
name: PHID
description: PHID of the Phabricator target object on which results will be reported.
- string:
name: TOX_ENVIRONMENT
description: tox environment to use for the main tox run
default: "{default-tox-environment}"
dsl: !include-jinja2: swh-pipeline.groovy.j2
- job-template:
name: "{name}/gitlab-tests"
display-name: "gitlab builds"
auth-token:
properties:
- gitlab-logo:
repository-name: "{gitlab_project_name}"
triggers:
- gitlab:
trigger-push: true
trigger-merge-request: true
trigger-open-merge-request-push: both
trigger-closed-merge-request: false
ci-skip: false
set-build-description: true
add-note-merge-request: true
add-vote-merge-request: true
add-ci-message: true
cancel-pending-builds-on-update: true
+ # secret jenkins token is generated when executing tox
secret-token: !include-raw: jenkins-token
gitlab_project: true
parameters:
+ - git-parameter:
+ name: REVISION
+ type: string
+ description: Git revision to build.
- string:
name: TOX_ENVIRONMENT
description: tox environment to use for the main tox run
default: "{default-tox-environment}"
<<: *master_tests
diff --git a/jobs/templates/swh-pypi.groovy.j2 b/jobs/templates/swh-pypi.groovy.j2
index 859becf..853c21c 100644
--- a/jobs/templates/swh-pypi.groovy.j2
+++ b/jobs/templates/swh-pypi.groovy.j2
@@ -1,111 +1,129 @@
def module_name = '{{repo_name}}'.replace('-', '.')
def PYPI_UPLOAD_HOST
switch (params.PYPI_HOST) {
case 'pypi.org':
PYPI_UPLOAD_HOST = 'upload.pypi.org'
break
default:
PYPI_UPLOAD_HOST = params.PYPI_HOST
break
}
def BDIST_WHEEL = 'bdist_wheel'
if (!params.INCLUDE_BDIST) {
BDIST_WHEEL = ''
}
pipeline {
{% filter indent(width=2) %}
{%- include 'includes/agent-docker.groovy.j2' -%}
{% endfilter %}
stages {
stage('Run tests') {
when {
expression { return !params.SKIP_TESTS }
beforeAgent true
}
agent none
steps {
build(
+ {%- if gitlab_project %}
+ job: '/{{name}}/gitlab-tests',
+ {%- else %}
job: '/{{name}}/tests',
+ {%- endif %}
parameters: [
string(name: 'REVISION', value: params.GIT_TAG),
],
propagate: !params.IGNORE_TESTS,
)
}
}
stage('Checkout') {
steps {
- checkout([$class: 'GitSCM',
- branches: [[name: params.GIT_TAG]],
- doGenerateSubmoduleConfigurations: false,
- extensions: [],
- gitTool: 'Default',
- submoduleCfg: [],
- userRemoteConfigs: [[url: 'https://forge.softwareheritage.org/source/{{repo_name}}.git']]
+ checkout([
+ $class: 'GitSCM',
+ {%- if gitlab_project %}
+ userRemoteConfigs: [[
+ name:'origin', url: '{{gitlab_url}}/{{gitlab_project_name}}.git',
+ refspec: '+refs/tags/*:refs/remotes/origin/tags*'
+ ]],
+ branches: [[
+ name: "${params.GIT_TAG}"
+ ]],
+ extensions: [[$class: 'CloneOption', honorRefspec: true]],
+ {%- else %}
+ userRemoteConfigs: [[url: 'https://forge.softwareheritage.org/source/{{repo_name}}.git']],
+ branches: [[name: params.GIT_TAG]],
+ {%- endif %}
+ doGenerateSubmoduleConfigurations: false,
+ extensions: [],
+ gitTool: 'Default',
+ submoduleCfg: [],
])
sh '''rm -rf dist/'''
}
}
stage('Build') {
steps {
withCredentials([
string(credentialsId: 'sentry-auth-token',
variable: 'SENTRY_AUTH_TOKEN')]) {
sh """
# Build javascript assets
if [ -f yarn.lock ]; then
yarn install --frozen-lockfile
yarn build
fi
# Build java assets
if [ -d java ]; then
for pom in \$( find java/ -name pom.xml ) ; do
mvn -f \$pom compile assembly:single
done
fi
python3 setup.py sdist ${BDIST_WHEEL}
"""
archiveArtifacts allowEmptyArchive: true,
artifacts: 'dist/*',
fingerprint: true
}
}
}
+ {%- if not gitlab_project %}
stage('Publish') {
when {
anyOf {
expression { return params.FORCE_UPLOAD }
expression {
LASTV=sh(returnStdout: true,
script:"curl -s https://${params.PYPI_HOST}/pypi/${module_name}/json | jq -r .info.version || true").trim()
return 'v'+LASTV != params.GIT_TAG
}
}
}
steps {
withCredentials([
usernamePassword(credentialsId: PYPI_UPLOAD_HOST,
usernameVariable: 'TWINE_USERNAME',
passwordVariable: 'TWINE_PASSWORD')]) {
sh "python3 -m twine upload --verbose --repository-url https://${PYPI_UPLOAD_HOST}/legacy/ dist/*"
}
}
}
+ {%- endif %}
}
post {
cleanup {
cleanWs()
}
}
}
diff --git a/jobs/templates/swh-pypi.yaml b/jobs/templates/swh-pypi.yaml
index 9d4cc07..27123f3 100644
--- a/jobs/templates/swh-pypi.yaml
+++ b/jobs/templates/swh-pypi.yaml
@@ -1,52 +1,58 @@
-- job-template:
+- job-template: &pypi_upload
name: "{name}/pypi-upload"
display-name: "Upload to PyPI"
docker_image: tox
include_bdist: true
project-type: pipeline
sandbox: true
+ gitlab_project: false
properties:
- build-discarder:
artifact-num-to-keep: 10
- copyartifact:
- projects: '*'
+ projects: "*"
parameters:
- git-parameter:
name: GIT_TAG
description: git tag to process
type: PT_TAG
sortMode: DESCENDING_SMART
selectedValue: TOP
- choice:
name: PYPI_HOST
description: |
PyPI registry to publish to when the built revision is a tag. There
must exist a usernamePassword credential object with that name.
The PyPI JSON API endpoint is built as https://$PYPI_HOST/pypi/<name>/json
choices:
- test.pypi.org
- pypi.org
default: test.pypi.org
- bool:
name: FORCE_UPLOAD
default: false
description: |
Force uploading python packages on the chosen PYPI registry, even if
the package already exists.
- bool:
name: SKIP_TESTS
default: false
description: |
Do not run tests on the repository.
- bool:
name: IGNORE_TESTS
default: false
description: |
Proceed even if the tests are failing on the repository.
- bool:
name: INCLUDE_BDIST
- default: '{include_bdist}'
+ default: "{include_bdist}"
description: |
Include a binary distribution in the PyPI upload.
- dsl:
- !include-jinja2: swh-pypi.groovy.j2
+ dsl: !include-jinja2: swh-pypi.groovy.j2
+
+- job-template:
+ name: "{name}/gitlab-pypi-upload"
+ display-name: "Upload to PyPI (GitLab)"
+ gitlab_project: true
+ <<: *pypi_upload
diff --git a/jobs/tools/jenkins-jobs-builder.yaml b/jobs/tools/jenkins-jobs-builder.yaml
index 57e2d87..45d5c3e 100644
--- a/jobs/tools/jenkins-jobs-builder.yaml
+++ b/jobs/tools/jenkins-jobs-builder.yaml
@@ -1,58 +1,69 @@
- job:
name: jenkins-tools/swh-jenkins-jobs-builder
project-type: pipeline
description: Update jenkins jobs
node: built-in
auth-token: "ph4br1cat0r"
properties:
- build-discarder:
days-to-keep: 7
dsl: |
pipeline {
agent any
stages {
stage('Checkout Repository') {
steps {
checkout([
$class: 'GitSCM',
branches: [[name: 'gitlab-integration']],
userRemoteConfigs: [[
url: "http://forge.softwareheritage.org/source/swh-jenkins-jobs.git",
]],
])
}
}
stage('Update Jenkins jobs') {
steps {
sh('tox -- update --delete-old')
}
}
stage('Setup jenkins integration on gitlab') {
steps {
script {
projects = readYaml(file: 'jobs/swh-packages.yaml')
for (project in projects) {
if (project.containsKey("project")) {
def jenkinsFolder = project.get('project').get('name')
def repoName = project.get('project').get('repo_name')
build(
job: '/jenkins-tools/gitlab-jenkins-integration',
parameters: [
string(name: 'jenkins_job_name', value: "${jenkinsFolder}/gitlab-tests"),
string(name: 'gitlab_project', value: "swh/devel/${repoName}"),
],
wait: false,
)
+ build(
+ job: '/jenkins-tools/gitlab-jenkins-integration',
+ parameters: [
+ string(name: 'jenkins_job_name', value: "${jenkinsFolder}/gitlab-incoming-tag"),
+ string(name: 'gitlab_project', value: "swh/devel/${repoName}"),
+ booleanParam(name: 'push_events', value: false),
+ booleanParam(name: 'merge_request_events', value: false),
+ booleanParam(name: 'tag_push_events', value: true),
+ ],
+ wait: false,
+ )
}
}
}
}
}
}
}
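
Note: the readYaml() step above parses jobs/swh-packages.yaml into a list of single-key maps, which is why the loop checks containsKey("project"): the leading job-group entry has no such key and is skipped, while each project entry supplies the name and repo_name used to build the integration parameters. A trimmed sketch of that structure, reusing entries from the first file in this diff:

  - job-group:
      name: "swh-jobs-{name}"
      # ...
  - project:
      name: DAUTH          # -> jenkinsFolder
      repo_name: swh-auth  # -> repoName
      # ...
  - project:
      name: DCORE
      repo_name: swh-core
      # ...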
