Jenkins MultiBranch Pipeline: Select to build only 2 specific branches

I have a Jenkins MultiBranch project and I want the test cycle to run only on two specific branches: master and dev. I tried adding the following to all stages:
when { anyOf { branch 'master'; branch 'dev' } }
but the only thing I managed to achieve was to deactivate the runs on all branches.
Here is my full Jenkinsfile:
pipeline {
    agent any
    triggers {
        cron('H 0 * * *')
    }
    options {
        disableConcurrentBuilds()
    }
    stages {
        stage('Prepare env') {
            when { anyOf { branch 'master'; branch 'dev' } }
            steps {
                sh 'rm -rf venv'
                sh 'rm -rf "${WORKSPACE}/uploads"'
                sh 'rm -rf "${WORKSPACE}/downloads"'
                sh 'mkdir "${WORKSPACE}/uploads"'
                sh 'mkdir "${WORKSPACE}/downloads"'
                catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
                    sh 'docker kill $(docker ps -q)'
                    sh 'docker rm $(docker ps -a -q)'
                    sh 'docker volume rm $(docker volume ls -q)'
                }
            }
        }
        stage('Start Services') {
            when { anyOf { branch 'master'; branch 'dev' } }
            steps {
            }
        }
        stage('Test Common') {
            when { anyOf { branch 'master'; branch 'dev' } }
            steps {
            }
        }
        stage('Test Validations') {
            when { anyOf { branch 'master'; branch 'dev' } }
            steps {
            }
        }
        stage('Test CSV Issuance') {
            when { anyOf { branch 'master'; branch 'dev' } }
            steps {
            }
        }
        stage('Test XLS Issuance') {
            when { anyOf { branch 'master'; branch 'dev' } }
            steps {
            }
        }
        stage('Clean env') {
            when { anyOf { branch 'master'; branch 'dev' } }
            steps {
                sh 'rm -rf venv'
                sh 'rm -rf "${WORKSPACE}/uploads"'
                sh 'rm -rf "${WORKSPACE}/downloads"'
                catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
                    sh 'docker kill $(docker ps -q)'
                    sh 'docker rm $(docker ps -a -q)'
                    sh 'docker volume rm $(docker volume ls -q)'
                }
            }
        }
    }
}

Can you post the full pipeline you have?
You would use the when block on the stages you want to run only on the two branches, e.g.:
pipeline {
    agent any
    stages {
        stage("Testing") {
            when {
                anyOf {
                    branch 'master'
                    branch 'dev'
                }
            }
            steps {
                echo "run testing"
            }
        }
        stage("everything") {
            steps {
                echo "run on all branches"
            }
        }
    }
}
Tested pipeline:
pipeline {
    agent any
    stages {
        stage("stage") {
            when { anyOf { branch 'master'; branch 'dev' } }
            steps {
                echo "Hello"
            }
        }
    }
}
On master:
[Pipeline] stage
[Pipeline] { (stage)
[Pipeline] echo
Hello
[Pipeline] }
[Pipeline] // stage
On branch Fish:
[Pipeline] stage
[Pipeline] { (stage)
Stage "stage" skipped due to when conditional
[Pipeline] }
[Pipeline] // stage
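A note on reducing the repetition: rather than copying the same when block into every stage, recent versions of Declarative Pipeline support sequential nested stages, so the branch condition can be declared once on a parent stage. A minimal sketch (stage names and steps here are placeholders, not taken from the question):
pipeline {
    agent any
    stages {
        stage('Only on master or dev') {
            when { anyOf { branch 'master'; branch 'dev' } }
            // Nested sequential stages are skipped together when the parent's when fails
            stages {
                stage('Prepare env') {
                    steps { echo 'prepare' }
                }
                stage('Test Common') {
                    steps { echo 'test' }
                }
            }
        }
    }
}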

Related

How to continue Jenkins stage on failure

I would like to run a pipeline with 2 stages. If any stage fails, the next stage should still be started (not skipped). Currently, if the 1st stage fails, the next stage is skipped.
Thank you for any help.
pipeline {
    options { buildDiscarder(logRotator(numToKeepStr: '5', artifactNumToKeepStr: '5')) }
    agent { label 'docker_gradle' }
    triggers {
        cron(env.BRANCH_NAME == 'develop' || env.BRANCH_NAME == 'master' ? '@daily' : '')
    }
    stages {
        stage('Init') {
            steps {
                sh 'chmod +x gradlew'
            }
        }
        stage('task1') {
            when { anyOf { branch 'feature/*'; branch 'develop' } }
            steps {
                container(name: 'gradle') {
                    sh 'gradle clean task1'
                }
            }
        }
        stage('task2') {
            when { anyOf { branch 'feature/*'; branch 'develop' } }
            steps {
                container(name: 'gradle') {
                    sh 'gradle clean task2'
                }
            }
        }
    }
    post {
        always {
            script {
                currentBuild.result = currentBuild.result ?: 'SUCCESS'
                cucumber buildStatus: 'UNSTABLE',
                    failedFeaturesNumber: 1,
                    failedScenariosNumber: 1,
                    skippedStepsNumber: 1,
                    failedStepsNumber: 1,
                    reportTitle: 'Report',
                    fileIncludePattern: '**/cucumber.json',
                    sortingMethod: 'ALPHABETICAL',
                    trendsLimit: 100
            }
        }
    }
}
1. You can change sh 'gradle clean task1' to
sh 'gradle clean task1 || true'
This will make sh return success even if the script fails.
2. You can also use try/catch.
Check this link: https://www.jenkins.io/doc/book/pipeline/syntax/#flow-control
For example:
stage("task1"){
steps {
script {
try {
sh 'gradle clean task1'
} catch (err) {
echo err.getMessage()
}
}
}
}
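Another option, already used in the first Jenkinsfile on this page, is the catchError step: it marks the stage (and optionally the build) as failed while letting the pipeline continue, so the next stage still runs. A minimal sketch:
stage("task1") {
    steps {
        // The stage is marked FAILURE if the sh fails, but the build result stays SUCCESS
        // and execution continues with the following stages
        catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
            sh 'gradle clean task1'
        }
    }
}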

Not Able to run python command inside jenkins pipeline script

I am using Jenkins inside a Docker container, started with the following commands:
docker pull jenkins/jenkins
docker run -p 8080:8080 --name=jenkins-master jenkins/jenkins
I am getting this error:
calc.py/var/jenkins_home/workspace/python calculator#tmp/durable-b7e99e01/script.sh: 1: /var/jenkins_home/workspace/python calculator#tmp/durable-b7e99e01/script.sh: calc.py: not found
Repository link: https://github.com/jatin633/Calculator.git
pipeline {
    agent any
    stages {
        stage('Build') {
            steps {
                echo 'Building the application....'
            }
        }
        stage('Test') {
            steps {
                echo 'Testing the application....'
                git branch: 'python', url: 'https://github.com/jatin633/Calculator.git'
                sh 'python calc.py'
                sh 'python calculator_unittest.py'
            }
        }
        stage('Deploy') {
            steps {
                echo 'Deploy the application....'
            }
        }
    }
}
I think the calc.py file is not in the repository root, so maybe try to change:
stage('Test') {
    steps {
        echo 'Testing the application....'
        git branch: 'python', url: 'https://github.com/jatin633/Calculator.git'
        sh 'python calc.py'
        sh 'python calculator_unittest.py'
    }
}
into:
stage('Checkout') {
    steps {
        echo 'Testing the application....'
        git branch: 'python', url: 'https://github.com/jatin633/Calculator.git'
    }
}
stage('Run Program') {
    steps {
        // dir() must sit inside steps; 'sources' is the assumed subdirectory holding the scripts
        dir('sources') {
            sh 'python calc.py'
            sh 'python calculator_unittest.py'
        }
    }
}
Let me know if it helped. Regards!
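If it is not obvious where calc.py actually ends up in the workspace, a quick debugging aid (not part of the final pipeline) is to list the checked-out files before calling python, and then point dir() at the right folder:
stage('Debug workspace') {
    steps {
        // Print the workspace layout so the correct path to calc.py can be passed to dir()
        sh 'ls -R "${WORKSPACE}"'
    }
}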

How to run nodeJS tests with Jenkins

I am learning about Jenkins CI and using a pipeline to stage my jobs. I've run into a halt where my tests aren't running. Please take a look at my "Jenkins Image" link. As you can see, it's stuck in the --coverage table. Typically, if I were to run my tests on my local machine, I would have to enter the 'w' command to get node to run all tests; however, I don't think it would be the same in a Jenkins setting (see the note after the Jenkinsfile below).
Jenkins Image
Jenkinsfile
def gv
pipeline {
    agent any
    tools { nodejs "node" }
    parameters {
        choice(name: 'VERSION', choices: ['1.1.0', '1.2.0', '1.3.0'], description: '')
        booleanParam(name: 'executeTests', defaultValue: true, description: '')
    }
    stages {
        stage("init") {
            steps {
                script {
                    gv = load "script.groovy"
                    CODE_CHANGES = gv.getGitChanges()
                }
            }
        }
        stage("build frontend") {
            steps {
                dir("client") {
                    sh 'npm install'
                    echo 'building client'
                }
            }
        }
        stage("build backend") {
            steps {
                dir("server") {
                    sh 'npm install'
                    echo 'building server...'
                }
            }
        }
        stage("test") {
            when {
                expression {
                    script {
                        env.BRANCH_NAME.toString().equals('feature-jenkins') && CODE_CHANGES == false
                    }
                }
            }
            steps {
                dir("/client") {
                    sh 'npm test'
                    echo 'testing application'
                }
            }
        }
        stage("deploy") {
            steps {
                script {
                    gv.deployApp()
                }
            }
        }
    }
}
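A note on the hang: a likely cause is the test runner sitting in interactive watch mode after printing the coverage table, waiting for keyboard input that never comes on a Jenkins agent. Assuming the client uses Jest (for example via react-scripts or a plain jest test script), a non-interactive invocation in the test stage avoids this. A sketch under that assumption:
stage("test") {
    steps {
        dir("client") {
            // CI=true makes CRA/Jest run once instead of entering watch mode;
            // --watchAll=false is the explicit Jest flag with the same effect
            sh 'CI=true npm test -- --watchAll=false'
            echo 'testing application'
        }
    }
}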

Jenkins - Parallel stages with docker for PHP

I'm new to Jenkins, and I would like to get almost the same behavior as in GitLab CI for one of my PHP projects.
I use Docker to test my project on several PHP versions.
What I want:
- Running build and test in parallel.
- Build creates my application; sources come from a git repository, and I run the composer install command.
- The Dockerfile is stored in /var/lib/jenkins/Docker.
- My Dockerfile has a parameter (PHP_VERSION) which allows me to choose the PHP version I want.
- customWorkspace seems to work.
Here is my Jenkinsfile:
updateGitlabCommitStatus name: 'build', state: 'pending'
pipeline {
    agent none
    post {
        failure {
            updateGitlabCommitStatus name: 'build', state: 'failed'
        }
        success {
            updateGitlabCommitStatus name: 'build', state: 'success'
        }
    }
    stages {
        stage('build') {
            parallel {
                stage('build-php5.4') {
                    agent {
                        dockerfile {
                            additionalBuildArgs '--build-arg PHP_VERSION=54'
                            dir '/var/lib/jenkins/Docker'
                            customWorkspace './build-php5.4'
                        }
                    }
                    steps {
                        sh 'pwd'
                        sh 'ls'
                        sh 'rm -Rf composer.lock vendor'
                        sh 'composer install'
                    }
                }
                stage('build-php7.0') {
                    agent {
                        dockerfile {
                            additionalBuildArgs '--build-arg PHP_VERSION=70'
                            dir '/var/lib/jenkins/Docker'
                            customWorkspace './build-php7.0'
                        }
                    }
                    steps {
                        sh 'pwd'
                        sh 'rm -Rf composer.lock vendor'
                        sh 'composer install'
                    }
                }
            }
        }
        stage('tests') {
            parallel {
                stage('test-php5.4') {
                    agent {
                        dockerfile {
                            additionalBuildArgs '--build-arg PHP_VERSION=54'
                            dir '/var/lib/jenkins/Docker'
                            customWorkspace './build-php5.4'
                        }
                    }
                    steps {
                        sh 'pwd'
                        sh 'php --version'
                        sh 'php vendor/phpunit/phpunit/phpunit tests'
                    }
                }
                stage('test-php7.0') {
                    agent {
                        dockerfile {
                            additionalBuildArgs '--build-arg PHP_VERSION=70'
                            dir '/var/lib/jenkins/Docker'
                            customWorkspace './build-php7.0'
                        }
                    }
                    steps {
                        sh 'pwd'
                        sh 'php --version'
                        sh 'php vendor/phpunit/phpunit/phpunit tests'
                    }
                }
            }
        }
    }
}
And here is the result:
It looks good, but it isn't, and I don't really understand the underlying behavior.
As you can see, the test-php5.4 stage uses the last created Docker container.
I'm sure I'm wrong on a lot of steps, but do you think I can do it this way?
OK, I found the main problem.
The issue is that I use the same Dockerfile, just with different build parameters.
If I create one Dockerfile for PHP 5.4 and another one for PHP 7.0, the stages are parallelized correctly.
updateGitlabCommitStatus name: 'build', state: 'pending'
pipeline {
    agent none
    post {
        failure {
            updateGitlabCommitStatus name: 'build', state: 'failed'
        }
        success {
            updateGitlabCommitStatus name: 'build', state: 'success'
        }
    }
    stages {
        stage('build') {
            parallel {
                stage('build-php5.4') {
                    agent {
                        dockerfile {
                            dir '/var/lib/jenkins/Docker'
                            filename 'Dockerfile-php5.4'
                            customWorkspace './build-php5.4'
                        }
                    }
                    steps {
                        sh 'rm -Rf composer.lock vendor'
                        sh 'composer install'
                    }
                }
                stage('build-php7.0') {
                    agent {
                        dockerfile {
                            dir '/var/lib/jenkins/Docker'
                            filename 'Dockerfile-php7.0'
                            customWorkspace './build-php7.0'
                        }
                    }
                    steps {
                        sh 'rm -Rf composer.lock vendor'
                        sh 'composer install'
                    }
                }
            }
        }
        stage('tests') {
            parallel {
                stage('test-php5.4') {
                    agent {
                        dockerfile {
                            dir '/var/lib/jenkins/Docker'
                            filename 'Dockerfile-php5.4'
                            customWorkspace './build-php5.4'
                        }
                    }
                    steps {
                        sh 'php --version'
                        sh 'php vendor/phpunit/phpunit/phpunit tests'
                    }
                }
                stage('test-php7.0') {
                    agent {
                        dockerfile {
                            dir '/var/lib/jenkins/Docker'
                            filename 'Dockerfile-php7.0'
                            customWorkspace './build-php7.0'
                        }
                    }
                    steps {
                        sh 'php --version'
                        sh 'php vendor/phpunit/phpunit/phpunit tests'
                    }
                }
            }
        }
    }
}
This seems to work! :)
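For completeness, the two per-version Dockerfiles referenced above (Dockerfile-php5.4 and Dockerfile-php7.0) could be as simple as the sketch below. The php:5.4-cli and php:7.0-cli base images and the placeholder for installing Composer are assumptions, not taken from the original post; adapt them to whatever the parameterized Dockerfile did with PHP_VERSION.
# Dockerfile-php5.4 (assumed contents)
FROM php:5.4-cli
# ... install Composer and any PHP extensions the project needs here ...

# Dockerfile-php7.0 (assumed contents)
FROM php:7.0-cli
# ... install Composer and any PHP extensions the project needs here ...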

Jenkins Pipeline Custom Scripts (shutdown application)

I have several Jenkins pipelines that, in my case, call "docker-compose up" at the end of the build to run the containers/application. Now, I don't need the containers/application to be up all the time, and I would like to have a way, from the Jenkins pipeline page, to shut down (docker-compose stop) the application to free resources for other builds. Do you know any good way to do this?
You can declare a choice parameter (e.g. stop_containers) with values YES and NO in the Jenkins job that is responsible for stopping the containers.
Then, in the Build section, select Execute shell and add the following script:
#!/bin/bash
if [ "$stop_containers" = "YES" ]
then
    # Command for stopping containers....
else
    echo "Do Nothing."
fi
Now, whenever you run your job, it will ask whether you want to stop the containers. Choose YES if you want to stop them.
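A minimal sketch of what the stop command inside that if branch could look like, assuming the compose file lives at a known location (the path below is hypothetical):
# Hypothetical path; point -f at the compose file used by "docker-compose up"
docker-compose -f /path/to/your/project/docker-compose.yml stop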
If it is a Pipeline job, then you can define a stage to perform the stop operation.
stage('stop containers') {
    properties(
        [parameters([choice(choices: ["YES", "NO"].join("\n"),
                            description: 'Some choice parameter',
                            name: 'STOP_CONTAINERS')])])
    agent { label 'some-node' }
    when {
        expression {
            return env.STOP_CONTAINERS == 'YES'
        }
    }
    steps {
        // command to stop containers
    }
}
This is the way I have done it, following
Converting Conditional Build Steps to Jenkins Pipeline.
pipeline {
    agent any
    parameters {
        choice(
            choices: 'full_deploy_and_run_docker_container\nrun_docker_containers\nstop_docker_containers',
            description: '',
            name: 'REQUESTED_ACTION')
    }
    tools {
        maven 'Maven x.x.x'
        jdk 'jdkYuWZ'
    }
    environment {
        SVN_URL = 'http://svn.myexample.com/svn/myproject/trunk/'
        DOCKER_PROJECT_DIR = '/home/myuser/docker/containers/myproject/trunk'
    }
    stages {
        stage('Initialize') {
            steps {
                sh '''
                    echo "Initialize..."
                '''
            }
        }
        stage('Checkout') {
            when {
                expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' }
            }
            steps {
                checkout([$class: 'SubversionSCM', additionalCredentials: [], excludedCommitMessages: '', excludedRegions: '', excludedRevprop: '', excludedUsers: '', filterChangelog: false, ignoreDirPropChanges: false, includedRegions: '', locations: [[credentialsId: ' something here ', depthOption: 'infinity', ignoreExternalsOption: true, local: '.', remote: "$SVN_URL"]], workspaceUpdater: [$class: 'UpdateUpdater']])
            }
        }
        stage('Build') {
            when {
                expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' }
            }
            steps {
                sh 'mvn clean package'
            }
        }
        stage('Test') {
            when {
                expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' }
            }
            steps {
                sh 'echo "Other tests..."'
            }
        }
        stage('Deploy') {
            when {
                expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' }
            }
            steps {
                sh 'echo "The deploy here"'
            }
        }
        stage('Docker compose run') {
            when {
                expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' ||
                             params.REQUESTED_ACTION == 'run_docker_containers'
                }
            }
            steps {
                sh '''
                    cd $DOCKER_PROJECT_DIR
                    docker-compose up -d
                '''
            }
        }
        stage('Docker compose stop') {
            when {
                expression { params.REQUESTED_ACTION == 'stop_docker_containers' }
            }
            steps {
                sh '''
                    cd $DOCKER_PROJECT_DIR
                    docker-compose stop
                '''
            }
        }
        stage('Cleanup') {
            steps {
                cleanWs()
            }
        }
    }
}
A simple way: you can use the sh step in a Jenkins pipeline for this.
node {
    stage('stop') {
        // Each sh step starts a fresh shell, so the remote cd and docker-compose stop
        // have to be part of the same ssh command
        sh "ssh root@host.com 'cd /diretorio_do_docker-compose/ && docker-compose stop'"
    }
}
