Reading .json file from Workspace in Jenkins with Groovy script - jenkins

I want to read a .json file in the stage Prepare Artifacts, which is there in workspace.
How can I read the workspace file path in the stage's Groovy script and run the code?
The below code I used:
checkout scm: [
$class: 'GitSCM',
branches: [[name: "FETCH_HEAD"]],
extensions: [
[$class: 'RelativeTargetDirectory',
relativeTargetDir: repo2],
[$class: 'WipeWorkspace'],
[$class: 'CloneOption',
depth: 1,
noTags: true,
reference: '',
shallow: true,
honorRefspec: true],
[$class: 'CheckoutOption',
timeout: 30]],
gitTool: 'Default',
submoduleCfg: [],
userRemoteConfigs: [
[credentialsId: 'JENKINS_LOGIN',
]
]
]
def releasePackages = readJSON file: "./BuildAutomation/Jenkins/Pipeline//files/release_package.json"
println releasePackages
}
}
}
stage('Prepare Variable') {
steps {
script{
for(file in releasePackages[buildMod]['India']) {
bat("xcopy ..\\CALReleaseOutput\\${file} ..\\${IndiaReleaseOutputFolder}\\${file} /I /E /Y")
}
for(file in releasePackages[buildMod]['Russia']) {
bat("xcopy ..\\CALReleaseOutput\\${file} ..\\${RussiaReleaseOutputFolder}\\${file} /I /E /Y")
}
zip archive: false, dir: "..\\${b2bReleaseOutputFolder}", glob: '', zipFile: "..\\CALReleaseOutput_${tagFoldername}_B2B.zip"
}
}
}
}
}
}
}
When I run the above, I get the error message shown below.
Console Output

If you are going to share a variable between stages, you have to define it as a global variable. Hence, try defining releasePackages outside the pipeline block. The following is an example.
// Declare the variable OUTSIDE the pipeline block so it is in scope for every
// stage; a `def` inside one stage's script block is local to that block only.
def releasePackages
pipeline {
agent any
stages {
// First stage: populate the shared variable from the JSON file.
stage('SetVariable') {
steps {
script {
// readJSON is provided by the Pipeline Utility Steps plugin; the path is
// resolved relative to the workspace root.
releasePackages = readJSON file: "./BuildAutomation/Jenkins/Pipeline//files/release_package.json"
echo "$releasePackages"
}
}
}
// Second stage: the value assigned above is still visible here because the
// variable lives at script (global) scope.
stage('UseVariable') {
steps {
echo "$releasePackages"
}
}
}
}

Related

Jenkins Pipeline deadlock issue

I am using Jenkins 2.204.5. I have 2 files with the same name (member.py) in different folders, and when I make any change to these files Jenkins runs 2 jobs (Admin Deployment and Member Deployment). Both jobs end up waiting/blocked — each waiting on the other to complete — and they get into a deadlock. I have to manually kill one of the jobs to break the deadlock. I have included the Jenkinsfile below, hoping to see whether any changes would help me resolve this issue. Please advise.
// LambdaPosts pipeline: builds on 'master', then hops to a 'lambda' node for
// build, test, and staged deployment with manual approval gates.
//
// NOTE(review): the pipeline-level agent holds a 'master' executor for the
// whole run, while each stage ALSO requests a 'lambda' executor via the nested
// node() blocks. With two builds triggered at once (member.py changed in two
// folders), each build can hold one executor and wait forever for the other —
// presumably this is the deadlock described in the question; confirm the
// executor counts on both nodes.
pipeline {
agent { label 'master' }
stages {
stage ('build') {
steps {
slackSend channel: '#dev', color: 'good', message: 'LambdaPosts pipeline started!'
// Wipe the master workspace, then check out three repos side by side
// (the third is a sparse checkout of just docker/ and docker-compose.yml).
deleteDir()
checkout([$class: 'GitSCM', branches: [[name: '*/master']], doGenerateSubmoduleConfigurations: false, extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'LambdaPosts']], submoduleCfg: [], userRemoteConfigs: [[credentialsId: 'XXXX-XXXX-XXXX-XXXX', url: 'git#XX.XXXXX.XX:XXXXXX/LambdaPosts.git']]])
checkout([$class: 'GitSCM', branches: [[name: '*/master']], doGenerateSubmoduleConfigurations: false, extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'python-bc']], submoduleCfg: [], userRemoteConfigs: [[credentialsId: 'XXXX-XXXX-XXXX-XXXX', url: 'git#XX.XXXXX.XX:XXXXXX/python-bc.git']]])
checkout([$class: 'GitSCM', branches: [[name: '*/master']], doGenerateSubmoduleConfigurations: false, extensions: [[$class: 'SparseCheckoutPaths', sparseCheckoutPaths: [[path: 'docker'], [path: 'docker-compose.yml']]], [$class: 'RelativeTargetDirectory', relativeTargetDir: 'bc']], submoduleCfg: [], userRemoteConfigs: [[credentialsId: 'XXXXXX-XXX-XX-key', url: 'git#XX.XXXXX.XX:XXXXXX/bc.git']]])
// Stash everything so it can be restored on the 'lambda' node below.
stash name: 'lp', useDefaultExcludes: false
// Nested node: acquires a SECOND executor while 'master' is still held
// (see review note above).
node ( 'lambda' ) {
dir('bc') {
sh """docker-compose down || true"""
}
// Best-effort cleanup of the lambda workspace; '|| true' makes each
// step non-fatal on purpose.
sh """sudo chown -R ubuntu:ubuntu * || true"""
sh """sudo rm -rf * || true"""
deleteDir()
sh """rm -rf ~/.local/"""
// Restore the checkout stashed on master, then build.
unstash 'lp'
sh """mkdir -p builds"""
sh """mv python-bc LambdaPosts/"""
sh """cd LambdaPosts && python3 -m pip install pytest boto3"""
sh """cd LambdaPosts && ./jenkins_build.sh"""
}
}
}
stage ('pytest') {
steps {
node ( 'lambda' ) {
sh """cd LambdaPosts/python-bc && AWS_DEFAULT_REGION=us-east-1 SETUP=jenkins DB_DATABASE=dev_bc python3 -m pytest"""
}
}
}
stage ('deploy') {
steps {
// Manual gate: abort if nobody approves within 10 minutes.
timeout(time: 600, unit: 'SECONDS') {
input 'Do you want to deploy to dev?'
}
node ( 'lambda' ) {
sh """cd LambdaPosts && ./jenkins_deploy.sh dev"""
sh """cd LambdaPosts && ./jenkins_deploy.sh test"""
sh """cd LambdaPosts && ./jenkins_deploy.sh beta"""
}
slackSend channel: '#dev', color: 'good', message: 'LambdaPosts ready for live deployment. If you would like to release, click "Proceed" within the next 10 minutes: https://jenkins.XXXXXX.com/job/LambdaPosts%20pipeline/lastBuild/console'
// Second manual gate before the live deployment.
timeout(time: 600, unit: 'SECONDS') {
input 'Do you want to deploy to live?'
}
node ( 'lambda' ) {
sh """cd LambdaPosts && ./jenkins_deploy.sh live"""
}
slackSend channel: '#dev', color: 'good', message: 'LambdaPosts live deployment completed!'
}
}
}
}

restarting one stage in jenkins pipeline wiping out existing directory

I am using jenkins declarative pipeline jenkinsfile for our project. we want to try the option restart at stage.
// Declarative pipeline: clean the workspace, check out three repos into
// sibling directories, then run the automation repo's main.sh for the
// "build_initial_commit" and "deploy" tasks.
pipeline {
    agent { label 'worker' }
    stages {
        stage('clean directory') {
            steps {
                cleanWs()
            }
        }
        stage('checkout') {
            steps {
                // FIX: relativeTargetDir and url values must be quoted strings;
                // the originals were bare words, which only compiles if
                // same-named variables happen to exist elsewhere. The remotes
                // below assume the usual git@host:org/repo.git form — confirm
                // against the real repositories.
                checkout([$class: 'GitSCM', branches: [[name: 'develop']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'devops'], [$class: 'LocalBranch', localBranch: "**"]], userRemoteConfigs: [[credentialsId: 'xxxxxx', url: 'git@github.com:test/devops.git']]])
                checkout([$class: 'GitSCM', branches: [[name: 'develop']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'harness'], [$class: 'LocalBranch', localBranch: "**"]], userRemoteConfigs: [[credentialsId: 'xxxxxx', url: 'git@github.com:test/harness.git']]])
                checkout([$class: 'GitSCM', branches: [[name: 'develop']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'automation'], [$class: 'LocalBranch', localBranch: "**"]], userRemoteConfigs: [[credentialsId: 'xxxxxx', url: 'git@github.com:test/automation.git']]])
            }
        }
        stage('build initial commit to release train') {
            steps {
                sh '''#!/bin/bash
export TASK="build_initial_commit"
cd automation
sh main.sh
'''
            }
        }
        stage('deploy application') {
            steps {
                // FIX: the shebang and the export were fused onto one line
                // ('#!/bin/bashexport TASK="deploy"'), so the interpreter line
                // was malformed and TASK was never exported; the shebang must
                // be on its own line.
                sh '''#!/bin/bash
export TASK="deploy"
cd automation
sh main.sh
'''
            }
        }
    }
}
and in jenkins I am using 'Pipeline script from SCM'. Jenkinsfile is present in automation.git repo (which is also defined in checkout stage)
Whenever I restart the pipeline from the 3rd stage in the GUI, the workspace directory automatically gets cleaned up and only automation.git is checked out,
and the run fails because the other cloned repos were wiped...
how to handle this.. I want to restart the stage without wiping out the workspace dir..
if we just want to run the 3rd step 'deploy application' ..
I am not able to do , as the step depends on all 3 repos.. and
while restarting only 3rd stage the workspace is getting wiped out.. and as checkout is done in 1st stage(skipped) ... job is failing
how do I run only 3rd stage with retaining the old workspace ..
How about this:
SHOULD_CLEAN = true
pipeline {
agent { label 'worker' }
stages {
stage('clean directory') {
steps {
script {
if (SHOULD_CLEAN) {
cleanWs()
SHOULD_CLEAN = false
} else {
echo 'Skipping workspace clean'
}
}
}
}

how to fail the jenkins build if any test cases are failed using findText plugin

I have a stage in Jenkins as follows, How do I mark the build to fail or unstable if there is a test case failure? I generated the script pipeline for textfinder plugin but it is not working. "findText alsoCheckConsoleOutput: true, regexp: 'There are test failures.', unstableIfFound: true" not sure where to place the textFinder regex.
// Pipeline that runs the smoke-test suite on a dedicated agent via the Maven
// Surefire report plugin.
// NOTE(review): per the question, the build stays successful even when tests
// fail — hence the attempt to add a text-finder check; the findText step
// belongs in a post { } section after the steps block, not inside steps.
pipeline {
agent none
tools {
maven 'maven_3_6_0'
}
options {
timestamps ()
// Keep only the last 5 builds.
buildDiscarder(logRotator(numToKeepStr:'5'))
}
environment {
JAVA_HOME = "/Users/jenkins/jdk-11.0.2.jdk/Contents/Home/"
imageTag = ""
}
parameters {
choice(name: 'buildEnv', choices: ['dev', 'test', 'preprod', 'production', 'prodg'], description: 'Environment for Image build')
choice(name: 'ENVIRONMENT', choices: ['dev', 'test', 'preprod', 'production', 'prodg'], description: 'Environment for Deploy')
}
stages {
// Check out the test branch and run the surefire smoke suite.
stage("Tests") {
agent { label "xxxx_Slave"}
steps {
checkout([$class: 'GitSCM', branches: [[name: 'yyyyyyyyyyz']], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: 'zzzzzzzzzzz', url: 'abcdefgh.git']]])
sh'''
cd dashboard
mvn -f pom.xml surefire-report:report -X -Dsurefire.suiteXmlFiles=src/test/resources/smoke_test.xml site -DgenerateReports=false
'''
}
}
}
}
All I did to make this work is as follows:
added a post block of code below the steps block code.
// Post-build check: scan the console log for Surefire's failure marker and
// mark the build UNSTABLE when it is found.
post {
    // FIX: declarative post conditions are lowercase ('success', not 'Success'),
    // and the Text Finder parameter is 'regexp', not 'refexp'. Use 'always'
    // instead of 'success' if the scan should also run after failed steps.
    success {
        findText alsoCheckConsoleOutput: true, regexp: 'There are test failures.', unstableIfFound: true
    }
}

Jenkins pipeline checkout based on parameter provided

I have Jenkins declarative pipeline and want to checkout a branch based on parameter provided
// Map from the ENV choice parameter value to the git branch to check out.
// Defined outside the pipeline block so it is visible inside the stages.
def envToBranch = [:]
envToBranch['dev'] = 'develop'
envToBranch['stg'] = 'stage'
envToBranch['prod'] = 'master'
pipeline {
parameters {
choice(name: 'ENV', choices: ['dev', 'stg', 'prod'], description: 'Application environment')
}
stages {
stage('Checkout') {
steps {
checkout([$class: 'GitSCM',
// NOTE(review): the placeholder below is the open question; a Groovy
// interpolation such as "${envToBranch[params.ENV]}" should resolve the
// map here — checkout parameter values accept Groovy expressions even
// in declarative pipelines. Confirm on a test job.
branches: [[name: '<how-to-access-mapping-here>']],
doGenerateSubmoduleConfigurations: false,
extensions: [
// Sparse checkout: only the ansible/ directory is fetched.
[$class: 'SparseCheckoutPaths', sparseCheckoutPaths:[[$class:'SparseCheckoutPath', path:'ansible/']]]
],
submoduleCfg: [],
userRemoteConfigs: [
[credentialsId: 'my-creds',
url: 'git#github.com:MyOrg/my-repo.git']
]])
}
}
}
}
So how can I access a mapping within checkout directive? Am I able to do the same within script directive?

How to start jobs(parallel) from the main job with different inputs in jenkins?

I am using jenkins and having scripted syntax in jenkinsfile
In the main job, after the source checkout, I need to run another job n times (in parallel) with different inputs.
Any tips to start this?
// Helper: check out `branch` of `repo`, then force-reset the clone to the
// remote state.
// NOTE(review): this function has the same name as the built-in `checkout`
// step. The map-style call inside still reaches the step (the signatures
// differ), but renaming the helper would be safer — confirm in a sandbox.
def checkout(repo, branch) {
checkout(changelog: false,
poll: false,
scm: [$class : 'GitSCM',
branches : [[name: "*/${branch}"]],
doGenerateSubmoduleConfigurations: false,
// NOTE(review): 'recursiveSubmodules' at the GitSCM top level may be
// ignored — submodule handling normally goes in a SubmoduleOption
// extension; verify against the Git plugin docs.
recursiveSubmodules : true,
extensions : [[$class: 'LocalBranch', localBranch: "${branch}"]],
submoduleCfg : [], userRemoteConfigs: [[credentialsId: '', url: "${repo}"]]])
// The $class / credentialsId / variable names below are blank placeholders —
// fill them in before use.
withCredentials([[$class : '',
credentialsId : '',
passwordVariable: '',
usernameVariable: '']]) {
// Discard any local changes and hard-reset to the remote branch tip.
sh "git clean -f && git reset --hard origin/${branch}"
}
}
node("jenkins02") {
stage('Checkout') {
// Calls the helper above with two positional arguments.
checkout gitHubRepo, gitBranch
}
}
We do this by storing all the jobs we want to run in a Map and then pass it into the parallel step for execution. So you just setup the different params and add each definition into the map, then execute.
// Build one closure per downstream job, keyed by job name, then hand the map
// to the parallel step so both run concurrently on their own executors.
Map jobs = [:]
Map jobParams = ['job-1': 'VAL1', 'job-2': 'VAL2']
for (entry in jobParams.entrySet()) {
    String jobName = entry.key
    String paramValue = entry.value
    jobs[jobName] = {
        stage(jobName) {
            node {
                // propagate: false — a downstream failure does not fail this build.
                build(job: "myorg/${jobName}/master",
                      parameters: [new StringParameterValue('PARAM_NAME', paramValue)],
                      propagate: false)
            }
        }
    }
}
parallel(jobs)

Resources