I configured a Jenkins pipeline to build a project that is fetched from GitHub. But I got an error at step 2 - Build image. Then, I tried to add the user admin (of Jenkins) to the group "docker", and I can run the build command successfully without error when logged in as user admin on the Kubernetes master VM; however, it still fails in Jenkins. I used the Blue Ocean plugin for creating the pipeline. Do you know how to fix this?
UPDATE: Please see my jenkinsfile
pipeline {
environment {
registry = "192.168.64.162:5000/justme/myweb"
dockerImage = ""
}
agent any
stages {
stage('Checkout Source') {
steps {
git 'https://github.com/taibc/playjenkins.git'
}
}
stage('Build image') {
steps{
script {
dockerImage = docker.build registry + ":$BUILD_NUMBER"
}
}
}
stage('Push Image') {
steps{
script {
docker.withRegistry( "" ) {
dockerImage.push()
}
}
}
}
stage('Deploy App') {
steps {
script {
kubernetesDeploy(configs: "myweb.yaml", kubeconfigId: "mykubeconfig")
}
}
}
}
}
I resolved this problem by installing Jenkins on another server (not belonging to the Kubernetes cluster). But I ran into another problem when deploying the app, as described at this link: https://github.com/jenkinsci/kubernetes-cd-plugin/issues/122
Here is my YAML file
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
labels:
app: myweb
name: myweb
spec:
replicas: 1
selector:
matchLabels:
app: myweb
template:
metadata:
labels:
app: myweb
spec:
containers:
- image: 192.168.94.162:5000/justme/myweb:1
imagePullPolicy: Always
name: myweb
---
apiVersion: v1
kind: Service
metadata:
labels:
app: myweb
name: myweb
spec:
ports:
- nodePort: 32223
port: 80
protocol: TCP
targetPort: 80
selector:
app: myweb
type: NodePort
Here is my Jenkins script
pipeline {
environment {
registry = "192.168.94.162:5000/justme/myweb"
dockerImage = ""
}
agent any
stages {
stage('Checkout Source') {
steps {
git 'https://github.com/taibc/playjenkins.git'
}
}
stage('Build image') {
steps{
script {
dockerImage = docker.build registry + ":$BUILD_NUMBER"
}
}
}
stage('Push Image') {
steps{
script {
docker.withRegistry( "" ) {
dockerImage.push()
}
}
}
}
stage('Deploy App') {
steps {
script {
kubernetesDeploy(configs: "myweb.yaml", kubeconfigId: "mykubeconfig")
}
}
}
}
}
Related
Hi, I am new to DevOps and I am trying to create a simple Jenkins pipeline in which I need to push a Docker image to Docker Hub. The entire pipeline, including Jenkins, is hosted on Kubernetes.
Below is the Jenkinsfile. I am giving the credentials inline which is bad. I have stored my dockerhub password in Jenkins as a secret text, but not sure how to reference it here.
pipeline {
agent {
kubernetes {
yaml '''
apiVersion: v1
kind: Pod
spec:
containers:
- name: docker
image: docker:latest
command:
- cat
tty: true
volumeMounts:
- mountPath: /var/run/docker.sock
name: docker-sock
volumes:
- name: docker-sock
hostPath:
path: /var/run/docker.sock
'''
}
}
stages {
stage('Clone') {
steps {
container('docker') {
git branch: 'main', changelog: false, poll: false, url: '<my github URL>'
}
}
}
stage('Build-Docker-Image') {
steps {
container('docker') {
sh 'docker build -t <my-dockerhub-username>/testing-image:latest .'
}
}
}
stage('Login-Into-Docker') {
steps {
container('docker') {
sh 'docker login -u <my-dockerhub-username> -p <my-dockerhub-password>'
}
}
}
stage('Push-Images-Docker-to-DockerHub') {
steps {
container('docker') {
sh 'docker push <my-dockerhub-username>/testing-image:latest'
}
}
}
stage('Remove docker Images') {
steps {
container('docker') {
sh 'docker image prune -af'
}
}
}
}
post {
always {
container('docker') {
sh 'docker logout'
}
}
}
}
I tried
stage('Push-Images-Docker-to-DockerHub') {
steps {
withCredentials([string(credentialsId: 'Docker_PW', variable: 'Docker_PW')])
container('docker') {
sh 'docker push <my-dockerhub-username>/testing-image:latest'
}
}
}
But my pipeline fails after coming to this step. Please assist if possible.
Do it this way:
environment {
DOCKERHUB_CREDENTIALS=credentials('dockerhub')
}
stages {
stage('gitclone') {
...
}
stage('Login') {
steps {
sh 'echo $DOCKERHUB_CREDENTIALS_PSW | docker login -u $DOCKERHUB_CREDENTIALS_USR --password-stdin'
}
}
stage('Push') {
...
}
}
Refers to: https://github.com/shazforiot/How-to-Push-docker-image-to-Docker-Hub-using-Jenkins-Pipeline/blob/main/Jenkinsfile
I am trying to run my integration tests with Testcontainers on Jenkins Kubernetes Docker in Docker container.
Testcontainer version: 1.15.3
However, it always fails to get the Container.getMappedPort(X) inside the DinD Container.
It works absolutely fine on my local setup and manages to get the mapped port.
Has anyone encountered this issue before, or does anyone have a solution for it?
My Jenkins file
#!groovy
def label = "debug-${UUID.randomUUID().toString()}"
podTemplate(label: label, slaveConnectTimeout: '10', containers: [
containerTemplate(
name: 'docker-in-docker',
image: 'cfzen/dind:java11',
privileged: true,
workingDir: '/home/jenkins/agent',
ttyEnabled: true,
command: 'cat',
envVars: [
envVar(key: 'TESTCONTAINERS_HOST_OVERRIDE', value: 'tcp://localhost:2375'),
envVar(key: 'TESTCONTAINERS_RYUK_DISABLED', value: 'true'),
]
),
containerTemplate(
name: 'helm-kubectl',
image: 'dtzar/helm-kubectl',
workingDir: '/home/jenkins/agent/',
ttyEnabled: true,
command: 'cat'
)
],
volumes: [hostPathVolume(mountPath: '/var/run/docker.sock', hostPath: '/var/run/docker.sock'),],
annotations: [
podAnnotation(key: 'iam.amazonaws.com/role',
value: 'arn:aws:iam::xxxxxxxxxxx')
],
)
{
node(label) {
deleteDir()
stage('Checkout') {
checkout scm
def shortCommit = sh(returnStdout: true, script: "git log -n 1 --pretty=format:'%h'").trim()
currentBuild.description = "${shortCommit}"
}
stage('Run Integration tests') {
container('docker-in-docker') {
withCredentials([
usernamePassword(credentialsId: 'jenkins-artifactory-credentials',
passwordVariable: 'ARTIFACTORY_SERVER_PASSWORD',
usernameVariable: 'ARTIFACTORY_SERVER_USERNAME')])
{
echo 'Run Integration tests'
sh("mvn -B clean verify -q -s mvn/local-settings.xml")
}
}
}
TestRunner:
@RunWith(CucumberWithSerenity.class)
@CucumberOptions(features = "classpath:features")
public final class RunCucumberIT {
@BeforeClass
public static void init(){
Containers.POSTGRES.start();
System.out.println("Exposed port of db is"+Containers.POSTGRES.getExposedPorts());
System.out.println("Assigned port of db is"+Containers.POSTGRES.getFirstMappedPort());
Containers.WIREMOCK.start();
Containers.S3.start();
}
private RunCucumberIT() {
}
}
Fails at Containers.POSTGRES.getFirstMappedPort()
Requested port (X) is not mapped
I have this configuration in my pipeline job
def k8sTestPodTemplate(docker_image) {
return """
apiVersion: v1
kind: Pod
metadata:
name: my-agent
labels:
name: my-agent
spec:
serviceAccountName: jenkins
containers:
- name: python
image: ${docker_image}
command: ["/bin/bash", "-c", "cat"]
tty: true
"""
}
pipeline {
agent none
stages {
stage('Run tests') {
parallel {
stage('Tests Python 3.5') {
agent {
kubernetes {
defaultContainer 'jnlp'
yaml k8sTestPodTemplate('python:3.5')
}
}
steps {
container('python') {
sh "echo 'Hello from Python 3.5'"
}
}
}
stage('Tests Python 3.6') {
agent {
kubernetes {
defaultContainer 'jnlp'
yaml k8sTestPodTemplate('python:3.6')
}
}
steps {
container('python') {
sh "echo 'Hello from Python 3.6'"
}
}
}
stage('Tests Python 3.7') {
agent {
kubernetes {
defaultContainer 'jnlp'
yaml k8sTestPodTemplate('python:3.7')
}
}
steps {
container('python') {
sh "echo 'Hello from Python 3.7'"
}
}
}
}
}
}
}
But as you can see I could easily improve this code to something like that:
def k8sTestPodTemplate(docker_image) {
return """
apiVersion: v1
kind: Pod
metadata:
name: my-agent
labels:
name: my-agent
spec:
serviceAccountName: jenkins
containers:
- name: python
image: ${docker_image}
command: ["/bin/bash", "-c", "cat"]
tty: true
"""
}
def generateStage(docker_image) {
return {
stage("Tests ${docker_image}") {
agent {
kubernetes {
defaultContainer 'jnlp'
yaml k8sTestPodTemplate("${docker_image}")
}
}
steps {
container('python') {
sh "echo ${docker_image}"
}
}
}
}
}
pipeline {
agent none
stages {
stage('Run tests') {
parallel {
generateStage("python:3.5")
generateStage("python:3.6")
generateStage("python:3.7")
}
}
}
}
But I cannot get this to work. The problem is that Jenkins is raising an error
No such DSL method 'agent' found among steps
I am using the "agent" directive inside the "step" directive and the agent is being generated dynamically.
I am having a hard time trying to figure out how to add envVars to the Kubernetes agent inside a Jenkinsfile.
I am pretty sure the issue is in my syntax because I am getting following error
java.lang.ClassCastException: class org.csanchez.jenkins.plugins.kubernetes.
ContainerTemplate.setEnvVars() expects java.util.List<org.csanchez.jenkins.plugins.kubernetes.model.TemplateEnvVar
> but received class java.lang.String
when I have it coded this way
stage("build") {
agent {
kubernetes {
label 'kubernetes'
containerTemplate {
name 'jnlp'
image 'ubuntu:last'
ttyEnabled true
label 'label'
envVars '
envVar(key: "filePath", value: "/home/abcde/abc" )'
}
}
}
Can you guys please point me to right direction? How do I define list variable in Jenkinsfile?
My Jenkinsfile setup
pipeline {
agent any
parameters {
string(name: 'Abc', defaultValue: 'origin', description: 'test project')
}
options {
timestamps()
timeout(60)
}
stages {
stage('Build') {
parallel {
stage("build") {
agent {
kubernetes {
label 'kubernetes'
containerTemplate {
name 'jnlp'
image 'ubuntu:latest'
ttyEnabled true
label 'label'
envVars 'envVar(key: "filePath", value: "/home/abcde/abc" )'
}
}
}
steps {
container('jnlp') {
timeout(60) {
// build process
}
}
}
}
}
}
}
post {
success {
sh "success"
}
failure {
sh "failed"
}
unstable {
sh "unsable"
}
}
}
With above code, I will get following error
java.lang.ClassCastException: class org.csanchez.jenkins.plugins.kubernetes.
ContainerTemplate.setEnvVars() expects java.util.List<org.csanchez.jenkins.plugins.kubernetes.model.TemplateEnvVar
> but received class java.lang.String
Look at their example https://github.com/jenkinsci/kubernetes-plugin/blob/f6cff5d7e9ce9da3279660159e0cb064efac534f/examples/selenium.groovy#L18
looks like in your case it should be
stage("build") {
agent {
kubernetes {
label 'kubernetes'
containerTemplate {
name: 'jnlp',
image: 'ubuntu:last',
ttyEnabled: true,
label: 'kub_catkin_node',
envVars: [
containerEnvVar(key: "filePath", value: "/home/abcde/abc" )
]
}
}
}
}
This is something supported from the UI and also from pipelines, but it might not be well supported in declarative pipelines.
One solution could be to use pipeline scripts. An other could be to check if its better supported in later version (if you are not already on the latest).
This is how I got this to work. Be careful with YAML syntax — YAML doesn't like tabs.
pipeline {
agent any
parameters {
string(name: 'Abc', defaultValue: 'origin', description: 'The Gitlab project name')
}
options {
timestamps()
timeout(60)
}
stages {
stage('Build') {
parallel {
stage("build") {
agent {
kubernetes {
label 'label'
defaultContainer 'jnlp'
yaml """
apiVersion: v1
kind: Pod
metadata:
labels:
some-label: label
spec:
containers:
- name: jnlp
image: ubuntu:latest
tty: true
env:
- name: 'filePATH'
value: 'fileValue'
"""
}
steps {
container('jnlp') {
timeout(60) {
// build process
}
}
}
}
}
}
}
post {
success {
sh "success"
}
failure {
sh "failed"
}
unstable {
sh "unsable"
}
}
}
I am trying to use the post steps with the Jenkins Kubernetes plugin. Does anyone have an idea?
java.lang.NoSuchMethodError: No such DSL method 'post' found among steps
My pipeline:
podTemplate(
label: 'jenkins-pipeline',
cloud: 'minikube',
volumes: [
hostPathVolume(mountPath: '/var/run/docker.sock', hostPath: '/var/run/docker.sock'),
]) {
node('jenkins-pipeline') {
stage('test') {
container('maven') {
println 'do some testing stuff'
}
}
post {
always {
println "test"
}
}
}
}
As of this writing, Post is only supported in declarative pipelines.
You could have a look at their declarative example if you absolutely must use post.
pipeline {
agent {
kubernetes {
//cloud 'kubernetes'
label 'mypod'
containerTemplate {
name 'maven'
image 'maven:3.3.9-jdk-8-alpine'
ttyEnabled true
command 'cat'
}
}
}
stages {
stage('Run maven') {
steps {
container('maven') {
sh 'mvn -version'
}
}
}
}
}
This example shows how to use the post step using the Kubernetes plugin:
pipeline {
agent {
kubernetes {
label "my-test-pipeline-${BUILD_NUMBER}"
containerTemplate {
name "my-container"
image "alpine:3.15.0"
command "sleep"
args "99d"
}
}
}
stages {
stage('Stage 1') {
steps {
container('my-container') {
sh '''
set -e
echo "Hello world!"
sleep 10
echo "I waited"
echo "forcing a fail"
exit 1
'''
}
}
}
}
post {
unsuccessful {
container('my-container') {
sh '''
set +e
echo "Cleaning up stuff here"
'''
}
}
}
}