Jenkins. Running docker containers in parallel (declarative) - docker

I want to run two Docker containers in a declarative Jenkins pipeline, because I have a backend container which utilises a Selenium server container for tests. I know that there is a scripted example, but I wonder if there is a declarative option.
Scripted looks like this:
node {
checkout scm
docker.image('mysql:5').withRun('-e "MYSQL_ROOT_PASSWORD=my-secret-pw"') { c ->
docker.image('mysql:5').inside("--link ${c.id}:db") {
/* Wait until mysql service is up */
sh 'while ! mysqladmin ping -hdb --silent; do sleep 1; done'
}
docker.image('centos:7').inside("--link ${c.id}:db") {
/*
* Run some tests which require MySQL, and assume that it is
* available on the host name `db`
*/
sh 'make check'
}
}
}

In the end I used the description from here.
withRun - executes commands on the host
inside - inside the container
stage ('Test') {
steps {
// Create network where I will connect all containers
sh 'docker network create test'
script {
//withRun command starts the container and doesn't stop it untill all inside is executed.
//Commands inside are executed on HOST machine
docker.image('selenium/standalone-chrome').withRun("-p 4444:4444 --name=selenium -itd --network=test") {
docker.image("$CONTAINER_NAME:front").withRun("-p 3001:80 --name=front -itd --network=test") {
//We start backend container...
docker.image("$CONTAINER_NAME:back").withRun("-p 8001:80 --name=back -itd --network=test") {
//...and with inside command execute commands *surprise* inside the container
docker.image("$CONTAINER_NAME:back").inside("-itd --network=test") {
//execute commands inside the container
}
}
}
}
}
}
}

Related

How to run TestCafe runner class with Docker

I am new to TestCafe and want to run my test cases with a runner class in a Docker container.
I am able to run a single test case through Docker, but when I want to run it with the runner class, I am not able to do that.
I have followed this thread https://github.com/DevExpress/testcafe/issues/2761 but I don't know how to define the "Environment initialization steps from the TestCafe Docker script in your runner."
my runner file
const testCafe = require('testcafe');
const fs = require('fs');
function runTest(tcOptions) {
testCafe('localhost', 8080, 8081)
.then(function(tc) {
let runner = tc.createRunner();
return runner
.src(['./apps/tests/*.test.js'])
.browsers(['firefox'])
.concurrency(4)
.reporter([
'list',
{
name: 'html',
output: './dist/testcafe/testCafe-report.html'
}
])
.screenshots('./dist/testcafe/testcafe-screenshots', true)
.run(tcOptions)
.catch(function(error) {
console.log(error);
});
})
.then(failedCount => {
if (failedCount > 0) console.log('Error Tests failed: ' + failedCount);
else console.log('All Desktop Tests Passed');
process.exit(0);
})
}
const tcOptions = {
debugMode: false
};
runTest(tcOptions);
and running this Docker command
docker run -v `pwd`/:/tests -e "NODE_PATH=/tests/node_modules" -it --entrypoint node testcafe/testcafe /tests/apps/testcafe//testcafe-desktop-run.js
{ Error: No tests to run. Either the test files contain no tests or the filter function is too restrictive.
at Bootstrapper._getTests (/tests/node_modules/testcafe/src/runner/bootstrapper.js:92:19) code: 'E1009', data: [] }
You need to define the full path to your test files or change your working directory to the /tests directory in the container.
Besides, this step is intended to run the in-memory display server. You may skip it if you are going to run tests in a headless browser.
Here is a command that works on my side with the Runner class:
docker run -v //c/Users/User/test-docker:/tests -w=/tests -it --entrypoint node testcafe/testcafe /tests/test-run.js

Single SSH connection in Jenkins pipeline

I've created my Jenkinsfile for building my project in production and the pipeline looks like this:
pipeline {
agent any
stages {
stage('Pull') {
steps {
sh '''ssh ${SSH_USER}#${SERVER_ADDRESS} <<EOF
cd ${SOURCE_FOLDER}/project
git pull
git status
EOF'''
}
}
stage('Composer') {
parallel {
stage('Composer') {
steps {
sh '''ssh ${SSH_USER}#${SERVER_ADDRESS} <<EOF
docker run --rm -v ${SOURCE_FOLDER}/project:/app composer/composer:latest install
EOF'''
}
}
stage('Composer 2') {
steps {
sh '''ssh ${SSH_USER}#${SERVER_ADDRESS} <<EOF
docker run --rm -v ${SOURCE_FOLDER}/project/sub:/app
composer/composer:latest install
EOF'''
}
}
}
}
}
}
Is there a way to have all the stages all in one single SSH connection in order to minimise the overhead and the connection number?
I've done all the SSH key setup manually by creating the keys and pasting the public key on the production machine.
You can create a function for the connection and pass the SSH_USER & SERVER_ADDRESS as input parameters to that function. Call this function from all your stages.

How do I set up postgres database in Jenkins pipeline?

I am using docker to simulate postgres database for my app. I was testing it in Cypress for some time and it works fine. I want to set up Jenkins for further testing, but I seem stuck.
On my device, I would use commands
docker create -e POSTGRES_DB=myDB -p 127.0.0.1:5432:5432 --name myDB postgres
docker start myDB
to create it. How can I simulate this in Jenkins pipeline? I need the DB for the app to work.
I use Dockerfile as my agent, and I have tried putting the ENV variables there, but it does not work. Docker is not installed on the pipeline.
The way I see it is either:
Create an image by using a Dockerfile that already includes the database.
Somehow install docker inside the pipeline and use the same commands
Maybe with master/slave nodes? I don't understand them well yet.
This might be a use case for the sidecar pattern, one of Jenkins Pipeline's advanced features.
For example (from the above site):
node {
checkout scm
docker.image('mysql:5').withRun('-e "MYSQL_ROOT_PASSWORD=my-secret-pw"') { c ->
docker.image('mysql:5').inside("--link ${c.id}:db") {
/* Wait until mysql service is up */
sh 'while ! mysqladmin ping -hdb --silent; do sleep 1; done'
}
docker.image('centos:7').inside("--link ${c.id}:db") {
/*
* Run some tests which require MySQL, and assume that it is
* available on the host name `db`
*/
sh 'make check'
}
}
}
The above example uses the object exposed by withRun, which has the
running container’s ID available via the id property. Using the
container’s ID, the Pipeline can create a link by passing custom
Docker arguments to the inside() method.
Best thing is that the containers should be automatically stopped and removed when the work is done.
EDIT:
To use a Docker network instead, you can do the following (there is an open Jira issue to support this OOTB). First, define the following helper function:
def withDockerNetwork(Closure inner) {
try {
networkId = UUID.randomUUID().toString()
sh "docker network create ${networkId}"
inner.call(networkId)
} finally {
sh "docker network rm ${networkId}"
}
}
Actual usage
withDockerNetwork{ n ->
docker.image('sidecar').withRun("--network ${n} --name sidecar") { c->
docker.image('main').inside("--network ${n}") {
// do something with host "sidecar"
}
}
}
For declarative pipelines:
pipeline {
agent any
environment {
POSTGRES_HOST = 'localhost'
POSTGRES_USER = myuser'
}
stages {
stage('run!') {
steps {
script {
docker.image('postgres:9.6').withRun(
"-h ${env.POSTGRES_HOST} -e POSTGRES_USER=${env.POSTGRES_USER}"
) { db ->
// You can your image here but you need psql to be installed inside
docker.image('postgres:9.6').inside("--link ${db.id}:db") {
sh '''
psql --version
until psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -c "select 1" > /dev/null 2>&1 || [ $RETRIES -eq 0 ]; do
echo "Waiting for postgres server, $((RETRIES-=1)) remaining attempts..."
sleep 1
done
'''
sh 'echo "your commands here"'
}
}
}
}
}
}
}
Related to Docker wait for postgresql to be running

Jenkinsfile: Curl logs from previous (still running) stage

I'm in a process of migrating from freestyle jobs chained into pipeline to have the pipeline in a Jenkinsfile.
My current pipeline will execute 2 jobs in parallel, one will create a tunnel to database (with a randomly generated port) and the next job needs to get this port number, so I'm performing a curl command and reading the console of the create-db-tunnel job and storing the port number. The create-db-tunnel needs to keep running as the follow up job is connecting to the database and is taking DB dump. This is the curl command which I run on the second job and which is returning the randomly generated port number from the established DB tunnel:
Port=$(curl -u ${USERNAME}:${TOKEN} http://myjenkinsurl.com/job/create-db-tunnel/lastBuild/consoleText | grep Port | grep -Eo '[0-9]{3,5}')
I wonder if there is anything similar I can use in Jenkinsfile? I currently have the 2 jobs triggered in parallel, but since the create-db-tunnel is no longer a freestyle job, I'm not sure if I can get the port number still? I can confirm that the console logs for the db_tunnel stage has the port number in there, just not sure how can I query that console. Here is my jenkinsfile:
pipeline {
agent any
environment {
APTIBLE_LOGIN = credentials('aptible')
}
stages {
stage('Setup') {
parallel {
// run db_tunnel and get_port in parralel
stage ('db_tunnel') {
steps {
sh """
export PATH=$PATH:/usr/local/bin
aptible login --email=$APTIBLE_LOGIN_USR --password=$APTIBLE_LOGIN_PSW
aptible db:tunnel postgres-prod & sleep 30s
"""
}
}
stage('get_port') {
steps {
sh """
sleep 15s
//this will not work
Port=$(curl -u ${USERNAME}:${TOKEN} http://myjenkinsurl.com/job/db_tunnel/lastBuild/consoleText | grep Port | grep -Eo '[0-9]{3,5}')
echo "Port=$Port" > port.txt
"""
}
}
}
}
}
}
Actually, I found a solution to my question - it was a very similar curl command I had to run and I'm now getting the desired port number I needed. Here is the jenkinsfile if someone is interested:
pipeline {
agent any
environment {
APTIBLE_LOGIN = credentials('aptible')
JENKINS_TOKEN = credentials('jenkins')
}
stages {
stage('Setup') {
parallel {
// run db_tunnel and get_port in parralel
stage ('db_tunnel') {
steps {
sh """
export PATH=$PATH:/usr/local/bin
aptible login --email=$APTIBLE_LOGIN_USR --password=$APTIBLE_LOGIN_PSW
aptible db:tunnel postgres-prod & sleep 30s
"""
}
}
stage('get_port') {
steps {
sh """
sleep 20
Port=\$(curl -u $JENKINS_TOKEN_USR:$JENKINS_TOKEN_PSW http://myjenkinsurl.com/job/schema-archive-jenkinsfile/lastBuild/consoleText | grep Port | grep -Eo '[0-9]{3,5}')
echo "Port=\$Port" > port.txt
"""
}
}
}
}
}
}

Cannot specify flags when using variables for Docker agent args?

I am attempting to mount a volume for my Docker agent with Jenkins pipeline. The following is my JenkinsFile:
pipeline {
agent none
environment {
DOCKER_ARGS = '-v /tmp/my-cache:/home/my-cache'
}
stages {
stage('Build') {
agent {
docker {
image 'my-image:latest'
args '$DOCKER_ARGS'
}
}
steps {
sh 'ls -la /home'
}
}
}
}
Sadly it fails to run, and the following can be seen from the pipeline.log file.
java.io.IOException: Failed to run image 'my-image:latest'. Error: docker: Error response from daemon: create /tmp/my-cache: " /tmp/my-cache" includes invalid characters for a local volume name, only "[a-zA-Z0-9][a-zA-Z0-9_.-]" are allowed. If you intended to pass a host directory, use absolute path.
See 'docker run --help'.
However, the following JenkinsFile does work:
pipeline {
agent none
environment {
DOCKER_ARGS = '/tmp/my-cache:/home/my-cache'
}
stages {
stage('Build') {
agent {
docker {
image 'my-image:latest'
args '-v $DOCKER_ARGS'
}
}
steps {
sh 'ls -la /home'
}
}
}
}
The only difference is the -v flag is hardcoded outside of the environment variable.
I am new to Jenkins, so I have struggled to find any documentation on this behaviour. Could somebody please explain why I can't define my Docker agent args entirely in an environment variable?

Resources