While running my tests in a Jenkins pipeline, Cypress reports that the cy.request() url is null. I have set all of the environment variables in Jenkins. Could someone please advise what the problem is here?
Error:
CypressError: cy.request() requires a url. You did not provide a url.
my .env file:
CYPRESS_TEST_USERNAME=Test1
CYPRESS_TEST_PASSWORD=BooksTest1234
CYPRESS_BASE_URL=https://my-test-url.com/books/
CYPRESS_API_URL=https://my-test-url.com/api/v1.0/books/
my Jenkinsfile:
def channel = '#jenkins-cypress'
def ARTEFACT_DIR=""
pipeline {
agent any
stages {
stage('Clone books-suite') {
steps {
dir('books-co') {
script {
checkout([
$class: 'GitSCM',
branches: [
[name: "origin/develop"]
],
extensions: [
[$class: 'CleanCheckout'],
[$class: 'LocalBranch', localBranch: '**'],
[$class: 'CloneOption', depth: 1, noTags: false, reference: '', shallow: true, timeout: 10]
],
userRemoteConfigs: [[
credentialsId: 'fsdf68fs61-esdfsdf56546-92ea-7345bcfsfsdfb3d391',
url: 'ssh://git@bitbucket_url_location'
]],
doGenerateSubmoduleConfigurations: false,
submoduleCfg: []
])
ARTEFACT_DIR="${env.WORKSPACE}/artefacts/${BUILD_NUMBER}"
}
}
}
}
stage('Run cypress') {
steps {
script {
mattermostSend color: 'good', channel: channel, message: "**STARTING** - Cypress starts (<${env.BUILD_URL}|build ${env.BUILD_NUMBER}>)"
sh "mkdir -p \"${ARTEFACT_DIR}/videos\""
sh "mkdir -p \"${ARTEFACT_DIR}/screenshots\""
def baseUrlConfig="-e CYPRESS_baseUrl=https://my-test-url.com/books/"
def screenshotsFolderConfig="screenshotsFolder=/artefacts/screenshots"
def videosFolderConfig="videosFolder=/artefacts/videos"
def config = "--config ${screenshotsFolderConfig},${videosFolderConfig}"
def cypressArgs = "run --headless --browser chrome ${config} --project /books-suite"
sh """
docker run ${baseUrlConfig} \
-v \"${ARTEFACT_DIR}:/artefacts\" \
-e CYPRESS_OUTPUT_FILE=\"/artefacts/report.html\" \
-e CYPRESS_TEST_USERNAME=\"Test1\" \
-e CYPRESS_TEST_PASSWORD=\"BooksTest1234\" \
-e CYPRESS_BASE_URL=\"https://my-test-url.com/books/\" \
-e CYPRESS_API_URL=\"https://my-test-url.com/api/v1.0/books/\" \
cypress:latest \
/node_modules/.bin/cypress ${cypressArgs}
"""
mattermostSend color: 'good', channel: channel, message: "**SUCCEEDED** - Cypress CI passed successfully (<${env.BUILD_URL}|build ${env.BUILD_NUMBER}>)"
}
}
}
}
post {
always {
script {
USER_ID = get_uid()
GROUP_ID = get_gid()
sh """
docker run --user ${USER_ID}:${GROUP_ID} \
-v \"${ARTEFACT_DIR}:/artefacts\" \
-v \"${env.WORKSPACE}/books-co:/books\" \
-e JSON_DIR=\"/books/tests/cypress/cucumber-json\" \
-e OUTPUT_FILE=\"/artefacts/report.html\" \
cypress-books-report:latest
"""
}
archiveArtifacts artifacts: "artefacts/${BUILD_NUMBER}/**/*", fingerprint: true
emailext attachmentsPattern: "artefacts/${BUILD_NUMBER}/**/*",
body: '${FILE, path="' + "artefacts/${BUILD_NUMBER}/report.html" + '"}',
mimeType: 'text/html',
to: 'first.lastman@books.com',
subject: "Cypress Jenkins Build ${currentBuild.currentResult}: Job ${env.JOB_NAME}"
sh "rm -Rf artefacts/${BUILD_NUMBER}"
mattermostSend color: 'good', channel: channel, message: "**SUCCEEDED** - Cypress CI report generated (<${env.BUILD_URL}|build ${env.BUILD_NUMBER}>)"
}
failure {
mattermostSend color: 'danger', channel: channel, message: "**FAILED** - cypress CI failed (<${env.BUILD_URL}|build ${env.BUILD_NUMBER}> - <${env.BUILD_URL}console|click here to see the console output>)"
}
}
}
def get_uid() {
node('master') {
return sh(script: "id -u ${USER}", returnStdout: true).trim()
}
}
def get_gid() {
node('master') {
return sh(script: "id -g ${USER}", returnStdout: true).trim()
}
}
plugins/index.js:
module.exports = (on, config) => {
on('file:preprocessor', cucumber()),
on('before:browser:launch', (browser, launchOptions) => {
console.log("Print browser name: "+browser.name);
if (browser.name === 'chrome' || browser.name === 'chrome' && browser.isHeadless) {
launchOptions.args.push('--disable-features=SameSiteByDefaultCookies') // bypass 401 unauthorised access on chromium-based browsers
return launchOptions
}
if (browser.name === 'chrome') {
// launch chrome using incognito
launchOptions.args.push(' --incognito')
return launchOptions
}
if (browser.name === 'chrome' && browser.isHeadless) {
launchOptions.args.push('--disable-gpu');
return launchOptions
}
});
config = dotenvPlugin(config)
return config
};
my commands.js file:
Cypress.Commands.add("loginReq", () => {
cy.request({
method: 'POST',
url: Cypress.env('BASE_URL'), // baseUrl is prepended to url
form: true,
body: {
loginUsername: Cypress.env('TEST_USERNAME'),
loginPassword: Cypress.env('TEST_PASSWORD')
}
}).then((response)=>{
console.log("login success: "+response);
})
});
When you set CYPRESS_BASE_URL, the Cypress config will look like this:
{
"baseUrl": "your set base url",
"env": {
}
}
You can see this when you run cypress open and go to Settings in the opened Test Runner.
The problem is that Cypress.env() reads variables from the env object, and your base url is not there: CYPRESS_BASE_URL sets the top-level baseUrl property, not env.baseUrl.
If you set CYPRESS_BASE_URL and want to access it from your tests, you have to read it with Cypress.config().baseUrl.
However, in your example you don't really need to call Cypress.config().baseUrl at all, because Cypress applies the base url automatically, so:
cy.visit('/');
really means that Cypress prepends the base url for you. The same goes for cy.request(): instead of reading the base url from env, pass a relative path and Cypress will prepend the base url to it.
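For example, the custom command from commands.js could be rewritten like this (a minimal sketch; the form field names come from the question, and the relative path '/' assumes the login endpoint is served at the base url itself):
Cypress.Commands.add("loginReq", () => {
  cy.request({
    method: 'POST',
    url: '/', // relative: Cypress prepends Cypress.config('baseUrl')
    form: true,
    body: {
      loginUsername: Cypress.env('TEST_USERNAME'), // CYPRESS_TEST_USERNAME lands in env
      loginPassword: Cypress.env('TEST_PASSWORD')
    }
  }).then((response) => {
    cy.log('login response status: ' + response.status)
  })
});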
I have this code:
@Library('cm-library') _
def GITOPS_GITHUB_TOKEN = credentials('someToken')
def GITOPS_GITHUB_BRANCH = "dev"
def X_GW_IMS_ORG_ID = "someId"
def SPRING_PROFILES_ACTIVE = "dev"
def GITOPS_INPUT_JOBSTORUN = "someJobToRun"
def IMG = "someImage"
def GITOPS_IMS_CLIENT_SECRET = ""
def GITOPS_IMS_CLIENTCODE = ""
def ARTIFACTORY_API_TOKEN = ""
pipeline {
agent any
stages {
stage('Prepare Variables') {
steps {
script {
dockerRegistryVaultAuth("some-vault.com", "dev") {
def configuration = [$class : 'VaultConfiguration',
vaultUrl : "some-vault.com",
vaultCredentialId: env.VAULT_ROLE]
def secrets = [
[
$class: 'VaultSecret', path: "${env.VAULT_PATH}/keys", secretValues:
[
[$class: 'VaultSecretValue', envVar: 'GITOPS_IMS_CLIENT_SECRET', vaultKey: 'someKey'],
[$class: 'VaultSecretValue', envVar: 'GITOPS_IMS_CLIENTCODE', vaultKey: 'someOtherKey'],
[$class: 'VaultSecretValue', envVar: 'ARTIFACTORY_API_TOKEN', vaultKey: 'someToken']
]
]
]
wrap([$class: 'VaultBuildWrapper', vaultSecrets: secrets, configuration: configuration]) {
sh "echo working"
}
}
}
}
}
stage('Build and push docker') {
steps {
echo "stage 2!!!"
echo "---------env.GITOPS_IMS_CLIENTCODE=$env.GITOPS_IMS_CLIENTCODE"
echo "---------GITOPS_IMS_CLIENT_SECRET=$GITOPS_IMS_CLIENT_SECRET"
sh "docker run -it -e GITOPS_GITHUB_BRANCH=${GITOPS_GITHUB_BRANCH} \
-e GITOPS_GITHUB_TOKEN=${GITOPS_GITHUB_TOKEN} \
-e GITOPS_IMS_CLIENT_SECRET=${GITOPS_IMS_CLIENT_SECRET} \
-e GITOPS_IMS_CLIENTCODE=${GITOPS_IMS_CLIENTCODE} \
-e X_GW_IMS_ORG_ID=${X_GW_IMS_ORG_ID} \
-e ARTIFACTORY_API_TOKEN=${ARTIFACTORY_API_TOKEN} \
-e REGION_NAME=local \
${IMG}"
}
}
stage('Stage 3') {
steps {
echo 'stage three here!'
}
}
}
}
I'm trying to get a docker container to run from my Jenkins platform.
It seems like the variable values aren't being transferred from one stage to another. I also doubt the script I'm trying to run is right, but that's another problem.
Does anyone have a clue how to fix it?
When you declare a variable with the def keyword, it is bound to the scope it is declared in, so later stages can't see it. Simply remove the def keyword from the declaration:
pipeline {
agent any
stages {
stage('11') {
steps {
script {
vr = "test"
}
}
}
stage('22') {
steps {
script {
echo "$vr"
}
}
}
}
}
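Without def, the assignment goes into the Groovy script binding, which is shared across the whole pipeline run; that is why stage '22' can read vr.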
I'm trying to create a VM list using an Active Choices parameter:
def command = 'az vm list --resource-group test-test-test \
--query '[].{computerName:osProfile.computerName}' \
--output tsv'
def proc = command.execute()
proc.waitFor()
def output = proc.in.text
def exitcode= proc.exitValue()
def error = proc.err.text
if (error) {
println "Std Err: ${error}"
println "Process exit code: ${exitcode}"
return exitcode
}
//println output.split()
return output.split()
How am I supposed to write the Groovy script inside the Active Choices parameter in Jenkins? I just want to fetch all Azure VMs in a list and send them to the Active Choices parameter. One more question: does the Active Choices parameter authenticate only on the master, or can it authenticate on a node that has the az binary?
Yes, the script will always run on the master, and it can be included as shown below.
properties([
parameters([
[$class: 'ChoiceParameter',
choiceType: 'PT_SINGLE_SELECT',
description: 'Select the Host',
name: 'Host',
script: [
$class: 'GroovyScript',
fallbackScript: [
classpath: [],
sandbox: false,
script:
'return [\'Could not get Host\']'
],
script: [
classpath: [],
sandbox: false,
script:
'''
// a list form avoids nesting single quotes inside a single-quoted Groovy string
def command = ['az', 'vm', 'list', '--resource-group', 'dhl-dhlcom-bbb',
'--query', '[].{computerName:osProfile.computerName}', '--output', 'tsv']
def proc = command.execute()
proc.waitFor()
def output = proc.in.text
def exitcode= proc.exitValue()
def error = proc.err.text
if (error) {
println "Std Err: ${error}"
println "Process exit code: ${exitcode}"
return exitcode
}
return output.split()
'''
]
]
]
])
])
pipeline {
agent any
stages {
stage('Build') {
steps {
script {
echo "Host:::: ${params.Host}"
}
}
}
}
}
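Note that because the Active Choices script runs on the master, the az CLI has to be installed and authenticated on the master itself; it cannot be delegated to an agent that happens to have the binary.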
I am facing an issue where the shell command returns no output: with env.VERSION == '1.0.0.232' the version does not exist on the PyPI server, so grep finds nothing;
but with env.VERSION == '1.0.0.23' it does exist on the PyPI server, and the code proceeds as normal.
Jenkins code:
pipeline {
agent { label 'master' }
parameters {
string(defaultValue: 'DEV', description: '', name: 'ENV', trim: true)
string(defaultValue: 'sys', description: '', name: 'platform_type', trim: true)
string(defaultValue: 'server2', description: '', name: 'dev_app_host', trim: true)
string(defaultValue: 'server1', description: '', name: 'dev_xbar_host', trim: true)
string(defaultValue: '1.0.0.23', description: '', name: 'VERSION', trim: true)
booleanParam(defaultValue: false, description: 'force build if possible', name: 'force_build')
}
environment {
}
stages {
stage('build') {
steps {
script {
try{
try{
def version_exists = sh(script: "ssh -o StrictHostKeyChecking=no ansible@pip_server ls /var/pypi/packages/dev/ | grep ${env.app_module_name} | grep ${env.VERSION}", returnStdout: true) ?: 'no_files_found'
echo version_exists
echo version_exists.inspect()
echo version_exists.dump()
} catch(e){
echo "inner exception: ${e}"
}
} catch (e) {
echo "outer exception: ${e}"
currentBuild.result = 'FAILURE'
}
}
}
}
}
}
Relevant Jenkins log:
+ grep 1.0.0.232
+ grep dvmt_event_processor
+ ssh -o StrictHostKeyChecking=no ansible@pip_server ls /var/pypi/packages/dev/
[Pipeline] echo
inner exception: hudson.AbortException: script returned exit code 1
[Pipeline] echo
outer exception: groovy.lang.MissingPropertyException: No such property: version_exists for class: groovy.lang.Binding
PS: can the shell command be improved upon?
grep returns exit code 1 when it finds no matching lines, and Jenkins treats a non-zero exit status as the script failing, so the sh step throws a hudson.AbortException instead of assigning the output to version_exists.
Try something like this:
def version_exists = sh(script: " ... | grep ${env.VERSION} || echo not_found", returnStdout: true).trim()
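Alternatively (a sketch reusing the ssh command from the question), ask the sh step for the exit status instead of the output; with returnStatus: true the step never throws on a non-zero exit:
def status = sh(
    script: "ssh -o StrictHostKeyChecking=no ansible@pip_server ls /var/pypi/packages/dev/ | grep ${env.app_module_name} | grep ${env.VERSION}",
    returnStatus: true // exit code 0 means grep found the version
)
def version_exists = (status == 0)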
I'm trying to set up a Jenkins pipeline with a Jenkinsfile and docker-compose.
My docker-compose runs fine, but the next steps (the test stage in the Jenkinsfile) don't run.
How can I tell Jenkins "OK, the docker container is fine, you can do the next thing" while preventing the docker container from stopping (this is why I put rails s at the end of the command)?
Here is the docker-compose.yml:
version: '3'
services:
db-test:
image: postgres
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=secret
- POSTGRES_DB=server_dev
volumes:
- ./tmp/db:/var/lib/postgresql/data
ports:
- "${POSTGRES_PORT}:5432"
web-test:
image: starefossen/ruby-node
command: bash -c "cd /app && bundle install && rake db:migrate && rails s"
volumes:
- /home/xero/jenkins/jenkins_home/workspace/project-open-source:/app # Workspace
- /home/cache/bundle:/usr/local/bundle # Cache gemfiles
- /home/cache/node_modules:/app/node_modules # Cache yarn files
- /home/xero/.ssh:/root/.ssh # SSH keys (for git)
ports:
- "3000:3000"
depends_on:
- db-test
And the Jenkinsfile:
pipeline {
agent any
options {
timeout(time: 1, unit: 'DAYS')
disableConcurrentBuilds()
}
stages {
stage("Init") {
agent any
steps { initialize() }
}
stage("Test") {
agent any
steps { test() }
}
}
}
def initialize() {
sh 'docker-compose -f docker-compose-jenkins.yml up --build --abort-on-container-exit'
}
def test() {
sh 'docker exec -ti web-test sh -c "cd app/ && bundle exec rspec -f documentation"'
}
Here is my solution. I used retry and sleep to wait for the docker containers to start.
#!groovy
def message = "";
def author = "";
def getLastCommitMessage = {
message = sh(returnStdout: true, script: 'git log -1 --pretty=%B').trim()
}
def getGitAuthor = {
def commit = sh(returnStdout: true, script: 'git rev-parse HEAD')
author = sh(returnStdout: true, script: "git --no-pager show -s --format='%an' ${commit}").trim()
}
pipeline {
agent any
options {
timeout(time: 1, unit: 'DAYS')
disableConcurrentBuilds()
}
stages {
stage("Init RoR and DB") {
agent any
steps { initialize() }
}
stage("Tests") {
agent any
steps { test() }
post {
success {
publishHTML([allowMissing: false, alwaysLinkToLastBuild: false, keepAll: false, reportDir: '/var/jenkins_home/workspace/VPX-open-source/coverage/', reportFiles: 'index.html', reportName: 'RspecCoverage', reportTitles: ''])
publishHTML([allowMissing: false, alwaysLinkToLastBuild: false, keepAll: false, reportDir: '/var/jenkins_home/workspace/VPX-open-source/coverage/lcov-report', reportFiles: 'index.html', reportName: 'JestCoverage', reportTitles: ''])
publishHTML([allowMissing: false, alwaysLinkToLastBuild: false, keepAll: false, reportDir: '/var/jenkins_home/workspace/VPX-open-source/reports/', reportFiles: 'eslint.html', reportName: 'Eslint', reportTitles: ''])
publishHTML([allowMissing: false, alwaysLinkToLastBuild: false, keepAll: false, reportDir: '/var/jenkins_home/workspace/VPX-open-source/reports/', reportFiles: 'rubocop.html', reportName: 'Rubocop', reportTitles: ''])
publishHTML([allowMissing: false, alwaysLinkToLastBuild: false, keepAll: false, reportDir: '/var/jenkins_home/workspace/VPX-open-source/reports/rubycritic/', reportFiles: 'overview.html', reportName: 'Rubycritic', reportTitles: ''])
}
}
}
}
post {
failure {
script {
getLastCommitMessage()
getGitAuthor()
}
rocketSend channel: 'myproject-ci', emoji: ':x:', message: "Build failed - Commit : '${message}' by ${author}", rawMessage: true
}
}
}
def initialize() {
sh 'docker-compose -f docker-compose-jenkins.yml up --build --detach'
}
def test() {
try {
retry(3){
sleep 25
HEALTH = sh (
script: "docker inspect -f '{{.State.Health.Status}}' vpx-web-test",
returnStdout: true
).trim()
echo "${HEALTH}"
if(HEALTH != "healthy"){
// raise an error so retry() waits and tries again; returning early
// on "starting" would end the retry block without retrying
error("web-test is ${HEALTH}, not healthy yet")
}
}
sh 'docker exec vpx-web-test sh -c "cd app/ && RAILS_ENV=test bundle exec rspec -f documentation"'
sh 'docker exec vpx-web-test sh -c "cd app/ && yarn test"'
sh 'docker exec vpx-web-test sh -c "cd app/ && yarn test --coverage > reports/jest-coverage.html"'
sh 'docker exec vpx-web-test sh -c "cd app/ && yarn lint --f html reports/eslint.html ; exit 0"'
sh 'docker exec vpx-web-test sh -c "cd app/ && rubycritic app/ --no-browser -p reports/rubycritic"'
sh 'docker exec vpx-web-test sh -c "cd app/ && rubocop app/ --format html -o reports/rubocop.html --fail-level error"'
}
catch (exc) {
error("Build failed")
}
finally{
sh 'docker-compose -f docker-compose-jenkins.yml down'
}
}
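Note that docker inspect only exposes .State.Health when the container defines a healthcheck, and the docker-compose.yml above doesn't declare one. A hypothetical healthcheck for the web-test service could look like this (assuming the Rails app answers on port 3000 and curl exists in the image):
web-test:
  # ...
  healthcheck:
    test: ["CMD", "curl", "-f", "http://localhost:3000"]
    interval: 30s
    timeout: 10s
    retries: 5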
I have several Jenkins pipelines that call "docker-compose up" at the end of the build to run the containers/application. I don't need the containers/application to be up all the time, and I would like a way, from the Jenkins pipeline page, to shut the application down (docker-compose stop) to free resources for other builds. Do you know a good way to do this?
You can declare a choice parameter (e.g. stop_containers) with values YES and NO in the Jenkins job that is responsible for stopping the containers.
Then, in the Build section, select Execute shell and add the following script:
#!/bin/bash
if [ "$stop_containers" = "YES" ]
then
# command for stopping containers, e.g. docker-compose stop
else
echo "Do Nothing."
fi
Now, whenever you run the job, it will ask whether you want to stop the containers; choose YES to stop them.
If it is a Pipeline job, you can instead define a stage that performs the stop operation.
properties(
[parameters([choice(choices: ["YES", "NO"].join("\n"),
description: 'Some choice parameter',
name: 'STOP_CONTAINERS')])])
stage('stop containers') {
agent { label 'some-node' }
when {
expression {
return params.STOP_CONTAINERS == 'YES'
}
}
steps {
sh 'docker-compose stop' // or whatever stops your containers
}
}
This is the way I have done it, based on Converting Conditional Build Steps to Jenkins Pipeline.
pipeline {
agent any
parameters {
choice(
choices: 'full_deploy_and_run_docker_container\nrun_docker_containers\nstop_docker_containers',
description: '',
name: 'REQUESTED_ACTION')
}
tools {
maven 'Maven x.x.x'
jdk 'jdkYuWZ'
}
environment {
SVN_URL = 'http://svn.myexample.com/svn/myproject/trunk/'
DOCKER_PROJECT_DIR = '/home/myuser/docker/containers/myproject/trunk'
}
stages {
stage ('Initialize') {
steps {
sh '''
echo "Initialize..."
'''
}
}
stage ('Checkout') {
when {
expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' }
}
steps {
checkout([$class: 'SubversionSCM', additionalCredentials: [], excludedCommitMessages: '', excludedRegions: '', excludedRevprop: '', excludedUsers: '', filterChangelog: false, ignoreDirPropChanges: false, includedRegions: '', locations: [[credentialsId: ' something here ', depthOption: 'infinity', ignoreExternalsOption: true, local: '.', remote: "$SVN_URL"]], workspaceUpdater: [$class: 'UpdateUpdater']])
}
}
stage ('Build') {
when {
expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' }
}
steps {
sh 'mvn clean package'
}
}
stage ('Test') {
when {
expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' }
}
steps {
sh 'echo "Other tests..."'
}
}
stage ('Deploy') {
when {
expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' }
}
steps {
sh 'echo "The deploy here"'
}
}
stage ('Docker compose run') {
when {
expression { params.REQUESTED_ACTION == 'full_deploy_and_run_docker_container' ||
params.REQUESTED_ACTION == 'run_docker_containers'
}
}
steps {
sh '''
cd $DOCKER_PROJECT_DIR
docker-compose up -d
'''
}
}
stage ('Docker compose stop') {
when {
expression { params.REQUESTED_ACTION == 'stop_docker_containers' }
}
steps {
sh '''
cd $DOCKER_PROJECT_DIR
docker-compose stop
'''
}
}
stage ('Cleanup') {
steps {
cleanWs()
}
}
}
}
A simple way is to use the sh step in a Jenkins pipeline for this:
node {
stage('stop') {
// each sh step runs in its own shell, so chain the remote commands
// into a single ssh invocation instead of three separate steps
sh "ssh root@host.com 'cd /diretorio_do_docker-compose/ && docker-compose stop'"
}
}