inline lambda function using cdk - aws-cdk

This code is working as expected and I can create cloudformation template. But I need to embed the function inline. This sample code will upload the file to S3 and I do not want to use S3.
# cat mylambda/hello.py
import json


def handler(event, context):
    """Lambda handler: echo the request path in a plain-text HTTP response.

    The original paste lost all indentation, which makes the code invalid
    Python; structure is restored here with behavior unchanged.

    :param event: API Gateway proxy event; only ``event['path']`` is read.
    :param context: Lambda context object (unused).
    :returns: API Gateway proxy response dict (statusCode/headers/body).
    """
    # Log the full event for debugging in CloudWatch.
    print('request: {}'.format(json.dumps(event)))
    return {
        'statusCode': 200,
        'headers': {
            'Content-Type': 'text/plain'
        },
        'body': 'Hello, CDK! You have hit {}\n'.format(event['path'])
    }
# cat app.py
#!/usr/bin/env python3
# CDK app entry point: instantiates the same stack class twice, once per
# region, then synthesizes the CloudFormation templates.
from aws_cdk import core
from hello.hello_stack import MyStack
app = core.App()
# Distinct construct ids ("hello-cdk-1"/"hello-cdk-2") are required because
# both stacks live in the same app; each targets a different region.
MyStack(app, "hello-cdk-1", env={'region': 'us-east-2'})
MyStack(app, "hello-cdk-2", env={'region': 'us-west-2'})
app.synth()
# cat hello/hello_stack.py
from aws_cdk import (
core,
aws_lambda as _lambda,
)
class MyStack(core.Stack):
    """Stack containing a single Python 3.7 Lambda function.

    The handler code is packaged from the local ``mylambda`` directory and
    uploaded to S3 as a CDK asset. To embed the source inline instead (as
    the question asks), use ``_lambda.Code.from_inline(source_string)`` —
    no S3 asset is created in that case.
    """

    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        # Defines an AWS Lambda resource.
        my_lambda = _lambda.Function(
            self, 'HelloHandler',
            runtime=_lambda.Runtime.PYTHON_3_7,
            # Code.asset() is deprecated; Code.from_asset() is the
            # supported equivalent with identical behavior.
            code=_lambda.Code.from_asset('mylambda'),
            handler='hello.handler',
        )

Here is an example of an inline Lambda function that can be deployed using the CDK:
# Bootstrap a new CDK project and copy in the lambda-cron example.
git clone https://github.com/aws-samples/aws-cdk-examples.git
mkdir lambda-cron1
cd lambda-cron1
cdk init --language python
# NOTE(review): the cp path assumes the repo was cloned under /tmp, but the
# clone above lands in the current directory — confirm the intended path.
cp /tmp/aws-cdk-examples/python/lambda-cron/* .
pip install -r requirements.txt
# Region and credentials for the deploying account (placeholders).
export AWS_DEFAULT_REGION=us-east-1
export AWS_ACCESS_KEY_ID=xxx
export AWS_SECRET_ACCESS_KEY=xxx
# List, synthesize, and deploy the example stack.
cdk ls
cdk synth LambdaCronExample > a2.txt
cdk deploy LambdaCronExample

Related

Pulumi Automation API doesn't run the Pulumi CLI?

I'm writing a Flask app that uses the Pulumi Automation API. I'm following the Automation API project examples, but when I send a POST request I get a "Program run without the Pulumi engine available; re-run using the pulumi CLI" error. Isn't the Automation API supposed to run the CLI on its own?
The Pulumi CLI is available:
pulumi version
v3.24.1
edit: I followed the pulumi over HTTP example, here is my app.py
import pulumi
from flask import Flask, request, make_response, jsonify
from pulumi import automation as auto
import os
from pulumi_aws import s3

app = Flask(__name__)


# This function defines our pulumi s3 static website in terms of the content
# that the caller passes in. This allows us to dynamically deploy websites
# based on user defined values from the POST body.
def create_pulumi_program(content: str):
    """Pulumi program: an S3 static website whose index.html is *content*.

    Must be invoked by the Pulumi engine (via the Automation API), not
    called directly — resource constructors register resources with the
    engine as a side effect.
    """
    # Create a bucket and expose a website index document.
    site_bucket = s3.Bucket(
        "s3-website-bucket",
        website=s3.BucketWebsiteArgs(index_document="index.html"))
    # Write our index.html into the site bucket.
    s3.BucketObject(
        "index",
        bucket=site_bucket.id,
        content=content,
        key="index.html",
        content_type="text/html; charset=utf-8")
    # Set the access policy for the bucket so all objects are readable.
    s3.BucketPolicy(
        "bucket-policy",
        bucket=site_bucket.id,
        policy={
            "Version": "2012-10-17",
            "Statement": {
                "Effect": "Allow",
                "Principal": "*",
                "Action": ["s3:GetObject"],
                # Policy refers to bucket explicitly.
                "Resource": [pulumi.Output.concat(
                    "arn:aws:s3:::", site_bucket.id, "/*")]
            },
        })
    # Export the website URL.
    pulumi.export("website_url", site_bucket.website_endpoint)


# The pasted code showed '#app.route' — the '@' of the decorator was
# swallowed by the markdown renderer; it is restored here.
@app.route('/', methods=['GET'])
def home():
    # Fixed mismatched closing tag (</p> -> </h1>).
    return "<h1>Hello</h1>"


@app.route('/v1/code', methods=['POST'])
def create_handler():
    """Create and deploy a Pulumi stack from the posted JSON body."""
    content = request.get_json()
    project_name = content.get('project_name')
    stack_name = content.get('stack_name')
    # The Automation API reads the access token from the environment.
    os.environ['PULUMI_ACCESS_TOKEN'] = request.headers['pulumi_access_token']
    try:
        def pulumi_program():
            return create_pulumi_program(content)

        # Pass the *callable*, not its result: calling
        # create_pulumi_program(content) here would run the program without
        # the Pulumi engine available (the error the question reports).
        stack = auto.create_stack(stack_name=stack_name,
                                  project_name=project_name,
                                  program=pulumi_program)
        stack.workspace.install_plugin("aws", "v4.0.0")
        # Duplicate set_config call removed — one region setting suffices.
        stack.set_config("aws:region", auto.ConfigValue(value="us-west-2"))
        # deploy the stack, tailing the logs to stdout
        up_res = stack.up(on_output=print)
        return jsonify(id=stack_name, url=up_res.outputs['website_url'].value)
    except auto.StackAlreadyExistsError:
        return make_response(f"stack '{stack_name}' already exists", 409)
    except Exception as exn:
        return make_response(str(exn), 500)


if __name__ == '__main__':
    app.run(debug=True)
I found the issue, it was because I was passing a parameter to the program function in create_stack
stack = automation.create_stack(
stack_name=stack_name,
project_name=project_name,
program=create_pulumi_program(content)
)
It should be instead like this:
stack = automation.create_stack(
stack_name=stack_name,
project_name=project_name,
program=create_pulumi_program
)

aws_cdk events rule target for cdk pipelines fails

below error pops when I try to target a CDK pipeline using events targets.
jsii.errors.JavaScriptError:
Error: Resolution error: Supplied properties not correct for "CfnRuleProps"
targets: element 0: supplied properties not correct for "TargetProperty"
arn: required but missing.
code is below
from aws_cdk import (
    core,
    aws_codecommit as codecommit,
    aws_codepipeline as codepipeline,
    aws_events as events,
    aws_events_targets as targets
)
from aws_cdk import pipelines
from aws_cdk.pipelines import CodePipeline, CodePipelineSource, ShellStep


class BootStrappingStack(core.Stack):
    """Stack with a CodeCommit-sourced CDK pipeline triggered hourly."""

    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
        # Fixed: the original was missing the closing ')' on this call.
        super().__init__(scope, construct_id, **kwargs)
        repo = codecommit.Repository(
            self, 'Repository',
            repository_name='Repository'
        )
        source_artifact = codepipeline.Artifact()
        cloud_assembly_artifact = codepipeline.Artifact()
        pipeline = CodePipeline(self, 'Pipeline',
            synth=ShellStep("Synth",
                input=CodePipelineSource.code_commit(
                    repository=repo,
                    branch='master'),
                commands=[
                    'pip install -r requirements',
                    'npm install -g aws-cdk',
                    'cdk synth']
            )
        )
        # targets.CodePipeline expects a codepipeline.IPipeline, not a
        # pipelines.CodePipeline; the underlying pipeline only exists after
        # build_pipeline() has been called (this caused the
        # "arn: required but missing" resolution error).
        pipeline.build_pipeline()
        rule = events.Rule(self, 'TriggerPipeline',
            schedule=events.Schedule.expression('rate(1 hour)')
        )
        rule.add_target(targets.CodePipeline(pipeline.pipeline))
The documentation for aws_cdk.events_targets works with codepipeline construct, however doesn't work as documented for cdk pipelines.
This needs to be addressed in the documentation, once I get to know what's the fix. Please help.
As mentioned by @Otavio, you need to use a codepipeline.IPipeline.
You can use the pipeline property from CDK CodePipeline construct but in order to use that first you need to construct the pipeline using build_pipeline() method:
# Answer snippet (fragment): runs inside a Stack's __init__, so 'self' and
# 'repo' are defined by the enclosing scope shown in the question's code.
pipeline = CodePipeline(self, 'Pipeline',
synth=ShellStep("Synth",
input=CodePipelineSource.code_commit(
repository=repo,
branch='master'),
commands=[
'pip install -r requirements',
'npm install -g aws-cdk',
'cdk synth']
)
)
# You need to construct the pipeline before passing it as a target in rule
pipeline.build_pipeline()
rule = events.Rule(self, 'TriggerPipeline',
schedule=events.Schedule.expression('rate(1 hour)')
)
# Using the pipeline property from CDK Codepipeline
# (a codepipeline.IPipeline, which is what targets.CodePipeline accepts).
rule.add_target(targets.CodePipeline(pipeline.pipeline))
The problem is that targets.CodePipeline receives a codepipeline.IPipeline as a parameter. But what you are using instead is a pipelines.CodePipeline, which is a different thing. CodePipeline is more abstract construct, built on top of the codepipeline module.
You can try this:
const pipeline = new CodePipeline(self, 'Pipeline' ....
Then:
rule.addTarget(new targets.CodePipeline(pipeline))

Is it possible to compile jenkins pipeline into jar for shared library to use it?

Here's my question, I want to compile jenkins pipeline into jar for shared library to import and use it, so that I can protect my jenkins pipeline source code.
As we know, shared library can use third-party libraries, I write my own third-party library and compile into jar then make shared library to import and use it, but I don't know how to use jenkins pipeline steps in my own third-party library.
Here's what I did:
I created my own third-party library, written in Groovy, and compiled it into a jar; the source code looks like this:
// src/main/groovy/DemoLibrary.groovy
// I want this library to run jenkins pipeline step
package com.example
import org.jenkinsci.plugins.workflow.steps.durable_task.ShellStep
class DemoLibrary {
    // this function can run command in jenkins master node
    // runShell(script: script, cwd: cwd)
    // return: process
    def runShell(args) {
        def cmd = args.script
        // Prefix a working-directory change when one was supplied.
        if (args.cwd && args.cwd != "") {
            cmd = "cd ${args.cwd} && ${cmd}"
        }
        def cmds = ['bash', '-c', cmd]
        def proc = cmds.execute()
        proc.waitFor()
        if (proc.exitValue() != 0) {
            throw new Exception("[ERROR] run shell error: ${proc.err.text}")
        }
        return proc
    }

    // i want this function to call jenkins "sh" step, but i don't know how
    // to get StepContext in shared library
    // runStepShell(script: script, context: context)
    // return: stepExecution
    def runStepShell(args) {
        def shellStep = new ShellStep(args.script)
        def stepExecution = shellStep.start(args.context)
        // Fixed typo: was 'retrun', which does not compile.
        return stepExecution
    }
}
I create my shared library, source code like this:
// vars/demoLibrary.groovy
// Groovy annotations start with '@'; the paste rendered them as '#'.
// @Field also needs its import to resolve.
import groovy.transform.Field

@Grab('com.example:demo-library:0.1.0')
@Field demoLib = new DemoLibrary()

def demoStage() {
    docker.image("alpine:latest").inside("--user 1000:1000") {
        def script = "hostname"
        // run step "sh" can show the hostname of the docker container
        sh script: script
        // but run runShell show the hostname of the jenkins master
        def proc = demoLib.runShell(script: script)
        echo "${proc.text}"
        // how can i get the docker stepContext to make my third-party library to run jenkins sh step?
        // Fixed typo: was 'rrunStepShell'.
        demoLib.runStepShell(script: script, context: context)
    }
}
Is it possible to call Jenkins steps from my own third-party library? This has had me stuck for several days. Thanks!

run any command from configuration file

I write the shared library for jenkins where I have a method that read configuration file (yaml) and should execute commands based on the input.
example for configuration file
commands:
- name: command 1
command: "sh 'ls -la'"
- name: command 2
command: "readYaml file: 'demo.yaml'"
the method code
// Iterate the parsed YAML 'commands' list and try to run each entry.
def command_executor(config){
config.commands.each { command ->
this.script.echo "running ${command.name} command"
// This is my problem how to run the command
// NOTE(review): String.execute() launches an OS process on the controller;
// it cannot evaluate Pipeline steps such as 'sh' or 'readYaml'.
command.command.execute().text
}
}
The above example is define in my class and I call it from /var/my_command_executer.groovy file
How I can run any command from the string parameter?
I found the below solution:
Create a temporary Groovy file containing a method with a predefined name that wraps the command.
Load the temporary file in method and call the method.
Something like
// Run each configured command by generating a tiny Groovy script that wraps
// it in a method, then loading and invoking that method in Pipeline context.
def command_executor(config){
    config.commands.each { command ->
        this.script.echo "running ${command.name} command"
        // Fixed: Groovy named arguments must be comma-separated —
        // the original 'file: "temp.groovy" text: ...' is a syntax error.
        this.script.writeFile file: "temp.groovy", text: """
def my_command_executor(){
${command.command}
}
"""
        // 'load' is a Pipeline step, so call it via this.script like the
        // other steps above.
        def temp_command_executor = this.script.load "temp.groovy"
        temp_command_executor.my_command_executor()
    }
}

Create docker image with latest jenkins

I'm using the latest Jenkins image (2.60.3) and then I would like to update the jenkins.war file that is in /usr/share/jenkins/jenkins.war to get an image with the latest version of it (2.73.3). I'm trying to achieve that using the following dockerfile:
# Replace the bundled jenkins.war (2.60.3) with a newer one from the build
# context. NOTE(review): if the old version still shows up, check for a
# cached build layer (rebuild with --no-cache) — COPY does replace the file.
FROM jenkins:latest
COPY jenkins.war /usr/share/jenkins/
I have the jenkins.war file in the same folder than the dockerfile. The issue I'm having is that for some reason the file doesn't get overwritten (there is the jenkins.war v2.60.3). Why that could be happening?
As commented, using the jenkins/jenkins image, I have (with the latest LTS):
# Jenkins LTS 2.73.3 pre-configured via init.groovy.d scripts:
# no setup wizard, no master executors, proxy and admin from Docker secrets.
FROM jenkins/jenkins:2.73.3
ARG http_proxy
ARG https_proxy
# Skip setup wizard
ENV JAVA_OPTS="-Djenkins.install.runSetupWizard=false"
USER root
# Group id 581 grants access to a host resource; jenkins joins it.
RUN addgroup --system --gid 581 dtpdkr && \
adduser jenkins dtpdkr
USER jenkins
# Remove executors in master
COPY master-executors.groovy /usr/share/jenkins/ref/init.groovy.d/
# Set proxy based on proxy-password secret
COPY set-proxy.groovy /usr/share/jenkins/ref/init.groovy.d/
# Create admin based on secrets jenkins-adm-name and jenkins-adm-pass
COPY security.groovy /usr/share/jenkins/ref/init.groovy.d/security.groovy
# Install plugins
COPY plugins.txt /usr/share/jenkins/ref/plugins.txt
RUN /usr/local/bin/install-plugins.sh < /usr/share/jenkins/ref/plugins.txt
That does give me an installed Jenkins LTS image, with all the plugins I need.
Since I am behind a proxy, I had to configure it first:
$ more set-proxy.groovy
// Init script: configure Jenkins' HTTP proxy at startup.
import hudson.model.*;
import jenkins.model.*;
def instance = Jenkins.getInstance()
final String name = "proxy.mycompany.com"
final int port = 8080
final String username = "unix_web_account"
// The proxy password is mounted as a Docker secret, not baked into the image.
def password = new File("/run/secrets/proxy_password").text.trim()
final String noProxyHost = "127.0.0.1,localhost,mycompany.com"
final def pc = new hudson.ProxyConfiguration(name, port, username, password, noProxyHost)
instance.proxy = pc
instance.save()
pc.save()
println "Proxy settings updated!"
And I need to define an admin account:
$ more security.groovy
#!groovy
// Init script: create an admin account from Docker secrets and enable
// logged-in-users-can-do-anything authorization.
import jenkins.model.*
import hudson.security.*
import jenkins.security.s2m.AdminWhitelistRule
def instance = Jenkins.getInstance()
// Admin credentials come from mounted secrets, not from the image.
def user = new File("/run/secrets/jenkins-adm-name").text.trim()
def pass = new File("/run/secrets/jenkins-adm-pass").text.trim()
println "Creating user " + user + "..."
def hudsonRealm = new HudsonPrivateSecurityRealm(false)
hudsonRealm.createAccount(user, pass)
instance.setSecurityRealm(hudsonRealm)
def strategy = new FullControlOnceLoggedInAuthorizationStrategy()
instance.setAuthorizationStrategy(strategy)
instance.save()
// Disable the agent-to-master access-control kill switch.
Jenkins.instance.getInjector().getInstance(AdminWhitelistRule.class).setMasterKillSwitch(false)
println "User " + user + " was created"
Finally, I don't want any job execution on the master:
$ more master-executors.groovy
// Init script: set master executors to 0 so jobs only run on agents.
import hudson.model.*;
import jenkins.model.*;
println "--> disabling master executors"
Jenkins.instance.setNumExecutors(0)

Resources