Deploying Node.js Lambda Function Using ECR Images

3 min read · Apr 8, 2025

In this post, we'll see how to build a container image, push it to Amazon ECR, and deploy it as a Node.js Lambda function.

Reference: https://gallery.ecr.aws/lambda/nodejs

Step 1: Launch an EC2 instance with the Amazon Linux 2 AMI and associate an IAM role with it that has full ECR, S3, and DynamoDB permissions.
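If you attach the permissions from the CLI, the AWS managed policies below cover what this walkthrough needs; the role name is a placeholder for whatever role you attach to the instance:

aws iam attach-role-policy --role-name <EC2_ROLE_NAME> --policy-arn arn:aws:iam::aws:policy/AmazonEC2ContainerRegistryFullAccess
aws iam attach-role-policy --role-name <EC2_ROLE_NAME> --policy-arn arn:aws:iam::aws:policy/AmazonS3FullAccess
aws iam attach-role-policy --role-name <EC2_ROLE_NAME> --policy-arn arn:aws:iam::aws:policy/AmazonDynamoDBFullAccess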

Create a DynamoDB table with id as the partition key, and also create an S3 bucket.
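If you prefer the CLI over the console for this step, something along these lines should work (the table and bucket names are placeholders; use your own):

aws dynamodb create-table --table-name <DYNAMODB_TABLE_NAME> --attribute-definitions AttributeName=id,AttributeType=S --key-schema AttributeName=id,KeyType=HASH --billing-mode PAY_PER_REQUEST --region us-east-1
aws s3 mb s3://<S3_BUCKET_NAME> --region us-east-1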

Step 2: SSH into the EC2 instance and execute the following commands:

sudo su -
yum install -y docker
service docker start
wget -nv https://d3rnber7ry90et.cloudfront.net/linux-x86_64/node-v18.17.1.tar.gz
mkdir /usr/local/lib/node
tar -xf node-v18.17.1.tar.gz
mv node-v18.17.1 /usr/local/lib/node/nodejs
echo "export NVM_DIR=''" >> /home/ec2-user/.bashrc
echo "export NODEJS_HOME=/usr/local/lib/node/nodejs" >> /home/ec2-user/.bashrc
echo "export PATH=\$NODEJS_HOME/bin:\$PATH" >> /home/ec2-user/.bashrc
. /home/ec2-user/.bashrc
node -e "console.log('Running Node.js ' + process.version)"

mkdir lambda-app && cd lambda-app
npm init -y
npm install aws-sdk
mkdir -p src src/services

Step 3: Create the following files:

Dockerfile

FROM public.ecr.aws/lambda/nodejs:18

WORKDIR /var/task

COPY package*.json ./
RUN npm install

COPY src ./src

CMD [ "src/index.handler" ]

package.json

{
  "name": "lambda-app",
  "version": "1.0.0",
  "description": "AWS Lambda container example",
  "main": "src/index.js",
  "scripts": {
    "start": "node src/index.js"
  },
  "dependencies": {
    "aws-sdk": "^2.1420.0"
  }
}

src/index.js

const { uploadFile } = require('./services/s3Service');
const { putItem, getItem } = require('./services/dynamoService');

exports.handler = async (event) => {
  try {
    let body;

    if (typeof event.body === 'string') {
      body = JSON.parse(event.body);
    } else if (typeof event.body === 'object') {
      body = event.body;
    } else {
      throw new Error("Missing or invalid request body");
    }

    const { fileName, fileContent, id, data } = body;

    if (!fileName || !fileContent || !id || !data) {
      throw new Error("Missing required fields: fileName, fileContent, id, data");
    }

    const s3Response = await uploadFile(fileName, fileContent);
    await putItem(id, data);
    const item = await getItem(id);

    return {
      statusCode: 200,
      body: JSON.stringify({
        message: 'Operation successful',
        s3: s3Response,
        dynamoData: item,
      }),
    };
  } catch (error) {
    console.error('Error:', error);
    return {
      statusCode: 500,
      body: JSON.stringify({ error: error.message }),
    };
  }
};

src/services/dynamoService.js

const AWS = require('aws-sdk');
const db = new AWS.DynamoDB.DocumentClient();

const TABLE = process.env.TABLE_NAME;

exports.putItem = async (id, data) => {
  const params = {
    TableName: TABLE,
    Item: {
      id,
      ...data,
    },
  };
  return db.put(params).promise();
};

exports.getItem = async (id) => {
  const params = {
    TableName: TABLE,
    Key: { id },
  };
  const result = await db.get(params).promise();
  return result.Item;
};

src/services/s3Service.js

const AWS = require('aws-sdk');
const s3 = new AWS.S3();

const BUCKET = process.env.BUCKET_NAME;

exports.uploadFile = async (fileName, fileContent) => {
  const params = {
    Bucket: BUCKET,
    Key: fileName,
    Body: fileContent,
  };
  return s3.upload(params).promise();
};

Step 4: Execute the following commands to build the Docker image and run it locally:

docker build -t lambda-app .
docker images
docker run -p 9000:8080 -e BUCKET_NAME=<S3 bucket name> -e TABLE_NAME=<DynamoDB table name> -e AWS_REGION=us-east-1 lambda-app

and make a curl request to the Lambda Runtime Interface Emulator that ships with the base image:

curl -XPOST http://localhost:9000/2015-03-31/functions/function/invocations   -H "Content-Type: application/json"   -d '{
"body": "{\"fileName\":\"hello.txt\",\"fileContent\":\"Hello from Lambda!\",\"id\":\"1989\",\"data\":{\"hello\":\"world\"}}"
}'
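If everything is wired up correctly, the response should look roughly like this (the s3 object is abbreviated; fields such as Location and ETag will differ for your bucket):

{"statusCode":200,"body":"{\"message\":\"Operation successful\",\"s3\":{\"Bucket\":\"<S3_BUCKET_NAME>\",\"Key\":\"hello.txt\",\"Location\":\"https://<S3_BUCKET_NAME>.s3.amazonaws.com/hello.txt\"},\"dynamoData\":{\"id\":\"1989\",\"hello\":\"world\"}}"}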

Now check your S3 bucket: the file should have been uploaded, and a record should have been inserted into the DynamoDB table.

Step 5: Create an ECR repository and push the image to it.
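The repository can be created from the CLI as well (the repository name is a placeholder; any valid name works):

aws ecr create-repository --repository-name <REPOSITORY_NAME> --region us-east-1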

aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin <AWS_ACCOUNT_ID>.dkr.ecr.us-east-1.amazonaws.com
docker tag lambda-app:latest <AWS_ACCOUNT_ID>.dkr.ecr.us-east-1.amazonaws.com/<REPOSITORY_NAME>:latest
docker push <AWS_ACCOUNT_ID>.dkr.ecr.us-east-1.amazonaws.com/<REPOSITORY_NAME>:latest

Step 6: Create a Lambda function from this image and set the following environment variables:

BUCKET_NAME <S3_BUCKET_NAME>
TABLE_NAME <DYNAMODB_TABLE_NAME>
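If you'd rather do this step from the CLI than the console, a sketch along these lines should work (the function name, role ARN, and other placeholders are assumptions; adjust them to your setup):

aws lambda create-function \
  --function-name lambda-app \
  --package-type Image \
  --code ImageUri=<AWS_ACCOUNT_ID>.dkr.ecr.us-east-1.amazonaws.com/<REPOSITORY_NAME>:latest \
  --role arn:aws:iam::<AWS_ACCOUNT_ID>:role/<LAMBDA_ROLE_NAME> \
  --environment "Variables={BUCKET_NAME=<S3_BUCKET_NAME>,TABLE_NAME=<DYNAMODB_TABLE_NAME>}" \
  --region us-east-1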

Grant full ECR, S3, and DynamoDB permissions to the Lambda function's execution role, then create a test event with the following body and execute it:

{
  "body": "{\"fileName\":\"hello2.txt\",\"fileContent\":\"Hello from Lambda!\",\"id\":\"125\",\"data\":{\"hello\":\"world\"}}"
}

Now check your S3 bucket again: another file should have been uploaded, and a new record should have been inserted into the DynamoDB table.
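You can also verify from the CLI (the table and bucket names are placeholders):

aws s3 ls s3://<S3_BUCKET_NAME>/
aws dynamodb get-item --table-name <DYNAMODB_TABLE_NAME> --key '{"id": {"S": "125"}}' --region us-east-1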

Written by Vinayak Pandey

Experienced Cloud Engineer with a knack for automation. LinkedIn profile: https://www.linkedin.com/in/vinayakpandeyit/
