diff --git a/kinesis/datastreams/kpl-example/target/classes/co/exampro/App.class b/kinesis/datastreams/kpl-example/target/classes/co/exampro/App.class
index d657ad9..83f3a40 100644
Binary files a/kinesis/datastreams/kpl-example/target/classes/co/exampro/App.class and b/kinesis/datastreams/kpl-example/target/classes/co/exampro/App.class differ
diff --git a/s3/bash-scripts/create-bucket b/s3/bash-scripts/create-bucket
deleted file mode 100755
index ec3c10e..0000000
--- a/s3/bash-scripts/create-bucket
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-echo "== create bucket"
-
-# Check for bucket name
-if [ -z "$1" ]; then
- echo "There needs to be a bucket name eg. ./bucket my-bucket-name"
- exit 1
-fi
-
-BUCKET_NAME=$1
-
-# https://docs.aws.amazon.com/cli/latest/reference/s3api/create-bucket.html
-aws s3api create-bucket \
---bucket $BUCKET_NAME \
---create-bucket-configuration="LocationConstraint=ca-central-1" \
---query Location \
---output text
\ No newline at end of file
diff --git a/s3/bash-scripts/create-bucket.sh b/s3/bash-scripts/create-bucket.sh
new file mode 100755
index 0000000..94abc13
--- /dev/null
+++ b/s3/bash-scripts/create-bucket.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+# Prompt user to enter bucket name
+read -p "Enter the bucket name: " BUCKET_NAME
+
+# Prompt user to enter region
+read -p "Enter the region: " REGION
+
+# Check if either bucket name or region is empty
+if [ -z "$BUCKET_NAME" ] || [ -z "$REGION" ]; then
+ echo "Both bucket name and region must be provided."
+ exit 1
+fi
+
+# Create the bucket with the specified name and region
+# (us-east-1 is the default location and must not be given a LocationConstraint)
+if [ "$REGION" = "us-east-1" ]; then
+    aws s3api create-bucket --bucket "$BUCKET_NAME" --region "$REGION"
+else
+    aws s3api create-bucket --bucket "$BUCKET_NAME" --region "$REGION" \
+        --create-bucket-configuration LocationConstraint="$REGION"
+fi
+
+# Check if bucket creation was successful
+if [ $? -eq 0 ]; then
+ echo "Bucket '$BUCKET_NAME' created successfully in region '$REGION'."
+else
+ echo "Failed to create bucket '$BUCKET_NAME' in region '$REGION'."
+fi
diff --git a/s3/bash-scripts/delete-bucket b/s3/bash-scripts/delete-bucket
deleted file mode 100755
index 7bb1300..0000000
--- a/s3/bash-scripts/delete-bucket
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-echo "== delete bucket"
-
-# Check for bucket name
-if [ -z "$1" ]; then
- echo "There needs to be a bucket name eg. ./bucket my-bucket-name"
- exit 1
-fi
-
-BUCKET_NAME=$1
-
-# https://docs.aws.amazon.com/cli/latest/reference/s3api/create-bucket.html
-aws s3api delete-bucket \
---bucket $BUCKET_NAME
\ No newline at end of file
diff --git a/s3/bash-scripts/delete-bucket.sh b/s3/bash-scripts/delete-bucket.sh
new file mode 100755
index 0000000..2095da8
--- /dev/null
+++ b/s3/bash-scripts/delete-bucket.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+echo "List of existing buckets:"
+# List existing buckets
+buckets=$(aws s3api list-buckets --query 'Buckets[].Name' --output text)
+
+# Check if there are any buckets
+if [ -z "$buckets" ]; then
+ echo "No buckets found."
+ exit 0
+fi
+
+# Print each bucket name with index
+echo "Available buckets:"
+select bucket in $buckets; do
+ break
+done
+
+# Prompt user for confirmation before deleting the bucket
+read -p "Are you sure you want to delete bucket '$bucket'? (yes/no): " confirmation
+
+# Check user confirmation
+if [ "$confirmation" != "yes" ]; then
+ echo "Bucket deletion canceled."
+ exit 0
+fi
+
+# Delete the specified bucket
+aws s3api delete-bucket --bucket "$bucket"
+
+# Check if bucket deletion was successful
+if [ $? -eq 0 ]; then
+ echo "Bucket '$bucket' deleted successfully."
+else
+ echo "Failed to delete bucket '$bucket'."
+fi
diff --git a/s3/bash-scripts/delete-objects b/s3/bash-scripts/delete-objects
deleted file mode 100755
index 4a898b7..0000000
--- a/s3/bash-scripts/delete-objects
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env bash
-echo "== delete-objects"
-
-# Exit immediately if any command returns a non-zero status
-set -e
-
-# Check for bucket name
-if [ -z "$1" ]; then
- echo "There needs to be a bucket name eg. ./bucket my-bucket-name"
- exit 1
-fi
-
-BUCKET_NAME=$1
-
-aws s3api list-objects-v2 \
---bucket $BUCKET_NAME \
---query Contents[].Key \
-| jq -n '{Objects: [inputs | .[] | {Key: .}]}' > /tmp/delete_objects.json
-
-aws s3api delete-objects --bucket $BUCKET_NAME --delete file:///tmp/delete_objects.json
diff --git a/s3/bash-scripts/delete-objects.sh b/s3/bash-scripts/delete-objects.sh
new file mode 100755
index 0000000..9647f92
--- /dev/null
+++ b/s3/bash-scripts/delete-objects.sh
@@ -0,0 +1,83 @@
+#!/bin/bash
+
+echo "== Delete S3 Objects =="
+
+# List all S3 buckets
+buckets=$(aws s3api list-buckets --query "Buckets[].Name" --output text)
+
+# Check if there are any buckets
+if [ -z "$buckets" ]; then
+ echo "No buckets found."
+ exit 0
+fi
+
+# Print each bucket name with index
+echo "Available buckets:"
+select bucket in $buckets; do
+ break
+done
+
+# List objects in the selected bucket
+objects=$(aws s3 ls "s3://$bucket" --recursive --human-readable)
+
+# Check if there are any objects
+if [ -z "$objects" ]; then
+ echo "No objects found in bucket '$bucket'."
+ exit 0
+fi
+
+# Print objects in the selected bucket with index
+echo "Objects in bucket '$bucket':"
+echo "$objects" | awk '{print NR, $0}'
+
+# Prompt user to choose to delete all objects or specific ones
+read -p "Do you want to delete all objects? (yes/no): " DELETE_ALL
+
+# Check user's choice
+if [ "$DELETE_ALL" = "yes" ]; then
+ # Confirm with user before deleting all objects
+ read -p "Are you sure you want to delete all objects from bucket '$bucket'? (yes/no): " CONFIRM
+ if [ "$CONFIRM" != "yes" ]; then
+ echo "Deletion canceled."
+ exit 0
+ fi
+
+ # Delete all objects from the bucket
+ aws s3 rm "s3://$bucket" --recursive
+
+ # Check if all objects deletion was successful
+ if [ $? -eq 0 ]; then
+ echo "All objects deleted successfully from bucket '$bucket'."
+ else
+ echo "Failed to delete all objects from bucket '$bucket'."
+ fi
+else
+ # Prompt user to enter the index of the object to delete
+ read -p "Enter the index of the object to delete (or 0 to cancel): " object_index
+
+ # Validate user input
+ if [ "$object_index" -eq 0 ]; then
+ echo "Deletion canceled."
+ exit 0
+ fi
+
+ # Extract the object key from the selected index
+ object_key=$(echo "$objects" | sed -n "${object_index}p" | awk '{print $NF}')
+
+ # Confirm with user before deleting the object
+ read -p "Are you sure you want to delete object '$object_key' from bucket '$bucket'? (yes/no): " CONFIRM
+ if [ "$CONFIRM" != "yes" ]; then
+ echo "Deletion canceled."
+ exit 0
+ fi
+
+ # Delete the selected object from the bucket
+ aws s3 rm "s3://$bucket/$object_key"
+
+ # Check if object deletion was successful
+ if [ $? -eq 0 ]; then
+ echo "Object '$object_key' deleted successfully from bucket '$bucket'."
+ else
+ echo "Failed to delete object '$object_key' from bucket '$bucket'."
+ fi
+fi
diff --git a/s3/bash-scripts/get-newest-bucket b/s3/bash-scripts/get-newest-bucket
deleted file mode 100755
index 6f25fba..0000000
--- a/s3/bash-scripts/get-newest-bucket
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env bash
-aws s3api list-buckets | jq -r '.Buckets | sort_by(.CreationDate) | reverse | .[0] | .Name'
\ No newline at end of file
diff --git a/s3/bash-scripts/list-buckets b/s3/bash-scripts/list-buckets
deleted file mode 100755
index 87ae21a..0000000
--- a/s3/bash-scripts/list-buckets
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-echo "== list newest buckets"
-
-aws s3api list-buckets | jq -r '.Buckets | sort_by(.CreationDate) | reverse | .[0:5] | .[] | .Name'
-echo "..."
\ No newline at end of file
diff --git a/s3/bash-scripts/list-buckets.sh b/s3/bash-scripts/list-buckets.sh
new file mode 100755
index 0000000..1508bcb
--- /dev/null
+++ b/s3/bash-scripts/list-buckets.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+echo "== Listing S3 Buckets =="
+
+# List all S3 buckets along with their creation dates
+buckets_with_dates=$(aws s3 ls | awk '{print $3, $1, $2}')
+
+# Check if there are any buckets
+if [ -z "$buckets_with_dates" ]; then
+ echo "No buckets found."
+ exit 0
+fi
+
+# Sort buckets by creation date, newest first
+newest_first=$(echo "$buckets_with_dates" | sort -k 2 -r)
+
+# Extract bucket names from the sorted list
+bucket_names=$(echo "$newest_first" | awk '{print $1}')
+
+# Print each bucket name
+echo "Buckets (newest first):"
+echo "$bucket_names"
diff --git a/s3/bash-scripts/list-objects b/s3/bash-scripts/list-objects
deleted file mode 100755
index a6d0880..0000000
--- a/s3/bash-scripts/list-objects
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-echo "== list objects"
-
-# Check for bucket name
-if [ -z "$1" ]; then
- echo "There needs to be a bucket name eg. ./bucket my-bucket-name"
- exit 1
-fi
-
-BUCKET_NAME=$1
-
-# https://docs.aws.amazon.com/cli/latest/reference/s3api/create-bucket.html
-aws s3api list-objects-v2 \
---bucket $BUCKET_NAME
\ No newline at end of file
diff --git a/s3/bash-scripts/list-objects.sh b/s3/bash-scripts/list-objects.sh
new file mode 100755
index 0000000..dc455f5
--- /dev/null
+++ b/s3/bash-scripts/list-objects.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+# Prompt user to enter the bucket name
+read -p "Enter the bucket name: " BUCKET_NAME
+
+# Check if bucket name is empty
+if [ -z "$BUCKET_NAME" ]; then
+ echo "Bucket name must be provided."
+ exit 1
+fi
+
+# List objects in the specified bucket
+aws s3api list-objects --bucket "$BUCKET_NAME"
diff --git a/s3/bash-scripts/put-object b/s3/bash-scripts/put-object
deleted file mode 100755
index 40d685b..0000000
--- a/s3/bash-scripts/put-object
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-echo "== put-object"
-
-# Check for bucket name
-if [ -z "$1" ]; then
- echo "There needs to be a bucket name eg. ./bucket my-bucket-name"
- exit 1
-fi
-
-if [ -z "$2" ]; then
- echo "There needs to be a filename eg. ./bucket my-bucket-name filename"
- exit 1
-fi
-BUCKET_NAME=$1
-FILENAME=$2
-
-OBJECT_KEY=$(basename "$FILENAME")
-
-aws s3api put-object \
---bucket $BUCKET_NAME \
---body $FILENAME \
---key $OBJECT_KEY
\ No newline at end of file
diff --git a/s3/bash-scripts/put-object.sh b/s3/bash-scripts/put-object.sh
new file mode 100755
index 0000000..97a002c
--- /dev/null
+++ b/s3/bash-scripts/put-object.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+# Set the script to exit immediately if any command fails
+set -e
+
+# Heading
+echo "Creating 5 Random Files and Displaying Directory Tree"
+
+# Define the output directory
+output_dir="/tmp/s3-temp-bash-scripts"
+
+# Remove the directory if it already exists
+if [ -d "$output_dir" ]; then
+ echo "Removing existing directory: $output_dir"
+ rm -rf "$output_dir"
+fi
+
+# Create the directory
+mkdir -p "$output_dir"
+
+# Generate 5 random files
+for i in {1..5}; do
+ # Generate a random filename
+ filename="$output_dir/file$i.txt"
+ # Generate random content
+ content=$(openssl rand -base64 32)
+ # Write content to file
+ echo "$content" > "$filename"
+ echo "Created file: $filename"
+done
+
+# Display directory tree
+echo "Directory Tree:"
+tree "$output_dir"
+
+# Prompt for the destination bucket and upload each generated file
+read -p "Enter the bucket name to upload to: " BUCKET_NAME
+if [ -z "$BUCKET_NAME" ]; then
+    echo "Bucket name must be provided."
+    exit 1
+fi
+
+for file in "$output_dir"/*.txt; do
+    OBJECT_KEY=$(basename "$file")
+    aws s3api put-object \
+        --bucket "$BUCKET_NAME" \
+        --body "$file" \
+        --key "$OBJECT_KEY"
+    echo "Uploaded: $OBJECT_KEY"
+done
diff --git a/s3/bash-scripts/sync b/s3/bash-scripts/sync
deleted file mode 100755
index 2e89a33..0000000
--- a/s3/bash-scripts/sync
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env bash
-echo "== sync"
-# Exit immediately if any command returns a non-zero status
-set -e
-
-# Check for bucket name
-if [ -z "$1" ]; then
- echo "There needs to be a bucket name eg. ./bucket my-bucket-name"
- exit 1
-fi
-
-
-# Check for filename prefix
-if [ -z "$2" ]; then
- echo "There needs to be a filename prefix eg. ./bucket my-bucket-name my-filename-prefix"
- exit 1
-fi
-
-BUCKET_NAME=$1
-FILENAME_PREFIX=$2
-
-# where we'll store these files
-OUTPUT_DIR="/tmp/s3-bash-scripts"
-
-# remove folder if it already exists
-rm -r $OUTPUT_DIR
-
-# lets create output folder
-mkdir -p $OUTPUT_DIR
-
-# generate a random number
-# to determine how many files to create
-NUM_FILES=$((RANDOM % 6 + 5))
-
-for ((i=1; i<=$NUM_FILES; i++)); do
- # Generate a random filename
- FILENAME="$OUTPUT_DIR/${FILENAME_PREFIX}_$i.txt"
-
- # Generate random data and write it to the file
- dd if=/dev/urandom of="$FILENAME" bs=1024 count=$((RANDOM % 1024 + 1)) 2>/dev/null
-done
-
-tree $OUTPUT_DIR
-
-aws s3 sync $OUTPUT_DIR s3://$BUCKET_NAME/files
\ No newline at end of file
diff --git a/s3/bash-scripts/sync.sh b/s3/bash-scripts/sync.sh
new file mode 100755
index 0000000..a9951fc
--- /dev/null
+++ b/s3/bash-scripts/sync.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+echo "== Sync"
+
+# Check if bucket name is provided as argument
+if [ -z "$1" ]; then
+ # Prompt user to enter the bucket name
+ read -p "Enter the bucket name: " BUCKET_NAME
+
+ # Check if bucket name is provided
+ if [ -z "$BUCKET_NAME" ]; then
+ echo "There needs to be a bucket name."
+ exit 1
+ fi
+
+ # Prompt user to enter the filename prefix
+ read -p "Enter the filename prefix: " FILENAME_PREFIX
+
+ # Check if filename prefix is provided
+ if [ -z "$FILENAME_PREFIX" ]; then
+ echo "There needs to be a filename prefix."
+ exit 1
+ fi
+else
+    BUCKET_NAME=$1
+    FILENAME_PREFIX=$2
+
+    # The prefix may be omitted on the command line; prompt for it if so
+    if [ -z "$FILENAME_PREFIX" ]; then
+        read -p "Enter the filename prefix: " FILENAME_PREFIX
+    fi
+fi
+
+# where we'll store these files
+OUTPUT_DIR="/tmp/s3-bash-scripts"
+
+# remove folder if it already exists
+rm -rf "$OUTPUT_DIR"
+
+# lets create output folder
+mkdir -p $OUTPUT_DIR
+
+# generate a random number
+# to determine how many files to create
+NUM_FILES=$((RANDOM % 6 + 5))
+
+for ((i=1; i<=$NUM_FILES; i++)); do
+ # Generate a random filename
+ FILENAME="$OUTPUT_DIR/${FILENAME_PREFIX}_$i.txt"
+
+ # Generate random data and write it to the file
+ dd if=/dev/urandom of="$FILENAME" bs=1024 count=$((RANDOM % 1024 + 1)) 2>/dev/null
+done
+
+tree $OUTPUT_DIR
+
+# Sync the files to the specified S3 bucket
+aws s3 sync $OUTPUT_DIR s3://$BUCKET_NAME/files
+
+# Check if synchronization was successful
+if [ $? -eq 0 ]; then
+ echo "Synchronization to bucket '$BUCKET_NAME' successful."
+else
+ echo "Failed to synchronize files to bucket '$BUCKET_NAME'."
+fi
diff --git a/s3/iac/cfn/delete-stack b/s3/iac/cfn/delete-stack
deleted file mode 100755
index 2e50a3d..0000000
--- a/s3/iac/cfn/delete-stack
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env bash
-
-echo "== delete stack for s3 bucket via CFN"
-
-STACK_NAME="cfn-s3-simple"
-
-aws cloudformation delete-stack \
---stack-name $STACK_NAME \
---region us-west-2
\ No newline at end of file
diff --git a/s3/iac/cfn/delete-stack.sh b/s3/iac/cfn/delete-stack.sh
new file mode 100755
index 0000000..5bbd4fe
--- /dev/null
+++ b/s3/iac/cfn/delete-stack.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+echo "== Delete S3 bucket via CloudFormation =="
+
+STACK_NAME="cfn-s3-simple"
+REGION="us-east-1"
+
+# Delete CloudFormation stack
+aws cloudformation delete-stack \
+--region $REGION \
+--stack-name $STACK_NAME
+
+# Wait for the stack to be deleted
+echo "Waiting for stack to be deleted..."
+aws cloudformation wait stack-delete-complete \
+--region $REGION \
+--stack-name $STACK_NAME
+
+echo "== Deletion complete =="
diff --git a/s3/iac/cfn/deploy b/s3/iac/cfn/deploy
deleted file mode 100755
index 6f46d67..0000000
--- a/s3/iac/cfn/deploy
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-echo "== deploy s3 bucket via CFN"
-
-STACK_NAME="cfn-s3-simple"
-
-aws cloudformation deploy \
---template-file template.yaml \
---no-execute-changeset \
---region us-west-2 \
---stack-name $STACK_NAME
\ No newline at end of file
diff --git a/s3/iac/cfn/deploy.sh b/s3/iac/cfn/deploy.sh
new file mode 100755
index 0000000..aeedd97
--- /dev/null
+++ b/s3/iac/cfn/deploy.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+echo "== Deploy S3 bucket via CloudFormation =="
+
+# Define stack name and AWS region
+STACK_NAME="cfn-s3-simple"
+REGION="us-east-1"
+
+# Create a change set for the stack without executing it
+aws cloudformation deploy \
+  --template-file template.yaml \
+  --no-execute-changeset \
+  --region $REGION \
+  --stack-name $STACK_NAME
+
+echo "== Change set created =="
diff --git a/s3/iac/pulumi/.gitignore b/s3/iac/pulumi/.gitignore
new file mode 100644
index 0000000..a3807e5
--- /dev/null
+++ b/s3/iac/pulumi/.gitignore
@@ -0,0 +1,2 @@
+*.pyc
+venv/
diff --git a/s3/iac/pulumi/Pulumi.dev.yaml b/s3/iac/pulumi/Pulumi.dev.yaml
new file mode 100644
index 0000000..1a38cef
--- /dev/null
+++ b/s3/iac/pulumi/Pulumi.dev.yaml
@@ -0,0 +1,2 @@
+config:
+ aws:region: us-east-1
diff --git a/s3/iac/pulumi/Pulumi.yaml b/s3/iac/pulumi/Pulumi.yaml
new file mode 100644
index 0000000..39e3788
--- /dev/null
+++ b/s3/iac/pulumi/Pulumi.yaml
@@ -0,0 +1,10 @@
+name: my-pulumi-project
+runtime:
+ name: python
+ options:
+ virtualenv: venv
+description: A minimal AWS Python Pulumi program
+config:
+ pulumi:tags:
+ value:
+ pulumi:template: aws-python
diff --git a/s3/iac/pulumi/__main__.py b/s3/iac/pulumi/__main__.py
new file mode 100644
index 0000000..dfb3e5a
--- /dev/null
+++ b/s3/iac/pulumi/__main__.py
@@ -0,0 +1,14 @@
+"""An AWS Python Pulumi program"""
+
+import pulumi
+from pulumi_aws import s3
+
+# Create an AWS resource (S3 Bucket)
+bucket = s3.Bucket('my-bucket')
+
+# Export the name of the bucket
+pulumi.export('bucket_name', bucket.id)
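+
+# Preview and deploy this stack with the Pulumi CLI (run from this directory):
+#   pulumi preview   # show the planned changes
+#   pulumi up        # create/update the bucket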
diff --git a/s3/iac/pulumi/requirements.txt b/s3/iac/pulumi/requirements.txt
new file mode 100644
index 0000000..72aee79
--- /dev/null
+++ b/s3/iac/pulumi/requirements.txt
@@ -0,0 +1,2 @@
+pulumi>=3.0.0,<4.0.0
+pulumi-aws>=6.0.2,<7.0.0
diff --git a/s3/iac/terraform/.terraform.lock.hcl b/s3/iac/terraform/.terraform.lock.hcl
index adbc211..911c1d0 100644
--- a/s3/iac/terraform/.terraform.lock.hcl
+++ b/s3/iac/terraform/.terraform.lock.hcl
@@ -2,24 +2,24 @@
# Manual edits may be lost in future updates.
provider "registry.terraform.io/hashicorp/aws" {
- version = "5.26.0"
- constraints = "5.26.0"
+ version = "5.53.0"
+ constraints = "5.53.0"
hashes = [
- "h1:UkBMGEScvNP+9JDzKXGrgj931LngYpIB8TBBUY+mvdg=",
- "zh:11a4062491e574c8e96b6bc7ced67b5e9338ccfa068223fc9042f9e1e7eda47a",
- "zh:4331f85aeb22223ab656d04b48337a033f44f02f685c8def604c4f8f4687d10f",
- "zh:915d6c996390736709f7ac7582cd41418463cfc07696218af6fea4a282df744a",
- "zh:9306c306dbb2e1597037c54d20b1bd5f22a9cdcdb2b2b7bad657c8230bea2298",
- "zh:93371860b9df369243219606711bfd3cfbd263db67838c06d5d5848cf47b6ede",
- "zh:98338c17764a7b9322ddb6efd3af84e6890a4a0687f846eefdfb0fa03cec892d",
+ "h1:JdKy+/hyc91nHvVZggxlaFVYOkaLxSmR86MkRUpgX30=",
+ "zh:2adad39412111d19a5195474d6b95577fc25ccf06d88a90019bee0efba33a1e3",
+ "zh:51226453a14f95b0d1163cfecafc9cf1a92ce5f66e42e6b4065d83a813836a2c",
+ "zh:62450fadb56db9c18d50bb8b7728a3d009be608d7ee0d4fe95c85ccb521dff83",
+ "zh:6f3ad977a9cc4800847c136690b1c0a0fd8437705062163d29dc4e9429598950",
+ "zh:71ca0a16b735b8d34b7127dd7d1e1e5d1eaac9c9f792e08abde291b5beb947d5",
+ "zh:7ae9cf4838eea80288305be0a3e69b39ffff86ede7b4319be421f06d32d04fb6",
+ "zh:93abc2db5ad995cfee014eb7446abc7caedc427e141d375a11993e6e199076b5",
+ "zh:9560b3424d97da804e98ee86b474b7370afefa09baf350cae7f33afb3f1aa209",
"zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
- "zh:a28c9d77a5be25bac42d99418365757e4eb65a2c7c6788828263772cf2774869",
- "zh:bd9c4648a090622d6b8c3c91dad513eec81e54db3dfe940ab6d155e5f37735e5",
- "zh:bde63db136cccdeb282489e2ec2b3f9a7566edc9df27911a296352ab00832261",
- "zh:ccd33f9490ce3f2d89efab995abf3b30e75579585f6a8a5b1f756246903d3518",
- "zh:d73d1c461eb9d22833251f6533fc214cf014bc1d3165c5bfaa8ca29cd295ffb2",
- "zh:db4ffb7eec5d0e1d0dbd0d65e1a3eaa6173a3337058105aec41fd0b2af5a2b46",
- "zh:eb36b933419e9f6563330f3b7d53d4f1b09e62d27f7786d5dc6c4a2d0f6de182",
- "zh:ec85ce1976e43f7d7fa10fa191c0a85e97326a3cb22387c0ed8b74d426ec94fd",
+ "zh:9eb57a9b649c217ac4eeb27af2a1935c18bd9bc8fb1be07434e7de74729eff46",
+ "zh:b5f32dcbe71ea22c2090eeeaec9af3e098d7b8c3e4491f34ffdfdc6f1c1abf81",
+ "zh:c9fbd5417f266c773055178e87bb4091df7f0542b72bf5ad0a4ae27045a2b7ca",
+ "zh:d518b3c52c8a9f79769dbe1b3683d25b4cdc8bfc77a3b3cd9c85f74e6c7383e1",
+ "zh:db741be21f32404bb87d73d25b1b7fd9b813b00aeb20a130ed8806d44dc26680",
+ "zh:ed1a8bb4d08653d87265ae534d6fc33bbdabae1608692a1ee364fce03548d36c",
]
}
diff --git a/s3/iac/terraform/main.tf b/s3/iac/terraform/main.tf
index 45cfede..17ecf77 100644
--- a/s3/iac/terraform/main.tf
+++ b/s3/iac/terraform/main.tf
@@ -2,7 +2,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = "5.26.0"
+ version = "5.53.0"
}
}
}
diff --git a/s3/powershell-scripts/aws-configure.ps1 b/s3/powershell-scripts/aws-configure.ps1
new file mode 100755
index 0000000..edc210f
--- /dev/null
+++ b/s3/powershell-scripts/aws-configure.ps1
@@ -0,0 +1,21 @@
+# Prompt user to enter AWS access key
+$AccessKey = Read-Host "Enter your AWS access key"
+
+# Prompt user to enter AWS secret key
+$SecretKey = Read-Host "Enter your AWS secret key" -AsSecureString
+
+# Prompt user to enter AWS region
+$Region = Read-Host "Enter your AWS region"
+
+# Convert the secure string back to plain text so it can be stored in the credential profile
+$BSTR = [Runtime.InteropServices.Marshal]::SecureStringToBSTR($SecretKey)
+$SecretKeyPlain = [Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
+
+# Set AWS credentials using the provided access key and secret key
+Set-AWSCredential -AccessKey $AccessKey -SecretKey $SecretKeyPlain -StoreAs "MyProfile"
+
+# Set default AWS region
+Set-DefaultAWSRegion -Region $Region
+
+# Display confirmation message
+Write-Host "AWS credentials and region set successfully."
diff --git a/s3/powershell-scripts/create-bucket.ps1 b/s3/powershell-scripts/create-bucket.ps1
new file mode 100755
index 0000000..107b89b
--- /dev/null
+++ b/s3/powershell-scripts/create-bucket.ps1
@@ -0,0 +1,15 @@
+# Prompt user to enter the bucket name
+$BucketName = Read-Host "Enter the name for the new S3 bucket"
+
+# Prompt user to enter the region
+$Region = Read-Host "Enter the region for the new S3 bucket"
+
+# Create a new S3 bucket
+New-S3Bucket -BucketName $BucketName -Region $Region -ProfileName "MyProfile"
+
+# Check if the bucket was created successfully
+if ($?) {
+ Write-Host "Bucket '$BucketName' created successfully."
+} else {
+ Write-Host "Failed to create bucket '$BucketName'."
+}
diff --git a/s3/powershell-scripts/delete-bucket.ps1 b/s3/powershell-scripts/delete-bucket.ps1
new file mode 100755
index 0000000..16d3eba
--- /dev/null
+++ b/s3/powershell-scripts/delete-bucket.ps1
@@ -0,0 +1,37 @@
+# List existing buckets
+Write-Host "List of existing buckets:"
+$buckets = Get-S3Bucket | Select-Object -ExpandProperty BucketName
+
+# Check if there are any buckets
+if (-not $buckets) {
+ Write-Host "No buckets found."
+ exit 0
+}
+
+# Print each bucket name with index
+Write-Host "Available buckets:"
+for ($i = 0; $i -lt $buckets.Count; $i++) {
+ Write-Host "$($i + 1). $($buckets[$i])"
+}
+
+# Prompt user to select a bucket by index
+do {
+ $index = Read-Host "Enter the number corresponding to the bucket you want to delete"
+} while (-not ($index -ge 1 -and $index -le $buckets.Count))
+
+$selectedBucket = $buckets[$index - 1]
+
+# Prompt user for confirmation before deleting the bucket
+$confirmation = Read-Host "Are you sure you want to delete bucket '$selectedBucket'? (yes/no)"
+if ($confirmation -ne "yes") {
+ Write-Host "Bucket deletion canceled."
+ exit 0
+}
+
+# Delete the specified bucket
+try {
+ Remove-S3Bucket -BucketName $selectedBucket -Force -Confirm:$false
+ Write-Host "Bucket '$selectedBucket' deleted successfully."
+} catch {
+ Write-Host "Failed to delete bucket '$selectedBucket'."
+}
diff --git a/s3/powershell-scripts/delete-objects.ps1 b/s3/powershell-scripts/delete-objects.ps1
new file mode 100755
index 0000000..2a3a179
--- /dev/null
+++ b/s3/powershell-scripts/delete-objects.ps1
@@ -0,0 +1,74 @@
+# List existing S3 buckets
+Write-Host "== Delete S3 Objects =="
+
+# Get list of S3 buckets
+$buckets = Get-S3Bucket
+
+# Check if there are any buckets
+if ($buckets.Count -eq 0) {
+ Write-Host "No buckets found."
+ exit
+}
+
+# Print each bucket name with index
+Write-Host "Available buckets:"
+for ($i = 0; $i -lt $buckets.Count; $i++) {
+ Write-Host "$($i + 1). $($buckets[$i].BucketName)"
+}
+
+# Prompt user to choose a bucket
+$bucketIndex = Read-Host "Enter the index of the bucket you want to delete objects from (or '0' to cancel)"
+if ($bucketIndex -eq 0) {
+ Write-Host "Deletion canceled."
+ exit
+}
+
+# Get the selected bucket name
+$selectedBucket = $buckets[$bucketIndex - 1].BucketName
+
+# List objects in the selected bucket
+$objects = Get-S3Object -BucketName $selectedBucket
+
+# Check if there are any objects
+if ($objects.Count -eq 0) {
+ Write-Host "No objects found in bucket '$selectedBucket'."
+ exit
+}
+
+# Print objects in the selected bucket with index
+Write-Host "Objects in bucket '$selectedBucket':"
+for ($i = 0; $i -lt $objects.Count; $i++) {
+ Write-Host "$($i + 1). $($objects[$i].Key)"
+}
+
+# Prompt user to choose to delete all objects or specific ones
+$deleteAll = Read-Host "Do you want to delete all objects? (yes/no)"
+if ($deleteAll.ToLower() -eq "yes") {
+ # Confirm with user before deleting all objects
+ $confirm = Read-Host "Are you sure you want to delete all objects from bucket '$selectedBucket'? (yes/no)"
+ if ($confirm.ToLower() -eq "yes") {
+        Remove-S3Object -BucketName $selectedBucket -KeyCollection $objects.Key -Force
+ Write-Host "All objects deleted successfully from bucket '$selectedBucket'."
+ } else {
+ Write-Host "Deletion canceled."
+ }
+} else {
+ # Prompt user to enter the index of the object to delete
+ $objectIndex = Read-Host "Enter the index of the object you want to delete (or '0' to cancel)"
+ if ($objectIndex -eq 0) {
+ Write-Host "Deletion canceled."
+ exit
+ }
+
+ # Get the selected object key
+ $selectedObject = $objects[$objectIndex - 1].Key
+
+ # Confirm with user before deleting the object
+ $confirm = Read-Host "Are you sure you want to delete object '$selectedObject' from bucket '$selectedBucket'? (yes/no)"
+ if ($confirm.ToLower() -eq "yes") {
+ Remove-S3Object -BucketName $selectedBucket -Key $selectedObject -Force
+ Write-Host "Object '$selectedObject' deleted successfully from bucket '$selectedBucket'."
+ } else {
+ Write-Host "Deletion canceled."
+ }
+}
diff --git a/s3/powershell-scripts/list-bucket.ps1 b/s3/powershell-scripts/list-bucket.ps1
new file mode 100755
index 0000000..874e48b
--- /dev/null
+++ b/s3/powershell-scripts/list-bucket.ps1
@@ -0,0 +1,20 @@
+# List existing S3 buckets
+Write-Host "== Listing S3 Buckets =="
+
+# Get list of S3 buckets with creation dates
+$buckets = Get-S3Bucket
+
+# Check if there are any buckets
+if ($buckets.Count -eq 0) {
+ Write-Host "No buckets found."
+ exit
+}
+
+# Sort buckets by creation date (newest first)
+$sortedBuckets = $buckets | Sort-Object -Property CreationDate -Descending
+
+# Print each bucket name (newest first)
+Write-Host "Found bucket or buckets (newest first):"
+foreach ($bucket in $sortedBuckets) {
+ Write-Host $bucket.BucketName
+}
diff --git a/s3/powershell-scripts/list-object.ps1 b/s3/powershell-scripts/list-object.ps1
new file mode 100755
index 0000000..c24877f
--- /dev/null
+++ b/s3/powershell-scripts/list-object.ps1
@@ -0,0 +1,11 @@
+# Prompt user to enter the bucket name
+$bucketName = Read-Host "Enter the bucket name"
+
+# Check if bucket name is empty
+if (-not $bucketName) {
+ Write-Host "Bucket name must be provided."
+ exit 1
+}
+
+# List objects in the specified bucket
+Get-S3Object -BucketName $bucketName
diff --git a/s3/powershell-scripts/put-object.ps1 b/s3/powershell-scripts/put-object.ps1
new file mode 100755
index 0000000..e0188eb
--- /dev/null
+++ b/s3/powershell-scripts/put-object.ps1
@@ -0,0 +1,44 @@
+# Set the script to exit immediately if any command fails
+$ErrorActionPreference = "Stop"
+
+# Heading
+Write-Host "Creating 5 Random Files and Displaying Directory Tree"
+
+# Define the output directory
+$outputDir = "$env:TEMP\s3-temp-powershell-scripts"
+
+# Remove the directory if it already exists
+if (Test-Path $outputDir -PathType Container) {
+ Write-Host "Removing existing directory: $outputDir"
+ Remove-Item $outputDir -Recurse -Force
+}
+
+# Create the directory
+New-Item -ItemType Directory -Path $outputDir | Out-Null
+
+# Generate 5 random files
+for ($i = 1; $i -le 5; $i++) {
+ # Generate a random filename
+ $filename = Join-Path $outputDir "file$i.txt"
+ # Generate random content
+ $content = [Convert]::ToBase64String((Get-Random -Count 32 -InputObject (0..255)))
+ # Write content to file
+ $content | Out-File -FilePath $filename
+ Write-Host "Created file: $filename"
+}
+
+# Display directory tree
+Write-Host "Directory Tree:"
+Get-ChildItem $outputDir -Recurse | Format-Wide -Property FullName
+
+# Prompt for the destination bucket and upload each generated file
+$bucketName = Read-Host "Enter the bucket name to upload to"
+if (-not $bucketName) {
+    Write-Host "Bucket name must be provided."
+    exit 1
+}
+
+Get-ChildItem $outputDir -Filter *.txt | ForEach-Object {
+    Write-S3Object -BucketName $bucketName -File $_.FullName -Key $_.Name
+    Write-Host "Uploaded: $($_.Name)"
+}
diff --git a/s3/powershell-scripts/s3.ps1 b/s3/powershell-scripts/s3.ps1
deleted file mode 100755
index 8cd3d29..0000000
--- a/s3/powershell-scripts/s3.ps1
+++ /dev/null
@@ -1,29 +0,0 @@
-Import-Module AWS.Tools.S3
-
-$region = "us-east-1"
-
-$bucketName = Read-Host -Prompt 'Enter the S3 bucket name'
-
-Write-Host "AWS Region: $region"
-Write-Host "S3 Bucket: $bucketName"
-
-function BucketExists {
- $bucket = Get-S3Bucket -BucketName $bucketName -ErrorAction SilentlyContinue
- return $null -ne $bucket
-}
-
-if (-not (BucketExists)){
- Write-Host "Bucket does not exist..."
- New-S3Bucket -BucketName $bucketName -Region $region
-} else {
- Write-Host "Bucket already exists..."
-
-}
-
-# Create a new file
-
-$fileName = 'myfile.txt'
-$fileContent = 'Hello World!'
-Set-Content -Path $fileName -Value $fileContent
-
-Write-S3Object -BucketName $bucketName -File $fileName -Key $fileName
\ No newline at end of file
diff --git a/s3/powershell-scripts/sync.ps1 b/s3/powershell-scripts/sync.ps1
new file mode 100755
index 0000000..0258dd5
--- /dev/null
+++ b/s3/powershell-scripts/sync.ps1
@@ -0,0 +1,45 @@
+param (
+ [string]$BUCKET_NAME = $(Read-Host -Prompt "Enter the bucket name"),
+ [string]$FILENAME_PREFIX = $(Read-Host -Prompt "Enter the filename prefix")
+)
+
+Write-Host "== Sync"
+
+# Set the output directory
+$outputDir = "C:\Temp\s3-powershell-scripts"
+
+# Remove existing directory if present
+if (Test-Path -Path $outputDir -PathType Container) {
+ Remove-Item -Path $outputDir -Recurse -Force
+}
+
+# Create the directory
+New-Item -ItemType Directory -Path $outputDir | Out-Null
+
+# Generate random number to determine number of files to create
+$NumFiles = Get-Random -Minimum 5 -Maximum 10
+
+# Generate and sync files
+for ($i = 1; $i -le $NumFiles; $i++) {
+ $Filename = "$outputDir\$($FILENAME_PREFIX)_$i.txt"
+    # Generate random content (Get-Random cannot return more items than the input range provides)
+    $Content = [Convert]::ToBase64String((Get-Random -Count 256 -InputObject (0..255) | ForEach-Object { [byte]$_ }))
+    # Write the Base64 content to the file as text
+    Set-Content -Path $Filename -Value $Content
+ Write-Host "Created file: $Filename"
+}
+
+# Display directory tree
+Write-Host "Directory Tree:"
+Get-ChildItem -Path $outputDir -Recurse | Format-Table -AutoSize
+
+# Sync the files to the specified S3 bucket
+Write-Host "Syncing files to bucket '$BUCKET_NAME'..."
+aws s3 sync $outputDir "s3://$BUCKET_NAME/files"
+
+# Check if synchronization was successful
+if ($LASTEXITCODE -eq 0) {
+ Write-Host "Synchronization to bucket '$BUCKET_NAME' successful."
+} else {
+ Write-Host "Failed to synchronize files to bucket '$BUCKET_NAME'."
+}
diff --git a/s3/sdk/java/README.md b/s3/sdk/java/README.md
new file mode 100644
index 0000000..178136b
--- /dev/null
+++ b/s3/sdk/java/README.md
@@ -0,0 +1,34 @@
+# CREATE A NEW MAVEN PROJECT
+
+Generate a plain Maven quickstart project:
+
+```sh
+mvn archetype:generate \
+-DgroupId=com.mycompany.app \
+-DartifactId=my-app \
+-DarchetypeArtifactId=maven-archetype-quickstart \
+-DarchetypeVersion=1.4 \
+-DinteractiveMode=false
+```
+
+# CREATE AN AWS SDK LAMBDA PROJECT
+
+Generate an AWS Lambda handler project preconfigured with the AWS SDK for Java 2.x S3 client:
+
+```sh
+mvn -B archetype:generate \
+  -DarchetypeGroupId=software.amazon.awssdk \
+  -DarchetypeArtifactId=archetype-lambda -Dservice=s3 -Dregion=US_EAST_1 \
+  -DarchetypeVersion=2.25.69 \
+  -DgroupId=com.example.myapp \
+  -DartifactId=myapp
+```
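+
+# BUILD THE PROJECT
+
+Assuming the project was generated with the commands above (artifactId `myapp`), it can be built with the standard Maven lifecycle (a minimal sketch):
+
+```sh
+cd myapp
+mvn clean package
+```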
diff --git a/s3/sdk/java/myapp/pom.xml b/s3/sdk/java/myapp/pom.xml
index d702a7d..2d7291d 100644
--- a/s3/sdk/java/myapp/pom.xml
+++ b/s3/sdk/java/myapp/pom.xml
@@ -13,9 +13,9 @@
3.2.1
3.6.1
1.6.0
- 2.21.29
+ 2.25.69
1.2.3
- 5.8.1
+ 5.10.1
@@ -48,7 +48,7 @@
             <groupId>software.amazon.awssdk</groupId>
-            <artifactId>url-connection-client</artifactId>
+            <artifactId>aws-crt-client</artifactId>
diff --git a/s3/sdk/java/myapp/src/main/java/com/example/myapp/App.java b/s3/sdk/java/myapp/src/main/java/com/example/myapp/App.java
index 92f79b4..c9d5637 100644
--- a/s3/sdk/java/myapp/src/main/java/com/example/myapp/App.java
+++ b/s3/sdk/java/myapp/src/main/java/com/example/myapp/App.java
@@ -2,7 +2,7 @@
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
-import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3AsyncClient;
/**
* Lambda function entry point. You can change to use other pojo type or implement
@@ -11,7 +11,7 @@
* @see Lambda Java Handler for more information
*/
public class App implements RequestHandler