diff --git a/.github/workflows/module-test.yaml b/.github/workflows/module-test.yaml index 2a1367d58..d84a25f68 100644 --- a/.github/workflows/module-test.yaml +++ b/.github/workflows/module-test.yaml @@ -78,7 +78,6 @@ jobs: MODULE: ${{ inputs.module }} GLOB: ${{ inputs.glob }} AWS_REGION: "${{ secrets.AWS_REGION }}" - ASSUME_ROLE: "${{ secrets.AWS_ROLE_ARN }}" DOCKER_DNS_OVERRIDE: "8.8.8.8" run: | export AWS_DEFAULT_REGION="$AWS_REGION" @@ -102,7 +101,6 @@ jobs: DOCKER_BUILDKIT: 1 DEV_MODE: 1 AWS_REGION: "${{ secrets.AWS_REGION }}" - ASSUME_ROLE: "${{ secrets.AWS_ROLE_ARN }}" run: | export AWS_DEFAULT_REGION="$AWS_REGION" make test environment="$CLUSTER_ID" module="cleanup" @@ -119,13 +117,4 @@ jobs: env: AWS_REGION: "${{ secrets.AWS_REGION }}" run: | - export CLEANUP_ENVIRONMENT_NAME="eks-workshop-$CLUSTER_ID" - export AWS_DEFAULT_REGION="$AWS_REGION" - - envsubst < hack/lib/filter.yml > filter.yml - - cat filter.yml - - awsweeper --force filter.yml - make destroy-infrastructure environment="$CLUSTER_ID" diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index e12a0cab4..919c3ee3e 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -42,6 +42,8 @@ jobs: - name: Check out code uses: actions/checkout@v4 - name: Make shell + env: + SKIP_CREDENTIALS: 1 run: | bash hack/exec.sh '' 'ls -la' @@ -70,3 +72,14 @@ jobs: node-version: 18 - run: | npx cspell lint "website/docs/**/*.md" + + terraform-validate: + name: "Validate Terraform" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: hashicorp/setup-terraform@v3 + with: + terraform_version: "~1.9.0" + - run: | + bash hack/validate-terraform.sh diff --git a/.github/workflows/test-cleanup.yaml b/.github/workflows/test-cleanup.yaml index 8473fb68b..b15005522 100644 --- a/.github/workflows/test-cleanup.yaml +++ b/.github/workflows/test-cleanup.yaml @@ -24,19 +24,6 @@ jobs: - name: Install utilities run: | sudo apt install -y gettext - - mkdir -p ${HOME}/.local/bin - 
wget https://github.com/jckuester/awsweeper/releases/download/v0.12.0/awsweeper_0.12.0_linux_amd64.tar.gz - tar zxf awsweeper_0.12.0_linux_amd64.tar.gz - mv awsweeper_0.12.0_linux_amd64/awsweeper ${HOME}/.local/bin - - wget https://github.com/eksctl-io/eksctl/releases/download/v0.169.0/eksctl_Linux_amd64.tar.gz - tar zxf eksctl_Linux_amd64.tar.gz - mv eksctl ${HOME}/.local/bin - - chmod +x ${HOME}/.local/bin/* - - echo "${HOME}/.local/bin" >> $GITHUB_PATH - name: Get AWS credentials uses: aws-actions/configure-aws-credentials@v4.0.2 with: @@ -50,13 +37,4 @@ jobs: CLUSTER_ID: ${{ github.event.inputs.clusterId }} AWS_REGION: "${{ secrets.AWS_REGION }}" run: | - export CLEANUP_ENVIRONMENT_NAME="$CLUSTER_ID" - export AWS_DEFAULT_REGION="$AWS_REGION" - - envsubst < hack/lib/filter.yml > filter.yml - - cat filter.yml - - awsweeper --force filter.yml - make destroy-infrastructure environment="$CLUSTER_ID" diff --git a/Makefile b/Makefile index 4300ee33b..2f81d713d 100644 --- a/Makefile +++ b/Makefile @@ -35,8 +35,8 @@ delete-environment: .PHONY: create-infrastructure create-infrastructure: - bash hack/exec.sh $(environment) 'cat /cluster/eksctl/cluster.yaml | envsubst | eksctl create cluster -f -' + bash hack/create-infrastructure.sh $(environment) .PHONY: destroy-infrastructure destroy-infrastructure: - bash hack/exec.sh $(environment) 'cat /cluster/eksctl/cluster.yaml | envsubst | eksctl delete cluster --wait --force --disable-nodegroup-eviction --timeout 45m -f -' + bash hack/destroy-infrastructure.sh $(environment) diff --git a/cluster/terraform/eks.tf b/cluster/terraform/eks.tf index 6949f4a45..276818318 100644 --- a/cluster/terraform/eks.tf +++ b/cluster/terraform/eks.tf @@ -32,9 +32,12 @@ module "eks" { eks_managed_node_groups = { default = { - instance_types = ["m5.large"] - force_update_version = true - release_version = var.ami_release_version + instance_types = ["m5.large"] + force_update_version = true + release_version = var.ami_release_version + 
use_name_prefix = false + iam_role_name = "${var.cluster_name}-ng-default" + iam_role_use_name_prefix = false min_size = 3 max_size = 6 diff --git a/docs/reviewer_checklist.md b/docs/reviewer_checklist.md index c5f58b764..d7dd5f591 100644 --- a/docs/reviewer_checklist.md +++ b/docs/reviewer_checklist.md @@ -22,6 +22,10 @@ See style guide for expanded explanations. - [ ] `$EKS_CLUSTER_NAME` is used instead of hard-coded cluster names, including referencing other infrastructure that may use the cluster name - [ ] Avoided use of interactive `kubectl exec` or multiple terminal windows (or tests skipped) +## AWS infrastructure + +- [ ] All Terraform resources created have names that prefixed with the EKS cluster name (`var.addon_context.eks_cluster_id`) + ## Tests - [ ] `bash` blocks that run commands that are intended to error use `expectError=true` @@ -34,5 +38,5 @@ See style guide for expanded explanations. ## Misc - [ ] Generated lab timing has been created (new lab) or updated (updated lab) if needed -- [ ] All Terraform resources created have dynamic names +- [ ] Relevant updates have been made to the [lab IAM policy](../lab/iam-policy-labs.json) - [ ] Images should be in `webp` format diff --git a/hack/build-ide-cfn.sh b/hack/build-ide-cfn.sh new file mode 100644 index 000000000..49586cc0d --- /dev/null +++ b/hack/build-ide-cfn.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +set -e + +output_path=$1 + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source $SCRIPT_DIR/lib/common-env.sh + +if [ -z "$output_path" ]; then + outfile=$(mktemp) +else + outfile=$output_path +fi + +cd lab + +export Env="${EKS_CLUSTER_NAME}" + +cat cfn/eks-workshop-vscode-cfn.yaml | yq '(.. 
| select(has("file"))) |= (load(.file))' | envsubst '$Env' > $outfile + +echo "Output file: $outfile" \ No newline at end of file diff --git a/hack/create-infrastructure.sh b/hack/create-infrastructure.sh new file mode 100644 index 000000000..14775e0af --- /dev/null +++ b/hack/create-infrastructure.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +environment=$1 + +set -Eeuo pipefail +set -u + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source $SCRIPT_DIR/lib/common-env.sh + +bash $SCRIPT_DIR/update-iam-role.sh $environment + +sleep 5 + +cluster_exists=0 +aws eks describe-cluster --name "${EKS_CLUSTER_NAME}" &> /dev/null || cluster_exists=$? + +if [ $cluster_exists -eq 0 ]; then + echo "Cluster ${EKS_CLUSTER_NAME} already exists" +else + echo "Creating cluster ${EKS_CLUSTER_NAME}" + bash $SCRIPT_DIR/exec.sh "${environment}" 'cat /cluster/eksctl/cluster.yaml | envsubst | eksctl create cluster -f -' +fi \ No newline at end of file diff --git a/hack/deploy-ide-cfn.sh b/hack/deploy-ide-cfn.sh new file mode 100644 index 000000000..982a01409 --- /dev/null +++ b/hack/deploy-ide-cfn.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -e + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source $SCRIPT_DIR/lib/common-env.sh + +outfile=$(mktemp) + +bash $SCRIPT_DIR/build-ide-cfn.sh $outfile + +aws cloudformation deploy --stack-name eks-workshop-ide1 \ + --capabilities CAPABILITY_NAMED_IAM --disable-rollback --template-file $outfile \ No newline at end of file diff --git a/hack/destroy-infrastructure.sh b/hack/destroy-infrastructure.sh new file mode 100644 index 000000000..6a2342870 --- /dev/null +++ b/hack/destroy-infrastructure.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +environment=$1 + +set -Eeuo pipefail +set -u + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source $SCRIPT_DIR/lib/common-env.sh + +cluster_exists=0 +aws eks describe-cluster --name "${EKS_CLUSTER_NAME}" &> /dev/null || 
cluster_exists=$? + +if [ $cluster_exists -eq 0 ]; then + echo "Deleting cluster ${EKS_CLUSTER_NAME}" + bash $SCRIPT_DIR/shell.sh "${environment}" 'delete-environment || true' + + bash $SCRIPT_DIR/exec.sh "${environment}" 'eksctl delete cluster --name ${EKS_CLUSTER_NAME} --region ${AWS_REGION} --wait --force --disable-nodegroup-eviction --timeout 45m' +else + echo "Cluster ${EKS_CLUSTER_NAME} does not exist" +fi + +aws cloudformation delete-stack --stack-name ${EKS_CLUSTER_NAME}-ide-role || true \ No newline at end of file diff --git a/hack/exec.sh b/hack/exec.sh index bf38b1513..ea785e5b0 100644 --- a/hack/exec.sh +++ b/hack/exec.sh @@ -19,7 +19,11 @@ container_image='eks-workshop-environment' (cd $SCRIPT_DIR/../lab && $CONTAINER_CLI build -q -t $container_image .) -source $SCRIPT_DIR/lib/generate-aws-creds.sh +if [ -z "$SKIP_CREDENTIALS" ]; then + source $SCRIPT_DIR/lib/generate-aws-creds.sh +else + aws_credential_args="" +fi echo "Executing command in container..." diff --git a/hack/find-dangling-resources.sh b/hack/find-dangling-resources.sh new file mode 100644 index 000000000..fc52c7552 --- /dev/null +++ b/hack/find-dangling-resources.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +environment=$1 + +set -Eeuo pipefail +set -u + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source $SCRIPT_DIR/lib/common-env.sh + +aws resourcegroupstaggingapi get-resources --tag-filters Key=env,Values=$EKS_CLUSTER_NAME --query 'ResourceTagMappingList[].ResourceARN' \ No newline at end of file diff --git a/hack/lib/common-env.sh b/hack/lib/common-env.sh index 636ec0842..42181d2f9 100644 --- a/hack/lib/common-env.sh +++ b/hack/lib/common-env.sh @@ -13,3 +13,12 @@ if [ -z "$AWS_REGION" ]; then export AWS_REGION="us-west-2" fi + +SKIP_CREDENTIALS=${SKIP_CREDENTIALS:-""} + +if [ -z "$SKIP_CREDENTIALS" ]; then + ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) + + IDE_ROLE_NAME="${EKS_CLUSTER_NAME}-ide-role" + 
IDE_ROLE_ARN="arn:aws:iam::${ACCOUNT_ID}:role/${IDE_ROLE_NAME}" +fi \ No newline at end of file diff --git a/hack/lib/generate-aws-creds.sh b/hack/lib/generate-aws-creds.sh index 3e5728d85..dbab3dd40 100644 --- a/hack/lib/generate-aws-creds.sh +++ b/hack/lib/generate-aws-creds.sh @@ -1,21 +1,19 @@ -aws_credential_args="" +echo "Generating temporary AWS credentials..." -ASSUME_ROLE=${ASSUME_ROLE:-""} -AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-""} +session_suffix=$(openssl rand -hex 4) + +target_role=${IDE_ROLE_ARN} -if [ ! -z "$AWS_ACCESS_KEY_ID" ]; then - echo "Using environment AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY" +ASSUME_ROLE=${ASSUME_ROLE:-""} - aws_credential_args="-e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY -e AWS_SESSION_TOKEN=$AWS_SESSION_TOKEN" -elif [ ! -z "$ASSUME_ROLE" ]; then - echo "Generating temporary AWS credentials..." +if [ ! -z "$ASSUME_ROLE" ]; then + echo "Assuming role $ASSUME_ROLE" + target_role=$ASSUME_ROLE +fi - ACCESS_VARS=$(aws sts assume-role --role-arn $ASSUME_ROLE --role-session-name ${EKS_CLUSTER_NAME}-shell --output json | jq -r '.Credentials | "export AWS_ACCESS_KEY_ID=\(.AccessKeyId) AWS_SECRET_ACCESS_KEY=\(.SecretAccessKey) AWS_SESSION_TOKEN=\(.SessionToken)"') +ACCESS_VARS=$(aws sts assume-role --role-arn ${target_role} --role-session-name ${EKS_CLUSTER_NAME}-shell-${session_suffix} --output json | jq -r '.Credentials | "export AWS_ACCESS_KEY_ID=\(.AccessKeyId) AWS_SECRET_ACCESS_KEY=\(.SecretAccessKey) AWS_SESSION_TOKEN=\(.SessionToken)"') - # TODO: This should probably not use eval - eval "$ACCESS_VARS" +# TODO: This should probably not use eval +eval "$ACCESS_VARS" - aws_credential_args="-e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY -e AWS_SESSION_TOKEN=$AWS_SESSION_TOKEN" -else - echo "Inheriting credentials from instance profile" -fi \ No newline at end of file +aws_credential_args="-e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID -e 
AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY -e AWS_SESSION_TOKEN=$AWS_SESSION_TOKEN" \ No newline at end of file diff --git a/hack/run-tests.sh b/hack/run-tests.sh index 3e96fe595..8fbe0dba1 100755 --- a/hack/run-tests.sh +++ b/hack/run-tests.sh @@ -76,19 +76,27 @@ RESOURCES_PRECREATED=${RESOURCES_PRECREATED:-""} echo "Running test suite..." +exit_code=0 + $CONTAINER_CLI run $background_args $dns_args \ --name $container_name \ -v $SCRIPT_DIR/../website/docs:/content \ -v $SCRIPT_DIR/../manifests:/manifests \ -e 'EKS_CLUSTER_NAME' -e 'AWS_REGION' -e 'RESOURCES_PRECREATED' \ - $aws_credential_args $container_image -g "${actual_glob}" --hook-timeout 3600 --timeout 3600 $output_args ${AWS_EKS_WORKSHOP_TEST_FLAGS} + $aws_credential_args $container_image -g "${actual_glob}" --hook-timeout 3600 --timeout 3600 $output_args ${AWS_EKS_WORKSHOP_TEST_FLAGS} || exit_code=$? -if [ ! -z "$TEST_REPORT" ]; then - docker cp $container_name:/tmp/test-report.json $TEST_REPORT > /dev/null +if [ $exit_code -eq 0 ]; then + if [ ! -z "$TEST_REPORT" ]; then + docker cp $container_name:/tmp/test-report.json $TEST_REPORT > /dev/null + fi fi docker rm $container_name > /dev/null +if [ $exit_code -ne 0 ]; then + exit $exit_code +fi + if [ ! -z "$GENERATE_TIMINGS" ]; then tmpfile=$(mktemp) diff --git a/hack/update-iam-role.sh b/hack/update-iam-role.sh new file mode 100644 index 000000000..0a7abdbfd --- /dev/null +++ b/hack/update-iam-role.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +environment=$1 + +set -Eeuo pipefail +set -u + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source $SCRIPT_DIR/lib/common-env.sh + +outfile=$(mktemp) + +cd lab + +export Env="${EKS_CLUSTER_NAME}" + +cat iam/iam-role-cfn.yaml | yq '(.. 
| select(has("file"))) |= (load(.file))' | envsubst '$Env' > $outfile + +aws cloudformation deploy \ + --stack-name ${EKS_CLUSTER_NAME}-ide-role \ + --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM \ + --template-file $outfile \ No newline at end of file diff --git a/hack/validate-terraform.sh b/hack/validate-terraform.sh new file mode 100644 index 000000000..a085a0ff6 --- /dev/null +++ b/hack/validate-terraform.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +environment=$1 + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source $SCRIPT_DIR/lib/common-env.sh + +terraform_dir="$(mktemp -d)" +manifests_dir="${SCRIPT_DIR}/../manifests" + +conf_dir="$terraform_dir/conf" + +mkdir -p "$conf_dir" + +cp $manifests_dir/.workshop/terraform/base.tf $conf_dir/base.tf + +find $manifests_dir/modules -type d -name "terraform" -print0 | while read -d $'\0' file +do + target=$(echo $file | md5sum | cut -f1 -d" ") + cp -R $file $conf_dir/$target + + cat << EOF > $conf_dir/$target.tf +module "gen-$target" { + source = "./$target" + + eks_cluster_id = local.eks_cluster_id + eks_cluster_version = local.eks_cluster_version + cluster_security_group_id = local.cluster_security_group_id + addon_context = local.addon_context + tags = local.tags + resources_precreated = var.resources_precreated +} +EOF +done + +terraform -chdir="${conf_dir}" init -backend=false + +terraform -chdir="${conf_dir}" validate \ No newline at end of file diff --git a/lab/Dockerfile b/lab/Dockerfile index ac169e511..234e41a3a 100644 --- a/lab/Dockerfile +++ b/lab/Dockerfile @@ -1,6 +1,6 @@ -FROM public.ecr.aws/amazonlinux/amazonlinux:2 +FROM public.ecr.aws/amazonlinux/amazonlinux:2023 -RUN yum install -y shadow-utils && useradd \ +RUN yum install -y tar gzip vim shadow-utils && useradd \ --home "/home/ec2-user" \ --create-home \ --user-group \ diff --git a/lab/bin/reset-environment b/lab/bin/reset-environment index 333963db3..2c3626d54 100644 --- a/lab/bin/reset-environment +++ 
b/lab/bin/reset-environment @@ -89,6 +89,8 @@ logmessage "Tip: Read the rest of the lab introduction while you wait!" if [ -f "/eks-workshop/hooks/cleanup.sh" ]; then bash /eks-workshop/hooks/cleanup.sh + + rm /eks-workshop/hooks/cleanup.sh fi kubectl delete pod load-generator --ignore-not-found @@ -227,4 +229,4 @@ kubectl delete pod -l app.kubernetes.io/created-by=eks-workshop -l app.kubernete kubectl wait --for=condition=Ready --timeout=240s pods -l app.kubernetes.io/created-by=eks-workshop -A # Finished -logmessage 'Environment is ready' +logmessage 'Environment is ready' \ No newline at end of file diff --git a/lab/cfn/eks-workshop-vscode-cfn.yaml b/lab/cfn/eks-workshop-vscode-cfn.yaml new file mode 100644 index 000000000..5b664c477 --- /dev/null +++ b/lab/cfn/eks-workshop-vscode-cfn.yaml @@ -0,0 +1,578 @@ +AWSTemplateFormatVersion: "2010-09-09" +Description: Creates a code-server IDE for the EKS workshop +Parameters: + InstanceVolumeSize: + Type: Number + Description: The Size in GB of the Cloud9 Instance Volume. 
+ Default: 30 + RepositoryOwner: + Type: String + Description: The owner of the GitHub repository to be used to bootstrap Cloud9 + Default: "aws-samples" + RepositoryName: + Type: String + Description: The name of the GitHub repository to be used to bootstrap Cloud9 + Default: "eks-workshop-v2" + RepositoryRef: + Type: String + Description: The Git reference to be used to bootstrap Cloud9 + Default: "vscode-ide" + ResourcesPrecreated: + Type: String + Description: Whether lab infrastructure has been pre-provisioned + Default: "false" + AllowedValues: + - "false" + - "true" + AnalyticsEndpoint: + Type: String + Description: Analytics endpoint used for AWS events + Default: "" + CodeServerVersion: + Type: String + Description: Default code-server version to use + Default: "4.91.1" + AmiParameterStoreName: + Type: "AWS::SSM::Parameter::Value" + Default: "/aws/service/ami-amazon-linux-latest/al2023-ami-kernel-6.1-x86_64" + Environment: + Type: String + Description: For testing purposes only + Default: "" + +Mappings: + PrefixListID: + ap-northeast-1: + PrefixList: pl-58a04531 + ap-northeast-2: + PrefixList: pl-22a6434b + ap-south-1: + PrefixList: pl-9aa247f3 + ap-southeast-1: + PrefixList: pl-31a34658 + ap-southeast-2: + PrefixList: pl-b8a742d1 + ca-central-1: + PrefixList: pl-38a64351 + eu-central-1: + PrefixList: pl-a3a144ca + eu-north-1: + PrefixList: pl-fab65393 + eu-west-1: + PrefixList: pl-4fa04526 + eu-west-2: + PrefixList: pl-93a247fa + eu-west-3: + PrefixList: pl-75b1541c + sa-east-1: + PrefixList: pl-5da64334 + us-east-1: + PrefixList: pl-3b927c52 + us-east-2: + PrefixList: pl-b6a144df + us-west-1: + PrefixList: pl-4ea04527 + us-west-2: + PrefixList: pl-82a045eb + +Resources: + VPC: + Type: AWS::EC2::VPC + Properties: + CidrBlock: 10.0.0.0/24 + EnableDnsSupport: true + EnableDnsHostnames: true + + InternetGateway: + Type: AWS::EC2::InternetGateway + + GatewayAttachment: + Type: AWS::EC2::VPCGatewayAttachment + Properties: + VpcId: !Ref VPC + 
InternetGatewayId: !Ref InternetGateway + + PublicSubnet: + Type: AWS::EC2::Subnet + Properties: + CidrBlock: 10.0.0.0/24 + VpcId: !Ref VPC + MapPublicIpOnLaunch: true + AvailabilityZone: !Select [0, !GetAZs ""] + + PublicSubnetRouteTable: + Type: AWS::EC2::RouteTable + Properties: + VpcId: !Ref VPC + + PublicSubnetRoute: + Type: AWS::EC2::Route + DependsOn: GatewayAttachment + Properties: + RouteTableId: !Ref PublicSubnetRouteTable + DestinationCidrBlock: 0.0.0.0/0 + GatewayId: !Ref InternetGateway + + PublicSubnetRouteTableAssoc: + Type: AWS::EC2::SubnetRouteTableAssociation + Properties: + RouteTableId: !Ref PublicSubnetRouteTable + SubnetId: !Ref PublicSubnet + + SecurityGroup: + Type: AWS::EC2::SecurityGroup + Properties: + GroupDescription: SG for IDE + SecurityGroupIngress: + - Description: Allow HTTP from CloudFront + IpProtocol: tcp + FromPort: 80 + ToPort: 80 + SourcePrefixListId: + !FindInMap [PrefixListID, !Ref "AWS::Region", PrefixList] + SecurityGroupEgress: + - Description: Allow all outbound traffic + IpProtocol: -1 + CidrIp: 0.0.0.0/0 + VpcId: !Ref VPC + + EksWorkshopIdeLambdaExecutionRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Principal: + Service: + - lambda.amazonaws.com + Action: + - sts:AssumeRole + Path: "/" + Policies: + - PolicyName: + Fn::Join: + - "" + - - EksWorkshopIdeLambdaPolicy- + - Ref: AWS::Region + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - logs:CreateLogGroup + - logs:CreateLogStream + - logs:PutLogEvents + Resource: arn:aws:logs:*:*:* + - Effect: Allow + Action: + - iam:PassRole + - ssm:SendCommand + - ssm:GetCommandInvocation + Resource: "*" + + EksWorkshopIdeBootstrapInstanceLambda: + Type: Custom::EksWorkshopIdeBootstrapInstanceLambda + DependsOn: + - EksWorkshopIdeLambdaExecutionRole + Properties: + ServiceToken: + Fn::GetAtt: + - EksWorkshopIdeBootstrapInstanceLambdaFunction + - Arn + REGION: + Ref: 
AWS::Region + InstanceId: + Ref: EksWorkshopIdeInstance + SsmDocument: + Ref: EksWorkshopIdeSSMDocument + #UpdateTrigger: + # Ref: UpdateTrigger + + EksWorkshopIdeBootstrapInstanceLambdaFunction: + Type: AWS::Lambda::Function + Properties: + Handler: index.lambda_handler + Role: + Fn::GetAtt: + - EksWorkshopIdeLambdaExecutionRole + - Arn + Runtime: python3.12 + Environment: + Variables: + DiskSize: + Ref: InstanceVolumeSize + MemorySize: 256 + Timeout: "900" + Code: + ZipFile: | + from __future__ import print_function + import boto3 + import json + import os + import time + import traceback + import cfnresponse + import logging + logger = logging.getLogger(__name__) + + def lambda_handler(event, context): + print(event.values()) + print('context: {}'.format(context)) + responseData = {} + + status = cfnresponse.SUCCESS + + if event['RequestType'] == 'Delete': + responseData = {'Success': 'Custom Resource removed'} + cfnresponse.send(event, context, status, responseData, 'CustomResourcePhysicalID') + else: + try: + # Open AWS clients + #ec2 = boto3.client('ec2') + ssm = boto3.client('ssm') + + instance_id = event['ResourceProperties']['InstanceId'] + + ssm_document = event['ResourceProperties']['SsmDocument'] + + print('Sending SSM command...') + + response = ssm.send_command( + InstanceIds=[instance_id], + DocumentName=ssm_document) + + command_id = response['Command']['CommandId'] + + waiter = ssm.get_waiter('command_executed') + + waiter.wait( + CommandId=command_id, + InstanceId=instance_id, + WaiterConfig={ + 'Delay': 10, + 'MaxAttempts': 60 + } + ) + + responseData = {'Success': 'Started bootstrapping for instance: '+instance_id} + cfnresponse.send(event, context, status, responseData, 'CustomResourcePhysicalID') + + except Exception as e: + status = cfnresponse.FAILED + print(traceback.format_exc()) + responseData = {'Error': traceback.format_exc(e)} + finally: + cfnresponse.send(event, context, status, responseData, 'CustomResourcePhysicalID') + + 
EksWorkshopIdeSSMDocument: + Type: AWS::SSM::Document + Properties: + DocumentType: Command + DocumentFormat: YAML + Content: + schemaVersion: "2.2" + description: Bootstrap Cloud9 Instance + mainSteps: + - action: aws:runShellScript + name: EksWorkshopIdebootstrap + inputs: + runCommand: + - !Sub | + set -e + + yum install -y git tar gzip vim nodejs npm make gcc g++ + + export environment="${Environment}" + + source <(curl -fsSL https://raw.githubusercontent.com/${RepositoryOwner}/${RepositoryName}/${RepositoryRef}/hack/lib/common-env.sh) + + dnf copr enable -y @caddy/caddy epel-9-x86_64 + dnf install -y caddy + systemctl enable --now caddy + + tee /etc/caddy/Caddyfile < ~/.local/share/code-server/coder.json + + curl -fsSL https://raw.githubusercontent.com/${RepositoryOwner}/${RepositoryName}/${RepositoryRef}/lab/scripts/setup.sh | bash + + code-server --install-extension ms-kubernetes-tools.vscode-kubernetes-tools --force + code-server --install-extension redhat.vscode-yaml --force + + EOT + + systemctl restart code-server@ec2-user + + EksWorkshopIdeRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Principal: + Service: + - ec2.amazonaws.com + - ssm.amazonaws.com + Action: + - sts:AssumeRole + Policies: + - PolicyName: ide-password + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - secretsmanager:GetResourcePolicy + - secretsmanager:GetSecretValue + - secretsmanager:DescribeSecret + - secretsmanager:ListSecretVersionIds + Resource: + - !Ref EksWorkshopIdePassword + - Effect: Allow + Action: secretsmanager:ListSecrets + Resource: "*" + + ManagedPolicyArns: + - arn:aws:iam::aws:policy/AmazonSSMManagedInstanceCore + Path: "/" + + EksWorkshopIamPolicy: + Type: AWS::IAM::ManagedPolicy + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: !Sub ${AWS::StackName}-iam + PolicyDocument: + file: ./iam/policies/iam.yaml + + 
EksWorkshopBasePolicy: + Type: AWS::IAM::ManagedPolicy + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: !Sub ${AWS::StackName}-base + PolicyDocument: + file: ./iam/policies/base.yaml + + EksWorkshopEc2Policy: + Type: AWS::IAM::ManagedPolicy + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: !Sub ${AWS::StackName}-ec2 + PolicyDocument: + file: ./iam/policies/ec2.yaml + + EksWorkshopLabsPolicy1: + Type: AWS::IAM::ManagedPolicy + DependsOn: + - EksWorkshopIdeRole + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: !Sub ${AWS::StackName}-labs1 + PolicyDocument: + file: ./iam/policies/labs1.yaml + + EksWorkshopLabsPolicy2: + Type: AWS::IAM::ManagedPolicy + DependsOn: + - EksWorkshopIdeRole + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: !Sub ${AWS::StackName}-labs2 + PolicyDocument: + file: ./iam/policies/labs2.yaml + + EksWorkshopIdeInstanceProfile: + Type: AWS::IAM::InstanceProfile + Properties: + Path: "/" + Roles: + - Ref: EksWorkshopIdeRole + + EksWorkshopIdeInstance: + Type: AWS::EC2::Instance + Properties: + ImageId: !Ref AmiParameterStoreName + InstanceType: t3.medium + BlockDeviceMappings: + - Ebs: + VolumeSize: !Ref InstanceVolumeSize + VolumeType: gp3 + DeleteOnTermination: true + Encrypted: true + DeviceName: /dev/xvda + SubnetId: !Ref PublicSubnet + SecurityGroupIds: + - !Ref SecurityGroup + IamInstanceProfile: !Ref EksWorkshopIdeInstanceProfile + Tags: + - Key: type + Value: eksworkshop-ide + + EksWorkshopIdePassword: + Type: AWS::SecretsManager::Secret + Properties: + Name: !Sub ${AWS::StackName}-password + GenerateSecretString: + ExcludeCharacters: "\"@/\\" + ExcludePunctuation: true + GenerateStringKey: password + IncludeSpace: false + PasswordLength: 32 + SecretStringTemplate: '{"password":""}' + UpdateReplacePolicy: Delete + DeletionPolicy: Delete + + EksWorkshopIdeCachePolicy: + Type: AWS::CloudFront::CachePolicy + Properties: + CachePolicyConfig: + DefaultTTL: 
86400 + MaxTTL: 31536000 + MinTTL: 1 + Name: !Ref AWS::StackName + ParametersInCacheKeyAndForwardedToOrigin: + CookiesConfig: + CookieBehavior: all + EnableAcceptEncodingGzip: False + HeadersConfig: + HeaderBehavior: whitelist + Headers: + - Accept-Charset + - Authorization + - Origin + - Accept + - Referer + - Host + - Accept-Language + - Accept-Encoding + - Accept-Datetime + QueryStringsConfig: + QueryStringBehavior: all + + EksWorkshopIdeCloudFrontDistribution: + Type: AWS::CloudFront::Distribution + Properties: + DistributionConfig: + Enabled: True + HttpVersion: http2 + CacheBehaviors: + - AllowedMethods: + - GET + - HEAD + - OPTIONS + - PUT + - PATCH + - POST + - DELETE + CachePolicyId: 4135ea2d-6df8-44a3-9df3-4b5a84be39ad + Compress: False + OriginRequestPolicyId: 216adef6-5c7f-47e4-b989-5492eafa07d3 + TargetOriginId: !Sub CloudFront-${AWS::StackName} + ViewerProtocolPolicy: allow-all + PathPattern: "/proxy/*" + DefaultCacheBehavior: + AllowedMethods: + - GET + - HEAD + - OPTIONS + - PUT + - PATCH + - POST + - DELETE + CachePolicyId: !Ref EksWorkshopIdeCachePolicy + OriginRequestPolicyId: 216adef6-5c7f-47e4-b989-5492eafa07d3 + TargetOriginId: !Sub CloudFront-${AWS::StackName} + ViewerProtocolPolicy: allow-all + Origins: + - DomainName: !GetAtt EksWorkshopIdeInstance.PublicDnsName + Id: !Sub CloudFront-${AWS::StackName} + CustomOriginConfig: + OriginProtocolPolicy: http-only + +Outputs: + IdeUrl: + Value: !Sub https://${EksWorkshopIdeCloudFrontDistribution.DomainName} + + IdePasswordSecret: + Value: !Sub + - https://console.aws.amazon.com/secretsmanager/secret?name=${SecretName} + - SecretName: !Sub ${AWS::StackName}-password + + IdeRole: + Value: !Sub ${EksWorkshopIdeRole.Arn} diff --git a/lab/iam/iam-role-cfn.yaml b/lab/iam/iam-role-cfn.yaml new file mode 100644 index 000000000..5232703ea --- /dev/null +++ b/lab/iam/iam-role-cfn.yaml @@ -0,0 +1,64 @@ +AWSTemplateFormatVersion: "2010-09-09" +Description: Creates an IAM role for the EKS workshop IDE 
+Resources: + EksWorkshopIdeRole: + Type: AWS::IAM::Role + Properties: + RoleName: ${Env}-ide-role + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Principal: + AWS: !Sub arn:aws:iam::${AWS::AccountId}:root + Action: + - sts:AssumeRole + + EksWorkshopIamPolicy: + Type: AWS::IAM::ManagedPolicy + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: ${Env}-ide-iam + PolicyDocument: + file: ./iam/policies/iam.yaml + + EksWorkshopBasePolicy: + Type: AWS::IAM::ManagedPolicy + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: ${Env}-ide-base + PolicyDocument: + file: ./iam/policies/base.yaml + + EksWorkshopEc2Policy: + Type: AWS::IAM::ManagedPolicy + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: ${Env}-ide-ec2 + PolicyDocument: + file: ./iam/policies/ec2.yaml + + EksWorkshopLabsPolicy1: + Type: AWS::IAM::ManagedPolicy + DependsOn: + - EksWorkshopIdeRole + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: ${Env}-ide-labs1 + PolicyDocument: + file: ./iam/policies/labs1.yaml + + EksWorkshopLabsPolicy2: + Type: AWS::IAM::ManagedPolicy + DependsOn: + - EksWorkshopIdeRole + Properties: + Roles: + - !Ref EksWorkshopIdeRole + ManagedPolicyName: ${Env}-ide-labs2 + PolicyDocument: + file: ./iam/policies/labs2.yaml diff --git a/lab/iam/policies/base.yaml b/lab/iam/policies/base.yaml new file mode 100644 index 000000000..5261d3e07 --- /dev/null +++ b/lab/iam/policies/base.yaml @@ -0,0 +1,87 @@ +Version: "2012-10-17" +Statement: + - Effect: Allow + Action: + - eks:* + - ec2:CreateLaunchTemplate + - ec2:DeleteLaunchTemplate + - sts:GetCallerIdentity + Resource: ["*"] + - Effect: Allow + Action: + - cloudformation:CreateStack + Resource: + - !Sub arn:aws:cloudformation:${AWS::Region}:${AWS::AccountId}:stack/eksctl-${Env}* + Condition: + "Null": + cloudformation:RoleARN: "true" + - Effect: Allow + Action: + - cloudformation:DeleteStack + Resource: + - !Sub 
arn:aws:cloudformation:${AWS::Region}:${AWS::AccountId}:stack/eksctl-${Env}* + Condition: + "Null": + cloudformation:RoleARN: "true" + - Effect: Allow + Action: + - cloudformation:Get* + - cloudformation:Describe* + - cloudformation:List* + - cloudformation:TagResource + Resource: ["*"] + - Effect: Allow + Action: + - autoscaling:UpdateAutoScalingGroup + Resource: ["*"] + Condition: + StringLike: + aws:ResourceTag/eks:cluster-name: + - ${Env} + - Effect: Allow + Action: + - autoscaling:Get* + - autoscaling:Describe* + Resource: ["*"] + - Effect: Allow + Action: + - ecr-public:GetAuthorizationToken + - sts:GetServiceBearerToken + Resource: ["*"] + - Effect: Allow + Action: + - kms:CreateKey + - kms:TagResource + - kms:ScheduleKeyDeletion + - kms:CreateGrant + - kms:EnableKeyRotation + - kms:GetKeyPolicy + - kms:GetKeyRotationStatus + - kms:ListResourceTags + - kms:PutKeyPolicy + Resource: ["*"] + - Effect: Allow + Action: + - kms:Decrypt + - kms:DescribeKey + - kms:EnableKeyRotation + - kms:Encrypt + - kms:GenerateDataKey + - kms:GenerateDataKeyWithoutPlaintext + Resource: ["*"] + Condition: + StringLike: + kms:RequestAlias: "alias/${Env}*" + - Effect: Allow + Action: + - kms:CreateAlias + - kms:DeleteAlias + Resource: + - !Sub arn:aws:kms:${AWS::Region}:${AWS::AccountId}:alias/${Env}* + - !Sub arn:aws:kms:${AWS::Region}:${AWS::AccountId}:key/* + - Effect: Allow + Action: + - kms:List* + - kms:Get* + - kms:Describe* + Resource: ["*"] diff --git a/lab/iam/policies/ec2.yaml b/lab/iam/policies/ec2.yaml new file mode 100644 index 000000000..4f588cd50 --- /dev/null +++ b/lab/iam/policies/ec2.yaml @@ -0,0 +1,92 @@ +Version: "2012-10-17" +Statement: + - Effect: Allow + Action: + - ec2:Get* + - ec2:Describe* + - ec2:List* + - ec2:RunInstances + Resource: ["*"] + - Effect: Allow + Action: + - ec2:TerminateInstances + Resource: ["*"] + Condition: + StringLike: + aws:ResourceTag/env: + - ${Env}* + - Effect: Deny + Action: ec2:RunInstances + Resource: + - !Sub 
arn:aws:ec2:*:*:instance/* + Condition: + ForAnyValue:StringNotLike: + ec2:InstanceType: + - m5.large + - t4g.medium + - c*.large + - Effect: Allow + Action: + - ec2:CreateVpc + - ec2:CreateSubnet + - ec2:CreateRouteTable + - ec2:CreateRoute + - ec2:CreateInternetGateway + - ec2:AttachInternetGateway + - ec2:AssociateRouteTable + - ec2:ModifyVpcAttribute + - ec2:CreateSecurityGroup + - ec2:AllocateAddress + - ec2:ReleaseAddress + - ec2:DisassociateAddress + - ec2:CreateNetworkAclEntry + - ec2:DeleteNetworkAclEntry + - ec2:CreateNatGateway + - ec2:DeleteNatGateway + Resource: ["*"] + - Effect: Allow + Action: + - ec2:DeleteVpc + - ec2:DeleteSubnet + - ec2:DeleteRouteTable + - ec2:DeleteRoute + - ec2:DeleteInternetGateway + - ec2:DetachInternetGateway + - ec2:DisassociateRouteTable + - ec2:ModifyVpcAttribute + - ec2:ModifySubnetAttribute + - ec2:AuthorizeSecurityGroup* + - ec2:UpdateSecurityGroupRuleDescriptionsEgress + - ec2:RevokeSecurityGroup* + - ec2:DeleteSecurityGroup + - ec2:ModifySecurityGroupRules + - ec2:UpdateSecurityGroupRuleDescriptionsIngress + Resource: ["*"] + Condition: + StringLike: + aws:ResourceTag/env: + - ${Env}* + - Effect: Allow + Action: + - ec2:AuthorizeSecurityGroup* + - ec2:RevokeSecurityGroup* + Resource: ["*"] + Condition: + StringLike: + aws:ResourceTag/aws:eks:cluster-name: + - ${Env}* + - Effect: Allow + Action: + - ec2:CreateTags + - ec2:DeleteTags + Resource: ["*"] + - Effect: Allow + Action: + - ec2:AssociateVpcCidrBlock + - ec2:DisassociateVpcCidrBlock + Resource: + - !Sub arn:aws:ec2:${AWS::Region}:${AWS::AccountId}:vpc/* + Condition: + StringLike: + aws:ResourceTag/env: + - ${Env}* diff --git a/lab/iam/policies/iam.yaml b/lab/iam/policies/iam.yaml new file mode 100644 index 000000000..15641fc6c --- /dev/null +++ b/lab/iam/policies/iam.yaml @@ -0,0 +1,75 @@ +Version: "2012-10-17" +Statement: + - Effect: Allow + Action: + - iam:CreateRole + - iam:GetRolePolicy + - iam:DetachRolePolicy + - iam:AttachRolePolicy + - iam:PutRolePolicy 
+ - iam:DeleteRolePolicy + - iam:DeleteRole + - iam:ListInstanceProfilesForRole + - iam:ListAttachedRolePolicies + - iam:ListRolePolicies + - iam:TagRole + - iam:PassRole + - sts:AssumeRole + Resource: + - !Sub arn:aws:iam::${AWS::AccountId}:role/${Env}* + - !Sub arn:aws:iam::${AWS::AccountId}:role/eksctl-${Env}* + - Effect: Allow + Action: + - iam:CreatePolicy + - iam:DeletePolicy + - iam:GetPolicyVersion + - iam:ListPolicyVersions + - iam:TagPolicy + - iam:GetPolicy + Resource: + - !Sub arn:aws:iam::${AWS::AccountId}:policy/${Env}* + - !Sub arn:aws:iam::${AWS::AccountId}:policy/eksctl-${Env}* + - Effect: Allow + Action: + - iam:CreateInstanceProfile + - iam:DeleteInstanceProfile + - iam:GetInstanceProfile + - iam:TagInstanceProfile + - iam:RemoveRoleFromInstanceProfile + - iam:AddRoleToInstanceProfile + Resource: + - !Sub arn:aws:iam::${AWS::AccountId}:instance-profile/${Env}* + - !Sub arn:aws:iam::${AWS::AccountId}:instance-profile/eksctl-${Env}* + - !Sub arn:aws:iam::${AWS::AccountId}:instance-profile/eks-* + - Effect: Allow + Action: + - iam:CreateUser + - iam:DeleteUser + - iam:TagUser + - iam:GetUser + - iam:ListGroupsForUser + - iam:AttachUserPolicy + - iam:DetachUserPolicy + - iam:ListAttachedUserPolicies + - iam:*SSHPublicKey + Resource: + - !Sub arn:aws:iam::${AWS::AccountId}:user/${Env}* + - Effect: Allow + Action: + - iam:ListOpenIDConnectProviders + - iam:CreateOpenIDConnectProvider + - iam:DeleteOpenIDConnectProvider + - iam:TagOpenIDConnectProvider + - iam:GetOpenIDConnectProvider + - iam:GetRole + Resource: ["*"] + - Effect: Allow + Action: + - iam:CreateServiceLinkedRole + Resource: ["*"] + Condition: + StringEquals: + iam:AWSServiceName: + - eks.amazonaws.com + - eks-nodegroup.amazonaws.com + - eks-fargate.amazonaws.com diff --git a/lab/iam/policies/labs1.yaml b/lab/iam/policies/labs1.yaml new file mode 100644 index 000000000..20217d34f --- /dev/null +++ b/lab/iam/policies/labs1.yaml @@ -0,0 +1,149 @@ +Version: "2012-10-17" +Statement: + - 
Effect: Allow + Action: + - aps:CreateWorkspace + - aps:TagResource + Resource: ["*"] + Condition: + StringLike: + aws:RequestTag/env: + - ${Env}* + - Effect: Allow + Action: + - aps:DeleteWorkspace + - aps:Describe* + - aps:List* + - aps:QueryMetrics + Resource: ["*"] + Condition: + StringLike: + aws:ResourceTag/env: + - ${Env}* + - Effect: Allow + Action: + - dynamodb:ListTables + Resource: ["*"] + - Effect: Allow + Action: + - dynamodb:CreateTable + - dynamodb:DeleteTable + - dynamodb:DescribeTable + - dynamodb:DescribeContinuousBackups + - dynamodb:ListTagsOfResource + - dynamodb:DescribeTimeToLive + - dynamodb:Scan + - dynamodb:TagResource + Resource: + - !Sub arn:aws:dynamodb:${AWS::Region}:${AWS::AccountId}:table/${Env}* + - Effect: Allow + Action: + - secretsmanager:ListSecrets + Resource: ["*"] + - Effect: Allow + Action: + - secretsmanager:CreateSecret + - secretsmanager:DeleteSecret + - secretsmanager:DescribeSecret + Resource: + - !Sub arn:aws:secretsmanager:${AWS::Region}:${AWS::AccountId}:secret:${Env}* + - Effect: Allow + Action: + - secretsmanager:ListSecrets + Resource: ["*"] + - Effect: Allow + Action: + - sqs:CreateQueue + - sqs:DeleteQueue + - sqs:GetQueueAttributes + - sqs:SetQueueAttributes + - sqs:TagQueue + - sqs:ListQueueTags + Resource: + - !Sub arn:aws:sqs:${AWS::Region}:${AWS::AccountId}:${Env}* + - Effect: Allow + Action: + - rds:DescribeDBInstances + Resource: ["*"] + - Effect: Allow + Action: + - rds:CreateDBInstance + - rds:CreateTenantDatabase + - rds:DeleteDBInstance + - rds:DeleteTenantDatabase + - rds:DescribeDBInstances + - rds:AddTagsToResource + - rds:ListTagsForResource + Resource: + - !Sub arn:aws:rds:${AWS::Region}:${AWS::AccountId}:db:${Env}* + - Effect: Allow + Action: + - rds:CreateDBInstance + - rds:CreateDBSubnetGroup + - rds:DeleteDBSubnetGroup + - rds:DescribeDBSubnetGroups + - rds:AddTagsToResource + - rds:ListTagsForResource + Resource: + - !Sub arn:aws:rds:${AWS::Region}:${AWS::AccountId}:subgrp:${Env}* + - 
Effect: Allow + Action: + - lambda:AddPermission + - lambda:CreateFunction + - lambda:DeleteFunction + - lambda:GetFunction + - lambda:GetFunctionCodeSigningConfig + - lambda:GetPolicy + - lambda:GetRuntimeManagementConfig + - lambda:ListVersionsByFunction + - lambda:RemovePermission + - lambda:TagResource + Resource: + - !Sub arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:${Env}* + - Effect: Allow + Action: + - lambda:GetLayerVersion + Resource: ["*"] + - Effect: Allow + Action: + - es:CreateDomain + - es:DeleteDomain + - es:DescribeDomain + - es:DescribeDomainConfig + - es:GetCompatibleVersions + - es:ListTags + - es:AddTags + Resource: + - !Sub arn:aws:es:${AWS::Region}:${AWS::AccountId}:domain/${Env}* + - Effect: Allow + Action: + - elasticloadbalancing:Describe* + - elasticloadbalancing:Get* + Resource: ["*"] + - Effect: Allow + Action: + - cloudwatch:DeleteDashboards + - cloudwatch:GetDashboard + - cloudwatch:PutDashboard + Resource: + - !Sub arn:aws:cloudwatch::${AWS::AccountId}:dashboard/* + - Effect: Allow + Action: + - cloudwatch:GetMetricData + Resource: ["*"] + - Effect: Allow + Action: + - ecr:CreateRepository + - ecr:DeleteRepository + - ecr:DescribeRepositories + - ecr:ListTagsForResource + - ecr:TagResource + Resource: + - !Sub arn:aws:ecr:${AWS::Region}:${AWS::AccountId}:repository/retail-store-sample* + - !Sub arn:aws:ecr:${AWS::Region}:${AWS::AccountId}:repository/${Env}* + - Effect: Allow + Action: + - guardduty:CreateDetector + - guardduty:DeleteDetector + - guardduty:ListDetectors + Resource: ["*"] diff --git a/lab/iam/policies/labs2.yaml b/lab/iam/policies/labs2.yaml new file mode 100644 index 000000000..908e42aa7 --- /dev/null +++ b/lab/iam/policies/labs2.yaml @@ -0,0 +1,116 @@ +Version: "2012-10-17" +Statement: + - Effect: Allow + Action: + - logs:DescribeLogGroups + - logs:ListTagsForResource + Resource: ["*"] + - Effect: Allow + Action: + - logs:CreateLogGroup + - logs:DeleteLogGroup + - logs:DeleteSubscriptionFilter + - 
logs:PutRetentionPolicy + - logs:PutSubscriptionFilter + - logs:TagResource + - logs:TagLogGroup + - logs:Get* + - logs:Describe* + - logs:List* + Resource: + - !Sub arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:${Env}* + - !Sub arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/${Env}* + - !Sub arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/eks/${Env}* + - Effect: Allow + Action: + - events:DeleteRule + - events:DescribeRule + - events:ListTagsForResource + - events:ListTargetsByRule + - events:PutRule + - events:PutTargets + - events:RemoveTargets + - events:TagResource + Resource: + - !Sub arn:aws:events:${AWS::Region}:${AWS::AccountId}:rule/${Env}* + - !Sub arn:aws:events:${AWS::Region}:${AWS::AccountId}:rule/eks-workshop* + - Effect: Allow + Action: + - vpc-lattice:List* + - vpc-lattice:Get* + - vpc-lattice:DeleteServiceNetwork + - vpc-lattice:DeleteServiceNetworkVpcAssociation + Resource: ["*"] + - Effect: Allow + Action: + - elasticfilesystem:CreateFileSystem + - elasticfilesystem:CreateMountTarget + - elasticfilesystem:DeleteFileSystem + - elasticfilesystem:DeleteMountTarget + - elasticfilesystem:DescribeLifecycleConfiguration + - elasticfilesystem:DescribeMountTargetSecurityGroups + - elasticfilesystem:DescribeMountTargets + - elasticfilesystem:CreateTags + - elasticfilesystem:TagResource + - elasticfilesystem:DescribeFileSystems + Resource: + - !Sub arn:aws:elasticfilesystem:${AWS::Region}:${AWS::AccountId}:file-system/* + - Effect: Allow + Action: + - ssm:DescribeParameters + - ssm:ListTagsForResource + Resource: ["*"] + - Effect: Allow + Action: + - ssm:PutParameter + - ssm:GetParameter + - ssm:GetParameters + - ssm:DeleteParameter + - ssm:AddTagsToResource + Resource: + - !Sub arn:aws:ssm:${AWS::Region}:${AWS::AccountId}:parameter/${Env}* + - !Sub arn:aws:ssm:${AWS::Region}:${AWS::AccountId}:parameter/eksworkshop/${Env}* + - Effect: Allow + Action: + - ssm:GetParameter + Resource: + - !Sub 
arn:aws:ssm:${AWS::Region}::parameter/aws/service/eks/optimized-ami/* + - Effect: Allow + Action: + - s3:CreateBucket + - s3:DeleteBucket + - s3:List* + - s3:Get* + - s3:PutBucketPublicAccessBlock + - s3:PutBucketTagging + - s3:DeleteObject + - s3:DeleteObjectVersion + Resource: + - arn:aws:s3:::${Env}* + - arn:aws:s3:::${Env}*/* + - Effect: Allow + Action: + - codecommit:CreateRepository + - codecommit:GetRepository + - codecommit:DeleteRepository + - codecommit:TagResource + - codecommit:ListTagsForResource + Resource: + - !Sub arn:aws:codecommit:${AWS::Region}:${AWS::AccountId}:${Env}* + - Effect: Allow + Action: + - codebuild:CreateProject + - codebuild:DeleteProject + - codebuild:BatchGetProjects + Resource: + - !Sub arn:aws:codebuild:${AWS::Region}:${AWS::AccountId}:project/${Env}* + - Effect: Allow + Action: + - codepipeline:CreatePipeline + - codepipeline:DeletePipeline + - codepipeline:GetPipeline + - codepipeline:GetPipelineState + - codepipeline:ListTagsForResource + - codepipeline:TagResource + Resource: + - !Sub arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Env}* diff --git a/lab/scripts/entrypoint.sh b/lab/scripts/entrypoint.sh index 2697798b1..6cfcf1e94 100644 --- a/lab/scripts/entrypoint.sh +++ b/lab/scripts/entrypoint.sh @@ -5,13 +5,12 @@ set -e bash /tmp/setup.sh if [ ! -z "$EKS_CLUSTER_NAME" ]; then - use-cluster $EKS_CLUSTER_NAME + aws eks update-kubeconfig --name $EKS_CLUSTER_NAME fi if [ $# -eq 0 ] then bash -l else - source /home/ec2-user/.bashrc.d/env.bash - bash -c "$@" + bash -l -c "$@" fi diff --git a/lab/scripts/setup.sh b/lab/scripts/setup.sh index cc426ac33..568c528be 100644 --- a/lab/scripts/setup.sh +++ b/lab/scripts/setup.sh @@ -14,9 +14,17 @@ if [ ! 
-z "$CLOUD9_ENVIRONMENT_ID" ]; then echo "aws cloud9 update-environment --environment-id $CLOUD9_ENVIRONMENT_ID --managed-credentials-action DISABLE &> /dev/null || true" > ~/.bashrc.d/c9.bash fi +AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query "Account" --output text) + cat << EOT > ~/.bashrc.d/aws.bash export AWS_PAGER="" export AWS_REGION="${AWS_REGION}" +export AWS_ACCOUNT_ID="${AWS_ACCOUNT_ID}" +export EKS_CLUSTER_NAME="${EKS_CLUSTER_NAME}" +export EKS_DEFAULT_MNG_NAME="default" +export EKS_DEFAULT_MNG_MIN=3 +export EKS_DEFAULT_MNG_MAX=6 +export EKS_DEFAULT_MNG_DESIRED=3 EOT touch ~/.bashrc.d/workshop-env.bash diff --git a/manifests/.workshop/terraform/base.tf b/manifests/.workshop/terraform/base.tf index 5786d75d9..3041f4b45 100644 --- a/manifests/.workshop/terraform/base.tf +++ b/manifests/.workshop/terraform/base.tf @@ -2,6 +2,10 @@ terraform { required_version = ">= 1.3" required_providers { + aws = { + source = "hashicorp/aws" + version = "5.61.0" + } kubernetes = { source = "hashicorp/kubernetes" version = "2.31.0" @@ -53,6 +57,12 @@ data "aws_eks_cluster_auth" "this" { name = var.eks_cluster_id } +provider "aws" { + default_tags { + tags = local.tags + } +} + provider "kubernetes" { host = local.eks_cluster_endpoint cluster_ca_certificate = base64decode(data.aws_eks_cluster.eks_cluster.certificate_authority[0].data) diff --git a/manifests/modules/aiml/inferentia/.workshop/terraform/main.tf b/manifests/modules/aiml/inferentia/.workshop/terraform/main.tf index 5b792c9f9..a995dadbb 100644 --- a/manifests/modules/aiml/inferentia/.workshop/terraform/main.tf +++ b/manifests/modules/aiml/inferentia/.workshop/terraform/main.tf @@ -22,12 +22,26 @@ module "eks_blueprints_addons" { enable_karpenter = true - karpenter_enable_spot_termination = true + karpenter_enable_spot_termination = false karpenter_enable_instance_profile_creation = true karpenter = { - chart_version = var.karpenter_version - repository_username = 
data.aws_ecrpublic_authorization_token.token.user_name - repository_password = data.aws_ecrpublic_authorization_token.token.password + chart_version = var.karpenter_version + repository_username = data.aws_ecrpublic_authorization_token.token.user_name + repository_password = data.aws_ecrpublic_authorization_token.token.password + role_name = "${var.addon_context.eks_cluster_id}-karpenter-controller" + role_name_use_prefix = false + policy_name = "${var.addon_context.eks_cluster_id}-karpenter-controller" + policy_name_use_prefix = false + } + + karpenter_node = { + iam_role_use_name_prefix = false + iam_role_name = "${var.addon_context.eks_cluster_id}-karpenter-node" + instance_profile_name = "${var.addon_context.eks_cluster_id}-karpenter" + } + + karpenter_sqs = { + queue_name = "${var.addon_context.eks_cluster_id}-karpenter" } cluster_name = var.addon_context.eks_cluster_id @@ -49,13 +63,12 @@ data "aws_subnets" "private" { } resource "aws_s3_bucket" "inference" { - bucket_prefix = "eksworkshop-inference" + bucket_prefix = "${var.addon_context.eks_cluster_id}-inference" force_destroy = true tags = var.tags } - module "iam_assumable_role_inference" { source = "terraform-aws-modules/iam/aws//modules/iam-assumable-role-with-oidc" version = "5.39.1" diff --git a/manifests/modules/automation/controlplanes/ack/.workshop/terraform/main.tf b/manifests/modules/automation/controlplanes/ack/.workshop/terraform/main.tf index c51341b56..d54a4ff57 100644 --- a/manifests/modules/automation/controlplanes/ack/.workshop/terraform/main.tf +++ b/manifests/modules/automation/controlplanes/ack/.workshop/terraform/main.tf @@ -35,6 +35,10 @@ module "dynamodb_ack_addon" { # Controllers to enable enable_dynamodb = true + dynamodb = { + role_name = "${var.addon_context.eks_cluster_id}-ack-ddb" + role_name_use_prefix = false + } tags = var.tags } @@ -68,7 +72,9 @@ module "eks_blueprints_addons" { enable_aws_load_balancer_controller = true aws_load_balancer_controller = { - wait = true + wait 
= true + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" } cluster_name = var.addon_context.eks_cluster_id diff --git a/manifests/modules/automation/controlplanes/crossplane/.workshop/terraform/main.tf b/manifests/modules/automation/controlplanes/crossplane/.workshop/terraform/main.tf index 76fdf7cf3..86094a6da 100644 --- a/manifests/modules/automation/controlplanes/crossplane/.workshop/terraform/main.tf +++ b/manifests/modules/automation/controlplanes/crossplane/.workshop/terraform/main.tf @@ -44,7 +44,8 @@ module "upbound_irsa_aws" { source = "terraform-aws-modules/iam/aws//modules/iam-role-for-service-accounts-eks" version = "5.39.1" - role_name_prefix = "ddb-upbound-aws-" + role_name_prefix = "${var.addon_context.eks_cluster_id}-ddb-upbound-" + policy_name_prefix = "${var.addon_context.eks_cluster_id}-ddb-upbound-" assume_role_condition_test = "StringLike" role_policy_arns = { @@ -128,7 +129,9 @@ module "eks_blueprints_addons" { enable_aws_load_balancer_controller = true aws_load_balancer_controller = { - wait = true + wait = true + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" } cluster_name = var.addon_context.eks_cluster_id diff --git a/manifests/modules/automation/gitops/flux/.workshop/terraform/main.tf b/manifests/modules/automation/gitops/flux/.workshop/terraform/main.tf index b7c048bdd..881a442ad 100644 --- a/manifests/modules/automation/gitops/flux/.workshop/terraform/main.tf +++ b/manifests/modules/automation/gitops/flux/.workshop/terraform/main.tf @@ -545,7 +545,9 @@ module "eks_blueprints_addons" { enable_aws_load_balancer_controller = true aws_load_balancer_controller = { - wait = true + wait = true + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" } cluster_name = 
var.addon_context.eks_cluster_id diff --git a/manifests/modules/autoscaling/compute/cluster-autoscaler/.workshop/terraform/main.tf b/manifests/modules/autoscaling/compute/cluster-autoscaler/.workshop/terraform/main.tf index e9d792963..150aca529 100644 --- a/manifests/modules/autoscaling/compute/cluster-autoscaler/.workshop/terraform/main.tf +++ b/manifests/modules/autoscaling/compute/cluster-autoscaler/.workshop/terraform/main.tf @@ -7,6 +7,12 @@ module "eks_blueprints_addons" { cluster_version = var.eks_cluster_version oidc_provider_arn = var.addon_context.eks_oidc_provider_arn - enable_cluster_autoscaler = true + enable_cluster_autoscaler = true + cluster_autoscaler = { + role_name = "${var.addon_context.eks_cluster_id}-cluster-autoscaler" + role_name_use_prefix = false + policy_name = "${var.addon_context.eks_cluster_id}-cluster-autoscaler" + policy_name_use_prefix = false + } create_kubernetes_resources = false } diff --git a/manifests/modules/autoscaling/compute/karpenter/.workshop/terraform/main.tf b/manifests/modules/autoscaling/compute/karpenter/.workshop/terraform/main.tf index fa4e672ee..b914858aa 100644 --- a/manifests/modules/autoscaling/compute/karpenter/.workshop/terraform/main.tf +++ b/manifests/modules/autoscaling/compute/karpenter/.workshop/terraform/main.tf @@ -13,6 +13,14 @@ module "karpenter" { enable_pod_identity = true create_pod_identity_association = true namespace = "karpenter" + iam_role_name = "${var.addon_context.eks_cluster_id}-karpenter-controller" + iam_role_use_name_prefix = false + iam_policy_name = "${var.addon_context.eks_cluster_id}-karpenter-controller" + iam_policy_use_name_prefix = false + node_iam_role_name = "${var.addon_context.eks_cluster_id}-karpenter-node" + node_iam_role_use_name_prefix = false + queue_name = "${var.addon_context.eks_cluster_id}-karpenter" + rule_name_prefix = "eks-workshop" node_iam_role_additional_policies = { AmazonSSMManagedInstanceCore = "arn:aws:iam::aws:policy/AmazonSSMManagedInstanceCore" diff 
--git a/manifests/modules/autoscaling/workloads/keda/.workshop/terraform/main.tf b/manifests/modules/autoscaling/workloads/keda/.workshop/terraform/main.tf index fafee4603..04a445064 100644 --- a/manifests/modules/autoscaling/workloads/keda/.workshop/terraform/main.tf +++ b/manifests/modules/autoscaling/workloads/keda/.workshop/terraform/main.tf @@ -10,6 +10,10 @@ module "eks_blueprints_addons" { oidc_provider_arn = var.addon_context.eks_oidc_provider_arn enable_aws_load_balancer_controller = true + aws_load_balancer_controller = { + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" + } } module "iam_assumable_role_keda" { diff --git a/manifests/modules/exposing/ingress/.workshop/terraform/main.tf b/manifests/modules/exposing/ingress/.workshop/terraform/main.tf index 601a2d4a3..f0f2c7602 100644 --- a/manifests/modules/exposing/ingress/.workshop/terraform/main.tf +++ b/manifests/modules/exposing/ingress/.workshop/terraform/main.tf @@ -8,5 +8,10 @@ module "eks_blueprints_addons" { oidc_provider_arn = var.addon_context.eks_oidc_provider_arn enable_aws_load_balancer_controller = true - create_kubernetes_resources = false + aws_load_balancer_controller = { + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" + } + + create_kubernetes_resources = false } diff --git a/manifests/modules/exposing/load-balancer/.workshop/terraform/main.tf b/manifests/modules/exposing/load-balancer/.workshop/terraform/main.tf index 601a2d4a3..f0f2c7602 100644 --- a/manifests/modules/exposing/load-balancer/.workshop/terraform/main.tf +++ b/manifests/modules/exposing/load-balancer/.workshop/terraform/main.tf @@ -8,5 +8,10 @@ module "eks_blueprints_addons" { oidc_provider_arn = var.addon_context.eks_oidc_provider_arn enable_aws_load_balancer_controller = true - create_kubernetes_resources = false + aws_load_balancer_controller = { 
+ role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" + } + + create_kubernetes_resources = false } diff --git a/manifests/modules/fundamentals/fargate/profile/fargate.yaml b/manifests/modules/fundamentals/fargate/profile/fargate.yaml deleted file mode 100644 index c3e438940..000000000 --- a/manifests/modules/fundamentals/fargate/profile/fargate.yaml +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: eksctl.io/v1alpha5 -kind: ClusterConfig - -metadata: - name: $EKS_CLUSTER_NAME - region: $AWS_REGION - -fargateProfiles: - - name: checkout-profile - selectors: - - namespace: checkout - labels: - fargate: "yes" - subnets: - - $PRIVATE_SUBNET_1 - - $PRIVATE_SUBNET_2 - - $PRIVATE_SUBNET_3 - podExecutionRoleARN: $FARGATE_IAM_PROFILE_ARN diff --git a/manifests/modules/fundamentals/storage/ebs/.workshop/terraform/main.tf b/manifests/modules/fundamentals/storage/ebs/.workshop/terraform/main.tf index 59152013a..21e3a0272 100644 --- a/manifests/modules/fundamentals/storage/ebs/.workshop/terraform/main.tf +++ b/manifests/modules/fundamentals/storage/ebs/.workshop/terraform/main.tf @@ -2,7 +2,8 @@ module "ebs_csi_driver_irsa" { source = "terraform-aws-modules/iam/aws//modules/iam-role-for-service-accounts-eks" version = "5.39.1" - role_name_prefix = "${var.addon_context.eks_cluster_id}-ebs-csi-" + role_name_prefix = "${var.addon_context.eks_cluster_id}-ebs-csi-" + policy_name_prefix = "${var.addon_context.eks_cluster_id}-ebs-csi-" attach_ebs_csi_policy = true diff --git a/manifests/modules/fundamentals/storage/efs/.workshop/terraform/main.tf b/manifests/modules/fundamentals/storage/efs/.workshop/terraform/main.tf index 15fd12452..1e225b177 100644 --- a/manifests/modules/fundamentals/storage/efs/.workshop/terraform/main.tf +++ b/manifests/modules/fundamentals/storage/efs/.workshop/terraform/main.tf @@ -2,7 +2,8 @@ module "efs_csi_driver_irsa" { source = 
"terraform-aws-modules/iam/aws//modules/iam-role-for-service-accounts-eks" version = "5.39.1" - role_name_prefix = "${var.addon_context.eks_cluster_id}-efs-csi-" + role_name_prefix = "${var.addon_context.eks_cluster_id}-efs-csi-" + policy_name_prefix = "${var.addon_context.eks_cluster_id}-efs-csi-" attach_efs_csi_policy = true diff --git a/manifests/modules/networking/custom-networking/.workshop/cleanup.sh b/manifests/modules/networking/custom-networking/.workshop/cleanup.sh index e3370e0f1..80913ff3a 100644 --- a/manifests/modules/networking/custom-networking/.workshop/cleanup.sh +++ b/manifests/modules/networking/custom-networking/.workshop/cleanup.sh @@ -2,6 +2,8 @@ set -e +logmessage "WARNING! This lab takes additional time to clean up to ensure lab stability, please be patient" + logmessage "Deleting ENI configs..." kubectl delete ENIConfig --all -A @@ -23,6 +25,8 @@ do aws ec2 terminate-instances --instance-ids $INSTANCE_ID done +sleep 30 + custom_nodegroup=$(aws eks list-nodegroups --cluster-name $EKS_CLUSTER_NAME --query "nodegroups[? @ == 'custom-networking']" --output text) if [ !
-z "$custom_nodegroup" ]; then diff --git a/manifests/modules/networking/network-policies/.workshop/terraform/main.tf b/manifests/modules/networking/network-policies/.workshop/terraform/main.tf index a98dc1783..dd0ce05ff 100644 --- a/manifests/modules/networking/network-policies/.workshop/terraform/main.tf +++ b/manifests/modules/networking/network-policies/.workshop/terraform/main.tf @@ -3,6 +3,10 @@ module "eks_blueprints_addons" { version = "1.16.3" enable_aws_load_balancer_controller = true + aws_load_balancer_controller = { + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" + } cluster_name = var.addon_context.eks_cluster_id cluster_endpoint = var.addon_context.aws_eks_cluster_endpoint diff --git a/manifests/modules/networking/vpc-lattice/.workshop/terraform/main.tf b/manifests/modules/networking/vpc-lattice/.workshop/terraform/main.tf index 2c16658fe..f39380cae 100644 --- a/manifests/modules/networking/vpc-lattice/.workshop/terraform/main.tf +++ b/manifests/modules/networking/vpc-lattice/.workshop/terraform/main.tf @@ -4,7 +4,9 @@ module "eks_blueprints_addons" { enable_aws_load_balancer_controller = true aws_load_balancer_controller = { - wait = true + wait = true + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" } cluster_name = var.addon_context.eks_cluster_id diff --git a/manifests/modules/observability/container-insights/.workshop/terraform/main.tf b/manifests/modules/observability/container-insights/.workshop/terraform/main.tf index 7989393f4..0bd35ac44 100644 --- a/manifests/modules/observability/container-insights/.workshop/terraform/main.tf +++ b/manifests/modules/observability/container-insights/.workshop/terraform/main.tf @@ -12,7 +12,9 @@ module "eks_blueprints_addons" { enable_aws_load_balancer_controller = true aws_load_balancer_controller = { - wait = true + wait = true + 
role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" } } diff --git a/manifests/modules/observability/kubecost/.workshop/terraform/main.tf b/manifests/modules/observability/kubecost/.workshop/terraform/main.tf index a620cf90e..8d617efba 100644 --- a/manifests/modules/observability/kubecost/.workshop/terraform/main.tf +++ b/manifests/modules/observability/kubecost/.workshop/terraform/main.tf @@ -2,7 +2,8 @@ module "ebs_csi_driver_irsa" { source = "terraform-aws-modules/iam/aws//modules/iam-role-for-service-accounts-eks" version = "5.39.1" - role_name_prefix = "${var.addon_context.eks_cluster_id}-ebs-csi-" + role_name_prefix = "${var.addon_context.eks_cluster_id}-ebs-csi-" + policy_name_prefix = "${var.addon_context.eks_cluster_id}-ebs-csi-" attach_ebs_csi_policy = true @@ -36,7 +37,9 @@ module "eks_blueprints_addons" { enable_aws_load_balancer_controller = true aws_load_balancer_controller = { - wait = true + wait = true + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" } } diff --git a/manifests/modules/observability/oss-metrics/.workshop/terraform/main.tf b/manifests/modules/observability/oss-metrics/.workshop/terraform/main.tf index dd5b8be1e..b32e4da31 100644 --- a/manifests/modules/observability/oss-metrics/.workshop/terraform/main.tf +++ b/manifests/modules/observability/oss-metrics/.workshop/terraform/main.tf @@ -4,7 +4,8 @@ module "ebs_csi_driver_irsa" { source = "terraform-aws-modules/iam/aws//modules/iam-role-for-service-accounts-eks" version = "5.39.1" - role_name_prefix = "${var.addon_context.eks_cluster_id}-ebs-csi-" + role_name_prefix = "${var.addon_context.eks_cluster_id}-ebs-csi-" + policy_name_prefix = "${var.addon_context.eks_cluster_id}-ebs-csi-" attach_ebs_csi_policy = true @@ -38,7 +39,9 @@ module "eks_blueprints_addons" { enable_aws_load_balancer_controller = true 
aws_load_balancer_controller = { - wait = true + wait = true + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" } } diff --git a/manifests/modules/security/eks-pod-identity/.workshop/terraform/main.tf b/manifests/modules/security/eks-pod-identity/.workshop/terraform/main.tf index e6dd8c94e..44fd60ac4 100644 --- a/manifests/modules/security/eks-pod-identity/.workshop/terraform/main.tf +++ b/manifests/modules/security/eks-pod-identity/.workshop/terraform/main.tf @@ -8,7 +8,9 @@ module "eks_blueprints_addons" { enable_aws_load_balancer_controller = true aws_load_balancer_controller = { - wait = true + wait = true + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" } cluster_name = var.addon_context.eks_cluster_id diff --git a/manifests/modules/security/irsa/.workshop/terraform/main.tf b/manifests/modules/security/irsa/.workshop/terraform/main.tf index 8c1a531e1..1067420fb 100644 --- a/manifests/modules/security/irsa/.workshop/terraform/main.tf +++ b/manifests/modules/security/irsa/.workshop/terraform/main.tf @@ -8,7 +8,9 @@ module "eks_blueprints_addons" { enable_aws_load_balancer_controller = true aws_load_balancer_controller = { - wait = true + wait = true + role_name = "${var.addon_context.eks_cluster_id}-alb-controller" + policy_name = "${var.addon_context.eks_cluster_id}-alb-controller" } cluster_name = var.addon_context.eks_cluster_id diff --git a/manifests/modules/security/secrets-manager/.workshop/terraform/main.tf b/manifests/modules/security/secrets-manager/.workshop/terraform/main.tf index 8a6f62316..c1e93da3f 100644 --- a/manifests/modules/security/secrets-manager/.workshop/terraform/main.tf +++ b/manifests/modules/security/secrets-manager/.workshop/terraform/main.tf @@ -44,7 +44,8 @@ module "secrets_manager_role" { source = 
"terraform-aws-modules/iam/aws//modules/iam-role-for-service-accounts-eks" version = "5.39.1" - role_name_prefix = "${var.eks_cluster_id}-secrets-" + role_name_prefix = "${var.eks_cluster_id}-secrets-" + policy_name_prefix = "${var.eks_cluster_id}-secrets-" role_policy_arns = { policy = aws_iam_policy.secrets_manager.arn diff --git a/website/docs/fundamentals/fargate/enabling.md b/website/docs/fundamentals/fargate/enabling.md index a7a78bb8a..cb626c681 100644 --- a/website/docs/fundamentals/fargate/enabling.md +++ b/website/docs/fundamentals/fargate/enabling.md @@ -9,13 +9,7 @@ As an administrator, you can use a Fargate profile to declare which Pods run on If a Pod matches multiple Fargate profiles, you can specify which profile a Pod uses by adding the following Kubernetes label to the Pod specification: `eks.amazonaws.com/fargate-profile: my-fargate-profile`. The Pod must match a selector in that profile to be scheduled onto Fargate. Kubernetes affinity/anti-affinity rules do not apply and aren't necessary with Amazon EKS Fargate Pods. -Lets start by adding a Fargate profile to our EKS cluster. This is the `eksctl` configuration we'll use: - -```file -manifests/modules/fundamentals/fargate/profile/fargate.yaml -``` - -This configuration creates a Fargate profile called `checkout-profile` with the following characteristics: +Let's start by adding a Fargate profile to our EKS cluster. The command below creates a Fargate profile called `checkout-profile` with the following characteristics: 1. Target Pods in the `checkout` namespace that have the label `fargate: yes` 2.
Place pod in the private subnets of the VPC @@ -24,9 +18,15 @@ This configuration creates a Fargate profile called `checkout-profile` with the The following command creates the profile, which will take several minutes: ```bash timeout=600 -$ cat ~/environment/eks-workshop/modules/fundamentals/fargate/profile/fargate.yaml \ -| envsubst \ -| eksctl create fargateprofile -f - +$ aws eks create-fargate-profile \ + --cluster-name ${EKS_CLUSTER_NAME} \ + --pod-execution-role-arn $FARGATE_IAM_PROFILE_ARN \ + --fargate-profile-name checkout-profile \ + --selectors '[{"namespace": "checkout", "labels": {"fargate": "yes"}}]' \ + --subnets "[\"$PRIVATE_SUBNET_1\", \"$PRIVATE_SUBNET_2\", \"$PRIVATE_SUBNET_3\"]" + +$ aws eks wait fargate-profile-active --cluster-name ${EKS_CLUSTER_NAME} \ + --fargate-profile-name checkout-profile ``` Now we can inspect the Fargate profile: diff --git a/website/docs/introduction/assets/vscode-copy-paste.webp b/website/docs/introduction/assets/vscode-copy-paste.webp new file mode 100644 index 000000000..01933604f Binary files /dev/null and b/website/docs/introduction/assets/vscode-copy-paste.webp differ diff --git a/website/docs/introduction/navigating-labs.md b/website/docs/introduction/navigating-labs.md index 574c6500b..56d497727 100644 --- a/website/docs/introduction/navigating-labs.md +++ b/website/docs/introduction/navigating-labs.md @@ -3,6 +3,9 @@ title: Navigating the labs sidebar_position: 25 --- +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + Let’s review how to navigate this web site and the content provided. ## Structure @@ -22,7 +25,8 @@ This module contains a single lab named **Getting started** which will be visibl You should start each lab from the page indicated by this badge. Starting in the middle of a lab will cause unpredictable behavior. 
::: -## Cloud9 IDE + + Once you have accessed the Cloud9 IDE, we recommend you use the **+** button and select **New Terminal** to open a new full screen terminal window. @@ -33,6 +37,15 @@ This will open a new tab with a fresh terminal. ![Shows new Cloud9 terminal](./assets/terminal.webp) You may also close the small terminal at the bottom if you wish. + + + +Depending on your browser the first time you copy/paste content in to the VSCode terminal you may be presented with a prompt that looks like this: + +![VSCode copy/paste](./assets/vscode-copy-paste.webp) + + + ## Terminal commands diff --git a/website/docs/introduction/setup/your-account/assets/vscode-outputs.webp b/website/docs/introduction/setup/your-account/assets/vscode-outputs.webp new file mode 100644 index 000000000..0fc1b1e7b Binary files /dev/null and b/website/docs/introduction/setup/your-account/assets/vscode-outputs.webp differ diff --git a/website/docs/introduction/setup/your-account/assets/vscode-password-retrieve.webp b/website/docs/introduction/setup/your-account/assets/vscode-password-retrieve.webp new file mode 100644 index 000000000..70183cd10 Binary files /dev/null and b/website/docs/introduction/setup/your-account/assets/vscode-password-retrieve.webp differ diff --git a/website/docs/introduction/setup/your-account/assets/vscode-password-visible.webp b/website/docs/introduction/setup/your-account/assets/vscode-password-visible.webp new file mode 100644 index 000000000..5dcd58768 Binary files /dev/null and b/website/docs/introduction/setup/your-account/assets/vscode-password-visible.webp differ diff --git a/website/docs/introduction/setup/your-account/assets/vscode-password.webp b/website/docs/introduction/setup/your-account/assets/vscode-password.webp new file mode 100644 index 000000000..ca5c43dbc Binary files /dev/null and b/website/docs/introduction/setup/your-account/assets/vscode-password.webp differ diff --git a/website/docs/introduction/setup/your-account/assets/vscode-splash.webp 
b/website/docs/introduction/setup/your-account/assets/vscode-splash.webp new file mode 100644 index 000000000..df06190c7 Binary files /dev/null and b/website/docs/introduction/setup/your-account/assets/vscode-splash.webp differ diff --git a/website/docs/introduction/setup/your-account/cleanup.md b/website/docs/introduction/setup/your-account/cleanup.md index 20c0f4ba5..6204c9525 100644 --- a/website/docs/introduction/setup/your-account/cleanup.md +++ b/website/docs/introduction/setup/your-account/cleanup.md @@ -12,7 +12,7 @@ Make sure you have run the respective clean up instructions for the mechanism yo ::: -This section outlines how to clean up the Cloud9 IDE we've used to run the labs. +This section outlines how to clean up the IDE we've used to run the labs. Start by opening CloudShell in the region where you deployed the CloudFormation stack: diff --git a/website/docs/introduction/setup/your-account/index.md b/website/docs/introduction/setup/your-account/index.md index 3877da221..3fed9a532 100644 --- a/website/docs/introduction/setup/your-account/index.md +++ b/website/docs/introduction/setup/your-account/index.md @@ -3,24 +3,48 @@ title: In your AWS account sidebar_position: 30 --- +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + :::danger Warning Provisioning this workshop environment in your AWS account will create resources and **there will be cost associated with them**. The cleanup section provides a guide to remove them, preventing further charges. ::: This section outlines how to set up the environment to run the labs in your own AWS account. -The first step is to create an IDE with the provided CloudFormation template. 
The easiest way to do this is using the quick launch links below: - -| Region | Link | -| ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `us-west2` | [Launch](https://us-west-2.console.aws.amazon.com/cloudformation/home#/stacks/quickcreate?templateUrl=https://ws-assets-prod-iad-r-pdx-f3b3f9f1a7d6a3d0.s3.us-west-2.amazonaws.com/39146514-f6d5-41cb-86ef-359f9d2f7265/eks-workshop-ide-cfn.yaml&stackName=eks-workshop-ide¶m_RepositoryRef=VAR::MANIFESTS_REF) | -| `eu-west-1` | [Launch](https://eu-west-1.console.aws.amazon.com/cloudformation/home#/stacks/quickcreate?templateUrl=https://ws-assets-prod-iad-r-dub-85e3be25bd827406.s3.eu-west-1.amazonaws.com/39146514-f6d5-41cb-86ef-359f9d2f7265/eks-workshop-ide-cfn.yaml&stackName=eks-workshop-ide¶m_RepositoryRef=VAR::MANIFESTS_REF) | -| `ap-southeast-1` | [Launch](https://ap-southeast-1.console.aws.amazon.com/cloudformation/home#/stacks/quickcreate?templateUrl=https://ws-assets-prod-iad-r-sin-694a125e41645312.s3.ap-southeast-1.amazonaws.com/39146514-f6d5-41cb-86ef-359f9d2f7265/eks-workshop-ide-cfn.yaml&stackName=eks-workshop-ide¶m_RepositoryRef=VAR::MANIFESTS_REF") | +The first step is to create an IDE with the provided CloudFormation templates. You have the choice between using AWS Cloud9 or a browser-accessible instance of VSCode that will run on an EC2 instance in your AWS account. :::tip + +After careful consideration, we have made the decision to close new customer access to AWS Cloud9, effective July 25, 2024. AWS Cloud9 existing customers can continue to use the service as normal. AWS continues to invest in security, availability, and performance improvements for AWS Cloud9, but we do not plan to introduce new features. 
+ +If you cannot access Cloud9 in your AWS account you must use the VSCode option. + +::: + +Use the AWS CloudFormation quick-create links below to launch the desired template in the appropriate AWS region. + +| Region | Cloud9 Link | VSCode Link (Preview) | +| ---------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `us-west2` | [Launch](https://us-west-2.console.aws.amazon.com/cloudformation/home#/stacks/quickcreate?templateUrl=https://ws-assets-prod-iad-r-pdx-f3b3f9f1a7d6a3d0.s3.us-west-2.amazonaws.com/39146514-f6d5-41cb-86ef-359f9d2f7265/eks-workshop-ide-cfn.yaml&stackName=eks-workshop-ide¶m_RepositoryRef=VAR::MANIFESTS_REF) | [Launch](https://us-west-2.console.aws.amazon.com/cloudformation/home#/stacks/quickcreate?templateUrl=https://ws-assets-prod-iad-r-pdx-f3b3f9f1a7d6a3d0.s3.us-west-2.amazonaws.com/39146514-f6d5-41cb-86ef-359f9d2f7265/eks-workshop-vscode-cfn.yaml&stackName=eks-workshop-ide¶m_RepositoryRef=VAR::MANIFESTS_REF) | +| `eu-west-1` | [Launch](https://eu-west-1.console.aws.amazon.com/cloudformation/home#/stacks/quickcreate?templateUrl=https://ws-assets-prod-iad-r-dub-85e3be25bd827406.s3.eu-west-1.amazonaws.com/39146514-f6d5-41cb-86ef-359f9d2f7265/eks-workshop-ide-cfn.yaml&stackName=eks-workshop-ide¶m_RepositoryRef=VAR::MANIFESTS_REF) | 
[Launch](https://eu-west-1.console.aws.amazon.com/cloudformation/home#/stacks/quickcreate?templateUrl=https://ws-assets-prod-iad-r-dub-85e3be25bd827406.s3.eu-west-1.amazonaws.com/39146514-f6d5-41cb-86ef-359f9d2f7265/eks-workshop-vscode-cfn.yaml&stackName=eks-workshop-ide&param_RepositoryRef=VAR::MANIFESTS_REF) | +| `ap-southeast-1` | [Launch](https://ap-southeast-1.console.aws.amazon.com/cloudformation/home#/stacks/quickcreate?templateUrl=https://ws-assets-prod-iad-r-sin-694a125e41645312.s3.ap-southeast-1.amazonaws.com/39146514-f6d5-41cb-86ef-359f9d2f7265/eks-workshop-ide-cfn.yaml&stackName=eks-workshop-ide&param_RepositoryRef=VAR::MANIFESTS_REF) | [Launch](https://ap-southeast-1.console.aws.amazon.com/cloudformation/home#/stacks/quickcreate?templateUrl=https://ws-assets-prod-iad-r-sin-694a125e41645312.s3.ap-southeast-1.amazonaws.com/39146514-f6d5-41cb-86ef-359f9d2f7265/eks-workshop-vscode-cfn.yaml&stackName=eks-workshop-ide&param_RepositoryRef=VAR::MANIFESTS_REF) | + These instructions have been tested in the AWS regions listed above and are not guaranteed to work in others without modification. + +:::warning + +The nature of the workshop material means that the IDE EC2 instance requires broad IAM permissions in your account, for example creating IAM roles. Before continuing please review the IAM permissions that will be provided to the IDE instance in the CloudFormation template. + +We are continuously working to optimize the IAM permissions. Please raise a [GitHub issue](https://github.com/aws-samples/eks-workshop-v2/issues) with any suggestions for improvement. + ::: +Now select the tab that corresponds to the IDE that you have installed.
+ + + + Scroll to the bottom of the screen and acknowledge the IAM notice: ![acknowledge IAM](./assets/acknowledge-iam.webp) @@ -43,8 +67,44 @@ You can now close CloudShell, all further commands will be run in the terminal s $ aws sts get-caller-identity ``` + + + +Scroll to the bottom of the screen and acknowledge the IAM notice: + +![acknowledge IAM](./assets/acknowledge-iam.webp) + +Then click the **Create stack** button: + +![Create Stack](./assets/create-stack.webp) + +The CloudFormation stack will take roughly 5 minutes to deploy, and once completed you can retrieve information required to continue from the **Outputs** tab: + +![cloudformation outputs](./assets/vscode-outputs.webp) + +The `IdeUrl` output contains the URL to enter in your browser to access the IDE. The `IdePasswordSecret` contains a link to an AWS Secrets Manager secret that contains a generated password for the IDE. + +To retrieve the password, open that URL and click the **Retrieve** button: + +![secretsmanager retrieve](./assets/vscode-password-retrieve.webp) + +The password will then be available for you to copy: + +![cloudformation outputs](./assets/vscode-password-visible.webp) + +Open the IDE URL provided and you will be prompted for the password: + +![cloudformation outputs](./assets/vscode-password.webp) + +After submitting your password you will be presented with the initial VSCode screen: + +![cloudformation outputs](./assets/vscode-splash.webp) + + + + The next step is to create an EKS cluster to perform the lab exercises in. Please follow one of the guides below to provision a cluster that meets the requirements for these labs: - **(Recommended)** [eksctl](./using-eksctl.md) -- Terraform +- [Terraform](./using-terraform.md) - (Coming soon!)
CDK diff --git a/website/src/components/Terminal/index.tsx b/website/src/components/Terminal/index.tsx index 735152f2a..6e88346f9 100644 --- a/website/src/components/Terminal/index.tsx +++ b/website/src/components/Terminal/index.tsx @@ -16,7 +16,7 @@ export default function Terminal({ output }: Props): JSX.Element { let sections: Array = []; - let section = new TerminalSection(); + let section = new TerminalSection(0); let appendNext = false; @@ -27,7 +27,7 @@ export default function Terminal({ output }: Props): JSX.Element { if (!appendNext) { if (currentLine.startsWith("$ ")) { - section = new TerminalSection(); + section = new TerminalSection(i); sections.push(section); currentLine = currentLine.substring(2); @@ -40,7 +40,7 @@ export default function Terminal({ output }: Props): JSX.Element { } const handler = () => { - navigator.clipboard.writeText(`${allCommands}\n`); + triggerCopy(`${allCommands}\n`); }; return ( @@ -80,8 +80,11 @@ class TerminalSection { private inHeredoc = false; private commandString: string = ""; private inCommand = true; + private index: number; + + constructor(index: number) { + this.index = index; - constructor() { this.context = this.commandContext = new TerminalCommand(); this.contexts.push(this.context); } @@ -127,24 +130,37 @@ class TerminalSection { render() { const commandString = this.commandContext.getCommand(); const handler = () => { - navigator.clipboard.writeText(commandString); + triggerCopy(commandString); }; return (
- {this.contexts.map((element) => { - return element.render(); + {this.contexts.map((element, index) => { + return element.render(index); })}
); } } +function triggerCopy(text: string) { + navigator.permissions + .query({ name: "clipboard-write" as PermissionName }) + .then((e) => { + if (e.state === "granted") { + navigator.clipboard.writeText(text); + } + }); + + window.parent.postMessage(`eks-workshop-terminal:${text}`, "*"); +} + class TerminalContext { protected lines: Array = []; @@ -152,7 +168,7 @@ class TerminalContext { this.lines.push(line); } - render() { + render(index: number) { return
; } @@ -172,17 +188,17 @@ class TerminalCommand extends TerminalContext { return this.lines.join("\n"); } - render() { + render(index: number) { return ( -
+
~ $ {this.renderCommand(this.lines[0], false)}
- {this.lines.slice(1).map((element) => { + {this.lines.slice(1).map((element, lineIndex) => { return ( -
+
{this.renderCommand(element, true)}
); @@ -199,9 +215,9 @@ class TerminalCommand extends TerminalContext { } class TerminalOutput extends TerminalContext { - render() { + render(index: number) { return ( -
+
{this.lines.join("\n")}
);