From e9751adde24372aaf139bf7ec2acebfee75977ae Mon Sep 17 00:00:00 2001 From: SanDim Ciin Date: Mon, 1 May 2023 17:08:39 -0700 Subject: [PATCH 1/2] release v2.1.0 changes --- .github/workflows/pr-workflow.yml | 198 ------------------ .viperlightignore | 11 - .viperlightrc | 4 - CHANGELOG.md | 10 + README.md | 10 +- buildspec.yml | 39 ---- deployment/aws-content-analysis-auth.yaml | 4 +- .../aws-content-analysis-opensearch.yaml | 2 +- ...ntent-analysis-use-existing-mie-stack.yaml | 2 +- deployment/aws-content-analysis-web.yaml | 7 +- deployment/aws-content-analysis.yaml | 20 +- deployment/build-open-source-dist.sh | 121 ----------- deployment/build-s3-dist.sh | 12 +- .../anonymous-data-logger/lib/cfnresponse.py | 1 + source/consumer/requirements.txt | 4 +- 15 files changed, 43 insertions(+), 402 deletions(-) delete mode 100644 .github/workflows/pr-workflow.yml delete mode 100644 .viperlightignore delete mode 100644 .viperlightrc delete mode 100644 buildspec.yml delete mode 100755 deployment/build-open-source-dist.sh diff --git a/.github/workflows/pr-workflow.yml b/.github/workflows/pr-workflow.yml deleted file mode 100644 index 9b47a98..0000000 --- a/.github/workflows/pr-workflow.yml +++ /dev/null @@ -1,198 +0,0 @@ -name: pr-workflow - -on: - pull_request: - - workflow_dispatch: - inputs: - remove_stack: - description: 'Remove stack when finished?' - required: true - default: 'true' - -jobs: - build-us-west-2: - runs-on: ubuntu-latest - env: - REGION: "us-west-2" - VERSION: "3.0.0" - EMAIL: ${{ secrets.INVITATION_EMAIL_RECIPIENT }} - steps: - - name: Check out pr branch - uses: actions/checkout@v2 - with: - ref: ${{ github.sha }} - - - name: Initialize AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.BUILD_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.BUILD_AWS_SECRET_ACCESS_KEY }} - aws-region: us-west-2 - - - name: Setup build environment - run: | - echo "SHORT_SHA=`git rev-parse --short HEAD`" >> $GITHUB_ENV - DATETIME=$(date '+%s') - echo "DIST_OUTPUT_BUCKET=aws-content-analysis-$DATETIME-dist" >> $GITHUB_ENV - echo "TEMPLATE_OUTPUT_BUCKET=aws-content-analysis-$DATETIME" >> $GITHUB_ENV - - - name: Run build script - run: | - cd deployment - aws s3 mb s3://$DIST_OUTPUT_BUCKET-$REGION --region $REGION - aws s3 mb s3://$TEMPLATE_OUTPUT_BUCKET --region $REGION - echo y | ./build-s3-dist.sh --template-bucket ${TEMPLATE_OUTPUT_BUCKET} --code-bucket ${DIST_OUTPUT_BUCKET} --version ${VERSION} --region ${REGION} | tee >( awk '/Without existing MIE deployment/{getline; print}' >template ) - - - name: Deploy stack - run: | - cd deployment - TEMPLATE=$(cat template | cut -f 2 -d "'") - rm -f template - STACK_NAME="pr${SHORT_SHA}" - set -x - - # Delete STACK_NAME if it already exists. - # This is necessary in order to rerun github action workflows. - # If $STACK_NAME exists... - if [ $(echo $(aws cloudformation list-stacks --query 'StackSummaries[?StackName==`$STACK_NAME`]' --output text) | tr -d '\n' | wc -c) > 0 ]; then - # Then delete $STACK_NAME... 
- echo "Removing $STACK_NAME so we can use that stack name again" - aws cloudformation delete-stack --stack-name $STACK_NAME --region $REGION - aws cloudformation wait stack-delete-complete --stack-name $STACK_NAME - fi - - aws cloudformation create-stack --stack-name $STACK_NAME --template-url $TEMPLATE --region $REGION --parameters ParameterKey=AdminEmail,ParameterValue=$EMAIL --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM CAPABILITY_AUTO_EXPAND --disable-rollback - aws cloudformation wait stack-create-complete --stack-name $STACK_NAME - set +x - WEBAPP_URL=$(aws cloudformation --region us-west-2 describe-stacks --stack-name $STACK_NAME --query "Stacks[0].Outputs[?OutputKey=='ContentAnalyisSolution'].OutputValue" --output text) - # Make the WEBAPP_URL available to the next workflow step - echo "WEBAPP_URL=${WEBAPP_URL}" >> $GITHUB_ENV - - - name: Clean build environment - run: aws s3 rb s3://${DIST_OUTPUT_BUCKET}-${REGION} --force - - - name: Get login credentials - run: | - # Iterate thru all the files in the s3://github-test-bot2 until you find the invitation email that references our stack - NUM_EMAILS=$(aws s3 ls s3://github-test-bot2 | wc -l) - for i in `seq 1 $NUM_EMAILS`; do - INVITATION_EMAIL=$(aws s3api list-objects-v2 --bucket "github-test-bot2" --query 'reverse(sort_by(Contents, &LastModified))['$((i-1))'].Key' --output=text) - # Make sure it belongs to our stack - aws s3 cp s3://github-test-bot2/$INVITATION_EMAIL ./invitation_email --quiet - STACK_NAME="pr${SHORT_SHA}" - # Check to see if this invitation email is for the $STACK_NAME stack - grep ":stack/${STACK_NAME}" ./invitation_email > /dev/null - if [ $? -eq 0 ]; - then - echo "Found invitation email in s3://github-test-bot2/$INVITATION_EMAIL" - # we found the invitation email so quit looking - break; - fi; - done; - # Remove the invitation email from s3 - aws s3 rm s3://github-test-bot2/$INVITATION_EMAIL - TEMP_PASSWORD=$(cat ./invitation_email | grep 'temporary password' | sed 's/.*password is \(.*\)
AWS.*/\1/') - # Password may contain HTML entities, so decode them to characters - TEMP_PASSWORD=$(echo $TEMP_PASSWORD | perl -MHTML::Entities -pe 'decode_entities($_);') - # Make TEMP_PASSWORD available to the next workflow step - echo "TEMP_PASSWORD=${TEMP_PASSWORD}" >> $GITHUB_ENV - - - name: Start workflow - run: | - STACK_NAME="pr${SHORT_SHA}" - # Get the workflow api endpoint - MIE_STACK_NAME=$(aws cloudformation list-stacks --query 'StackSummaries[?starts_with(StackName,`'$STACK_NAME'-MieStack`) && StackStatus==`CREATE_COMPLETE`].StackName' --output json --region $REGION | grep MieStack | cut -f 2 -d '"' | tail -n 1) - WORKFLOW_API_ENDPOINT=$(aws cloudformation describe-stacks --stack-name "$MIE_STACK_NAME" --region $REGION --query "Stacks[0].Outputs[?OutputKey=='WorkflowApiEndpoint'].OutputValue" --output text) - DATAPLANE_BUCKET=$(aws cloudformation describe-stacks --stack-name "$MIE_STACK_NAME" --region $REGION --query "Stacks[0].Outputs[?OutputKey=='DataplaneBucket'].OutputValue" --output text) - # Upload a test video file - wget -q https://techmkt-videoarchive.s3-us-west-2.amazonaws.com/amazon_studios/sizzle_reels/Amazon+TCA+2019+Series+Sizzle.mp4 -O AmazonVideoSizzle2019.mp4 - aws s3 cp AmazonVideoSizzle2019.mp4 s3://${DATAPLANE_BUCKET} - # Install an IAM enabled HTTP client - pip install awscurl - - # - # Uncomment to enable CasImageWorkflow: - # - # ##################################### - # ###### TEST CasImageWorkflow ####### - # ##################################### - # # TODO: upload TEST_IMAGE.png image file to dataplane bucket - # # Get workflow configuration - # WORKFLOW_NAME=CasImageWorkflow - # # Disable faceSearch - # WORKFLOW_CONFIGURATION=$(awscurl -X GET --region us-west-2 ${WORKFLOW_API_ENDPOINT}workflow/$WORKFLOW_NAME | cut -f 2 -d "'" | perl -pe 's/"Definition.+?}]}}}",//g' | jq '.Stages.RekognitionStage.Configuration' --compact-output) - # WORKFLOW_CONFIGURATION=$(echo $WORKFLOW_CONFIGURATION | sed -e 's/"faceSearchImage":{"MediaType":"Image","Enabled":true}/"faceSearchImage":{"MediaType":"Image","Enabled":false}/') - # WORKFLOW_CONFIGURATION='{"RekognitionStage":'$WORKFLOW_CONFIGURATION'}' - # # Execute workflow - # awscurl -X POST --region us-west-2 --data '{"Name":"CasImageWorkflow", "Configuration":'$WORKFLOW_CONFIGURATION', "Input":{"Media":{"Image":{"S3Bucket": "'${DATAPLANE_BUCKET}'", "S3Key":"TEST_IMAGE.png"}}}}' ${WORKFLOW_API_ENDPOINT}workflow/execution > curl.txt - - # # Wait until the workflow is done - # WORKFLOW_ID=$(cat curl.txt | cut -f 2 -d "'" | perl -pe 's/"Definition.+?}]}}}",//g' | jq '.Id' --raw-output) - # WORKFLOW_STATUS=$(awscurl -X GET --region us-west-2 ${WORKFLOW_API_ENDPOINT}workflow/execution/${WORKFLOW_ID} | cat | cut -f 2 -d "'" | perl -pe 's/"Definition.+?}]}}}",//g' | jq '.Status' --raw-output) - # while [ "$WORKFLOW_STATUS" = "Started" ] || [ "$WORKFLOW_STATUS" = "Queued" ]; do sleep 1; WORKFLOW_STATUS=$(awscurl -X GET --region us-west-2 ${WORKFLOW_API_ENDPOINT}workflow/execution/${WORKFLOW_ID} | cat | cut -f 2 -d "'" | perl -pe 's/"Definition.+?}]}}}",//g' | jq '.Status' --raw-output); echo $WORKFLOW_STATUS; done - - #################################### - ###### TEST CasVideoWorkflow ####### - #################################### - WORKFLOW_NAME=CasVideoWorkflow - # Disable faceSearch and GenericDataLookup operator - WORKFLOW_CONFIGURATION=$(awscurl -X GET --region us-west-2 ${WORKFLOW_API_ENDPOINT}workflow/$WORKFLOW_NAME | cut -f 2 -d "'" | perl -pe 's/"Definition.+?}]}}}",//g' | jq 
'.Stages.defaultVideoStage.Configuration' --compact-output) - WORKFLOW_CONFIGURATION=$(echo $WORKFLOW_CONFIGURATION | sed -e 's/"faceSearch":{"MediaType":"Video","Enabled":true}/"faceSearch":{"MediaType":"Video","Enabled":false}/') - WORKFLOW_CONFIGURATION=$(echo $WORKFLOW_CONFIGURATION | sed -e 's/"GenericDataLookup":{"MediaType":"Video","Enabled":true}/"GenericDataLookup":{"MediaType":"Video","Enabled":false}/') - WORKFLOW_CONFIGURATION='{"defaultVideoStage":'$WORKFLOW_CONFIGURATION'}' - echo "WORKFLOW_CONFIGURATION:" - echo $WORKFLOW_CONFIGURATION - echo "Starting CasVideoWorkflow" - set -x - # Execute workflow - awscurl -X POST --region us-west-2 --data '{"Name":"CasVideoWorkflow", "Configuration":'$WORKFLOW_CONFIGURATION', "Input":{"Media":{"Video":{"S3Bucket": "'${DATAPLANE_BUCKET}'", "S3Key":"AmazonVideoSizzle2019.mp4"}}}}' ${WORKFLOW_API_ENDPOINT}workflow/execution > curl.txt - set +x - # Wait until the workflow is done - WORKFLOW_ID=$(cat curl.txt | cut -f 2 -d "'" | perl -pe 's/"Definition.+?}]}}}",//g' | jq '.Id' --raw-output) - echo "WORKFLOW_ID: $WORKFLOW_ID" - WORKFLOW_STATUS=$(awscurl -X GET --region us-west-2 ${WORKFLOW_API_ENDPOINT}workflow/execution/${WORKFLOW_ID} | cat | cut -f 2 -d "'" | perl -pe 's/"Definition.+?}]}}}",//g' | jq '.Status' --raw-output) - echo "Waiting for workflow to complete..." - while [ "$WORKFLOW_STATUS" = "Started" ] || [ "$WORKFLOW_STATUS" = "Queued" ]; do sleep 10; WORKFLOW_STATUS=$(awscurl -X GET --region us-west-2 ${WORKFLOW_API_ENDPOINT}workflow/execution/${WORKFLOW_ID} | cat | cut -f 2 -d "'" | perl -pe 's/"Definition.+?}]}}}",//g' | jq '.Status' --raw-output); echo -n '.'; done - echo -e "\n$WORKFLOW_STATUS" - - - name: Start puppeteer - # The checkout action above changes the work dir in a way that breaks - # the docker commands in this action, so we need to specify work dir - # explicitly here. - # TODO: after moving the aws-media-insights repo to aws-content-analysis, then remove the if statement at the beginning: - working-directory: /home/runner/work/ - run: | - if [ -d ./aws-media-insights/ ]; then - cp -R ./aws-media-insights/aws-media-insights/source/website/test . - elif [ -d ./aws-content-analysis/ ]; then - cp -R ./aws-content-analysis/aws-content-analysis/source/website/test . - else - echo "ERROR: Cannot find test files" - exit 1 - fi - cd test/ - echo "Building puppeteer tests" - npm init -y - npm i puppeteer --quiet - docker build --tag=cas-puppeteer:latest . --quiet - echo "Running puppeteer tests" - docker run --rm -v "$PWD":/usr/src/app -e WEBAPP_URL="${{ env.WEBAPP_URL }}" -e INVITATION_EMAIL_RECIPIENT="${{ secrets.INVITATION_EMAIL_RECIPIENT }}" -e TEMP_PASSWORD="${{ env.TEMP_PASSWORD }}" cas-puppeteer:latest - - - name: Clean the test environment - run: | - # remove stack by default - if [ -z ${{ github.event.inputs.remove_stack }} ] || [ ${{ github.event.inputs.remove_stack }} = "true" ]; then - STACK_NAME="pr${SHORT_SHA}" - echo "Removing $STACK_NAME" - aws cloudformation delete-stack --stack-name $STACK_NAME --region $REGION - echo "Waiting for stack to delete..." - aws cloudformation wait stack-delete-complete --stack-name $STACK_NAME - echo "Removing $STACK_NAME S3 buckets:" - aws s3 ls | awk '{print $3}' | grep $STACK_NAME | while read line; do - echo "s3://$line"; - aws s3 rb s3://$line --force > /dev/null; - done - else - echo "User requested not to delete stack." 
- fi diff --git a/.viperlightignore b/.viperlightignore deleted file mode 100644 index 8c4d151..0000000 --- a/.viperlightignore +++ /dev/null @@ -1,11 +0,0 @@ -.github/workflows/pr-workflow.yml -test/app.js -source/consumer/lambda_handler.py:729 -deployment/aws-content-analysis-auth.yaml:177 -CODE_OF_CONDUCT.md:4 -CONTRIBUTING.md:50 -README.md:180 -README.md:181 -README.md:205 -cloudformation/aws-content-analysis-auth.yaml:166 -source/consumer/lambda_handler.py:732 diff --git a/.viperlightrc b/.viperlightrc deleted file mode 100644 index 34036e5..0000000 --- a/.viperlightrc +++ /dev/null @@ -1,4 +0,0 @@ -{ - "all": true, - "failOn": "medium" -} diff --git a/CHANGELOG.md b/CHANGELOG.md index 416c2bb..f66392c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [2.1.0] - 2023-05-01 + +### Changed: +* Upgrade Media Insights on AWS dependency to v5.1.2 +* Updated template prefix from aws-content-analysis to content-analysis-on-aws + +### Fixed: + +* Updated object ownership configuration for ContentAnalysisWebsiteBucket +* Fixed formatting in Auth stack for Admin Role ## [2.0.2] - 2023-01-11 diff --git a/README.md b/README.md index a011886..d160bb3 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ The following Cloudformation templates will deploy the Content Analysis front-en Region| Launch ------|----- -US East (N. Virginia) | [![Launch in us-east-1](docs/images/launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks/new?stackName=cas&templateURL=https://s3.amazonaws.com/solutions-reference/aws-content-analysis/latest/aws-content-analysis.template) -US West (Oregon) | [![Launch in us-west-2](docs/images/launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=cas&templateURL=https://s3.amazonaws.com/solutions-reference/aws-content-analysis/latest/aws-content-analysis.template) +US East (N. Virginia) | [![Launch in us-east-1](docs/images/launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks/new?stackName=cas&templateURL=https://s3.amazonaws.com/solutions-reference/content-analysis-on-aws/latest/aws-content-analysis.template) +US West (Oregon) | [![Launch in us-west-2](docs/images/launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=cas&templateURL=https://s3.amazonaws.com/solutions-reference/content-analysis-on-aws/latest/aws-content-analysis.template) Once the Cloud Formation stack has been created, open the URL shown in the `ContentAnalyisSolution` output of the base stack. 
You can also get this URL with the following AWS CLI command: @@ -81,11 +81,11 @@ EMAIL=[specify your email] WEBAPP_STACK_NAME=[specify a stack name] REGION=[specify a region] VERSION=1.0.0 -git clone https://github.com/awslabs/aws-content-analysis -cd aws-content-analysis +git clone https://github.com/aws-solutions/content-analysis-on-aws +cd content-analysis-on-aws cd deployment DATETIME=$(date '+%s') -DIST_OUTPUT_BUCKET=aws-content-analysis-frontend-$DATETIME +DIST_OUTPUT_BUCKET=content-analysis-on-aws-frontend-$DATETIME aws s3 mb s3://$DIST_OUTPUT_BUCKET-$REGION --region $REGION aws s3 mb s3://$TEMPLATE_OUTPUT_BUCKET --region $REGION ./build-s3-dist.sh --template-bucket ${TEMPLATE_OUTPUT_BUCKET} --code-bucket ${DIST_OUTPUT_BUCKET} --version ${VERSION} --region ${REGION} diff --git a/buildspec.yml b/buildspec.yml deleted file mode 100644 index bf47817..0000000 --- a/buildspec.yml +++ /dev/null @@ -1,39 +0,0 @@ -version: 0.2 - -############################################################################### -# DO NOT MODIFY THIS FILE. -# This file is used by the build pipeline for the [CAS solution](https://aws.amazon.com/solutions/implementations/aws-content-analysis/). -############################################################################### - -phases: - install: - runtime-versions: - nodejs: 12 - python: 3.8 - commands: - - echo "nothing to do in install" - pre_build: - commands: - - echo "Installing dependencies and executing unit tests - `pwd`" - - cd deployment - - echo "Installing dependencies and executing unit tests completed `date`" - build: - commands: - - echo "Starting build `date` in `pwd`" - - env - - chmod +x ./build-s3-dist.sh && ./build-s3-dist.sh --template-bucket ${TEMPLATE_OUTPUT_BUCKET} --code-bucket ${DIST_OUTPUT_BUCKET} --version ${VERSION} --region ${AWS_REGION} - - echo "Build completed `date`" - - echo "Starting open-source-dist `date` in `pwd`" - - chmod +x ./build-open-source-dist.sh && ./build-open-source-dist.sh $SOLUTION_NAME - - echo "Open Source Dist completed `date`" - post_build: - commands: - - echo "Retrieving next stage buildspec `date` in `pwd`" - - aws s3 cp s3://solutions-build-assets/changelog-spec.yml ../buildspec.yml - - echo "Retrieving next stage buildspec complete" - - echo "Post build completed on `date`" -artifacts: - files: - - deployment/**/* - - CHANGELOG.md - - buildspec.yml diff --git a/deployment/aws-content-analysis-auth.yaml b/deployment/aws-content-analysis-auth.yaml index d903546..8be5c51 100755 --- a/deployment/aws-content-analysis-auth.yaml +++ b/deployment/aws-content-analysis-auth.yaml @@ -279,10 +279,10 @@ Resources: "Resource": "${searchdomain}/*" }, { - "Action": [ + "Action": [ "kms:Encrypt", "kms:GenerateDataKey", - "kms:Decrypt", + "kms:Decrypt" ], "Effect": "Allow", "Resource": "${kmskeyarn}" diff --git a/deployment/aws-content-analysis-opensearch.yaml b/deployment/aws-content-analysis-opensearch.yaml index f2e22bf..620a663 100755 --- a/deployment/aws-content-analysis-opensearch.yaml +++ b/deployment/aws-content-analysis-opensearch.yaml @@ -33,7 +33,7 @@ Mappings: SourceCode: General: RegionalS3Bucket: '%%REGIONAL_BUCKET_NAME%%' - KeyPrefix: "aws-content-analysis/%%VERSION%%" + KeyPrefix: "content-analysis-on-aws/%%VERSION%%" Resources: # Opensearch cluster diff --git a/deployment/aws-content-analysis-use-existing-mie-stack.yaml b/deployment/aws-content-analysis-use-existing-mie-stack.yaml index 7449ddb..0929661 100755 --- a/deployment/aws-content-analysis-use-existing-mie-stack.yaml +++ 
b/deployment/aws-content-analysis-use-existing-mie-stack.yaml @@ -25,7 +25,7 @@ Mappings: ContentAnalysisApp: SourceCode: GlobalS3Bucket: "%%GLOBAL_BUCKET_NAME%%" - TemplateKeyPrefix: "aws-content-analysis/%%VERSION%%" + TemplateKeyPrefix: "content-analysis-on-aws/%%VERSION%%" Resources: diff --git a/deployment/aws-content-analysis-web.yaml b/deployment/aws-content-analysis-web.yaml index 6152b6b..3d703fc 100755 --- a/deployment/aws-content-analysis-web.yaml +++ b/deployment/aws-content-analysis-web.yaml @@ -21,8 +21,8 @@ Mappings: SourceCode: General: RegionalS3Bucket: "%%REGIONAL_BUCKET_NAME%%" - CodeKeyPrefix: "aws-content-analysis/%%VERSION%%" - WebsitePrefix: "aws-content-analysis/%%VERSION%%/website" + CodeKeyPrefix: "content-analysis-on-aws/%%VERSION%%" + WebsitePrefix: "content-analysis-on-aws/%%VERSION%%/website" Resources: # Web application resources @@ -92,6 +92,9 @@ Resources: DeletionPolicy: Retain Properties: AccessControl: LogDeliveryWrite + OwnershipControls: + Rules: + - ObjectOwnership: ObjectWriter BucketName: !GetAtt GetWebsiteBucketName.Data BucketEncryption: ServerSideEncryptionConfiguration: diff --git a/deployment/aws-content-analysis.yaml b/deployment/aws-content-analysis.yaml index 11fd99e..55f96ad 100755 --- a/deployment/aws-content-analysis.yaml +++ b/deployment/aws-content-analysis.yaml @@ -19,22 +19,22 @@ Parameters: - "r4.xlarge.search" Conditions: - EnableAnonymousData: !Equals [ !FindInMap [AnonymousData,SendAnonymousData,Data], Yes] + EnableAnonymousData: !Equals [ !FindInMap [AnonymousData,SendAnonymousData,Data], "Yes"] Mappings: MediaInsightsEngine: Release: - Version: "v5.0.0" + Version: "v5.1.2" Application: SourceCode: GlobalS3Bucket: "%%GLOBAL_BUCKET_NAME%%" - TemplateKeyPrefix: "aws-content-analysis/%%VERSION%%" + TemplateKeyPrefix: "content-analysis-on-aws/%%VERSION%%" RegionalS3Bucket: "%%REGIONAL_BUCKET_NAME%%" - CodeKeyPrefix: "aws-content-analysis/%%VERSION%%" + CodeKeyPrefix: "content-analysis-on-aws/%%VERSION%%" Version: "%%VERSION%%" AnonymousData: SendAnonymousData: - Data: Yes + Data: "Yes" Resources: # Deploy MIE Framework @@ -44,17 +44,17 @@ Resources: Properties: TemplateURL: !Join - "" - - - "https://solutions-reference.s3.amazonaws.com/aws-media-insights-engine/" + - - "https://solutions-reference.s3.amazonaws.com/media-insights-on-aws/" - !FindInMap - MediaInsightsEngine - Release - Version - - "/media-insights-stack.template" + - "/media-insights-on-aws-stack.template" Parameters: - DeployAnalyticsPipeline: Yes - DeployTestResources: No + DeployAnalyticsPipeline: "Yes" + DeployTestResources: "No" MaxConcurrentWorkflows: 5 - EnableXrayTrace: Yes + EnableXrayTrace: "Yes" SendAnonymousData: !FindInMap [AnonymousData,SendAnonymousData,Data] SolutionId: SO0042 SolutionVersion: "%%VERSION%%" diff --git a/deployment/build-open-source-dist.sh b/deployment/build-open-source-dist.sh deleted file mode 100755 index 1b73fe1..0000000 --- a/deployment/build-open-source-dist.sh +++ /dev/null @@ -1,121 +0,0 @@ -#!/bin/bash -############################################################################### -# DO NOT MODIFY THIS FILE. -# This file is used by the build pipeline for the [MIE solution](https://aws.amazon.com/solutions/implementations/aws-media-insights-engine/). 
-############################################################################### -# -# This assumes all of the OS-level configuration has been completed and git repo has already been cloned -# -# This script should be run from the repo's deployment directory -# cd deployment -# ./build-open-source-dist.sh solution-name -# -# Paramenters: -# - solution-name: name of the solution for consistency - -# Check to see if input has been provided: -if [ -z "$1" ]; then - echo "Please provide the trademark approved solution name for the open source package." - echo "For example: ./build-open-source-dist.sh trademarked-solution-name" - exit 1 -fi - -# Get reference for all important folders -orig_template_dir="$PWD" -orig_source_dir="$orig_template_dir/../source" -dist_dir="$orig_template_dir/open-source/"$1"" -dist_template_dir="$dist_dir/deployment" -dist_source_dir="$dist_dir/source" - -echo "------------------------------------------------------------------------------" -echo "[Init] Clean old open-source folder" -echo "------------------------------------------------------------------------------" -echo "rm -rf $dist_dir/" -rm -rf "$dist_dir"/ -echo "rm -rf $dist_dir/../$1.zip" -rm -f "$dist_dir"/../"$1".zip -echo "mkdir -p $dist_dir" -mkdir -p "$dist_dir" -echo "mkdir -p $dist_template_dir" -mkdir -p "$dist_template_dir" -echo "mkdir -p $dist_source_dir" -mkdir -p "$dist_source_dir" - -echo "------------------------------------------------------------------------------" -echo "[Packing] Templates" -echo "------------------------------------------------------------------------------" -echo "copy yaml templates" -cp "$orig_template_dir"/*.yaml "$dist_template_dir"/ - -echo "------------------------------------------------------------------------------" -echo "[Packing] Build Script" -echo "------------------------------------------------------------------------------" -echo "cp $orig_template_dir/build-s3-dist.sh $dist_template_dir" -cp "$orig_template_dir"/build-s3-dist.sh "$dist_template_dir" - -echo "------------------------------------------------------------------------------" -echo "[Packing] Source Folder" -echo "------------------------------------------------------------------------------" -echo "cp -R $orig_source_dir/* $dist_source_dir/" -cp -R "$orig_source_dir"/* "$dist_source_dir"/ - -echo "------------------------------------------------------------------------------" -echo "[Packing] Documentation" -echo "------------------------------------------------------------------------------" -echo "cp -R $orig_template_dir/../doc $dist_dir" -cp -R $orig_template_dir/../doc $dist_dir -echo "cp $orig_template_dir/../LICENSE.txt $dist_dir" -cp $orig_template_dir/../LICENSE.txt $dist_dir -echo "cp $orig_template_dir/../NOTICE.txt $dist_dir" -cp $orig_template_dir/../NOTICE.txt $dist_dir -echo "cp $orig_template_dir/../README.md $dist_dir" -cp $orig_template_dir/../README.md $dist_dir -echo "cp $orig_template_dir/../CODE_OF_CONDUCT.md $dist_dir" -cp $orig_template_dir/../CODE_OF_CONDUCT.md $dist_dir -echo "cp $orig_template_dir/../CONTRIBUTING.md $dist_dir" -cp $orig_template_dir/../CONTRIBUTING.md $dist_dir -echo "cp $orig_template_dir/../CHANGELOG.md $dist_dir" -cp $orig_template_dir/../CHANGELOG.md $dist_dir - -echo "------------------------------------------------------------------------------" -echo "[Packing] Remove compiled python and node.js files" -echo "------------------------------------------------------------------------------" -echo "find $dist_dir -iname "dist" -type d -exec rm 
-rf "{}" \; 2> /dev/null" -find $dist_dir -iname "dist" -type d -exec rm -rf "{}" \; 2> /dev/null -echo "find $dist_dir -iname "package" -type d -exec rm -rf "{}" \; 2> /dev/null" -find $dist_dir -iname "package" -type d -exec rm -rf "{}" \; 2> /dev/null -echo "find $dist_dir -iname "__pycache__" -type d -exec rm -rf "{}" \; 2> /dev/null" -find $dist_dir -iname "__pycache__" -type d -exec rm -rf "{}" \; 2> /dev/null -echo "find $dist_dir -iname "node_modules" -type d -exec rm -rf "{}" \; 2> /dev/null" -find $dist_dir -iname "node_modules" -type d -exec rm -rf "{}" \; 2> /dev/null -echo "find $dist_dir -iname "deployments" -type d -exec rm -rf "{}" \; 2> /dev/null" -find $dist_dir -iname "deployments" -type d -exec rm -rf "{}" \; 2> /dev/null -echo "find ../ -type f -name 'package-lock.json' -delete" -find $dist_dir -type f -name 'package-lock.json' -delete - -echo "------------------------------------------------------------------------------" -echo "[Packing] Clean library and lambda layer folders" -echo "------------------------------------------------------------------------------" - -echo 'rm -rf "$dist_dir"/source/lib/MediaInsightsEngineLambdaHelper/build' -rm -rf "$dist_dir"/source/lib/MediaInsightsEngineLambdaHelper/build -echo 'rm -rf "$dist_dir"/orig_source_dir/lib/MediaInsightsEngineLambdaHelper/Media_Insights_Engine_Lambda_Helper.egg-info' -rm -rf "$dist_dir"/source/lib/MediaInsightsEngineLambdaHelper/Media_Insights_Engine_Lambda_Helper.egg-info -echo 'rm -f "$dist_dir"/source/lambda_layer_factory/media_insights_engine_lambda_layer_python*.zip*' -rm -f "$dist_dir"/source/lambda_layer_factory/media_insights_engine_lambda_layer_python*.zip* -echo 'rm -f "$dist_dir"/source/lambda_layer_factory/Media_Insights_Engine*.whl' -rm -f "$dist_dir"/source/lambda_layer_factory/Media_Insights_Engine*.whl -echo 'rm -rf "$dist_dir"/source/lambda_layer_factory/MediaInsightsEngineLambdaHelper' -rm -rf "$dist_dir"/source/lambda_layer_factory/MediaInsightsEngineLambdaHelper - -echo "------------------------------------------------------------------------------" -echo "[Packing] Create GitHub (open-source) zip file" -echo "------------------------------------------------------------------------------" -echo "cd $dist_dir" -cd $dist_dir/../ -echo "zip -q -r9 ./$1.zip $1" -zip -q -r9 ./"$1".zip "$1" -echo "Clean up open-source folder" -echo "rm -rf $1" -rm -rf "$1" -echo "Completed building $1.zip dist" diff --git a/deployment/build-s3-dist.sh b/deployment/build-s3-dist.sh index 61499f3..0c88b68 100755 --- a/deployment/build-s3-dist.sh +++ b/deployment/build-s3-dist.sh @@ -291,7 +291,7 @@ echo "-------------------------------------------------------------------------- # instead of doing a list bucket operation, which would require ListBucket permission. # Furthermore, the S3 bucket used to host AWS solutions (s3://solutions-reference) # disallows ListBucket access, so the only way to copy files from -# s3://solutions-reference/aws-content-analysis/latest/website to +# s3://solutions-reference/content-analysis-on-aws/latest/website to # ContentAnalysisWebsiteBucket is to use said manifest file. # cd $regional_dist_dir"/website/" || exit 1 @@ -342,7 +342,7 @@ cp "./dist/anonymous-data-logger.zip" "$regional_dist_dir/anonymous-data-logger. # Skip copy dist to S3 if building for solution builder because # that pipeline takes care of copying the dist in another script. -if [ "$global_bucket" != "solutions-reference" ] && [ "$global_bucket" != "solutions-test-reference" ]; then +if [[ ! 
"$global_bucket" =~ solutions(-[a-z]+)?-reference ]]; then echo "------------------------------------------------------------------------------" echo "Copy dist to S3" @@ -394,8 +394,8 @@ if [ "$global_bucket" != "solutions-reference" ] && [ "$global_bucket" != "solut echo "---" set -x - aws s3 sync $global_dist_dir s3://$global_bucket/aws-content-analysis/$version/ $(if [ ! -z $profile ]; then echo "--profile $profile"; fi) - aws s3 sync $regional_dist_dir s3://${regional_bucket}-${region}/aws-content-analysis/$version/ $(if [ ! -z $profile ]; then echo "--profile $profile"; fi) + aws s3 sync $global_dist_dir s3://$global_bucket/content-analysis-on-aws/$version/ $(if [ ! -z $profile ]; then echo "--profile $profile"; fi) + aws s3 sync $regional_dist_dir s3://${regional_bucket}-${region}/content-analysis-on-aws/$version/ $(if [ ! -z $profile ]; then echo "--profile $profile"; fi) set +x echo "------------------------------------------------------------------------------" @@ -406,9 +406,9 @@ if [ "$global_bucket" != "solutions-reference" ] && [ "$global_bucket" != "solut echo "Template to deploy:" echo "" echo "With existing MIE deployment:" - echo "TEMPLATE='"https://"$global_bucket"."$s3domain"/aws-content-analysis/"$version"/aws-content-analysis-use-existing-mie-stack.template"'" + echo "TEMPLATE='"https://"$global_bucket"."$s3domain"/content-analysis-on-aws/"$version"/aws-content-analysis-use-existing-mie-stack.template"'" echo "Without existing MIE deployment:" - echo "TEMPLATE='"https://"$global_bucket"."$s3domain"/aws-content-analysis/"$version"/aws-content-analysis.template"'" + echo "TEMPLATE='"https://"$global_bucket"."$s3domain"/content-analysis-on-aws/"$version"/aws-content-analysis.template"'" fi cleanup diff --git a/source/anonymous-data-logger/lib/cfnresponse.py b/source/anonymous-data-logger/lib/cfnresponse.py index 54175b5..381a1bf 100644 --- a/source/anonymous-data-logger/lib/cfnresponse.py +++ b/source/anonymous-data-logger/lib/cfnresponse.py @@ -40,6 +40,7 @@ def send(event, context, responseStatus, responseData, physicalResourceId=None, try: response = requests.put(responseUrl, + timeout=20, data=json_responseBody, headers=headers) print("Status code: " + response.reason) diff --git a/source/consumer/requirements.txt b/source/consumer/requirements.txt index 53b24f2..96f147d 100644 --- a/source/consumer/requirements.txt +++ b/source/consumer/requirements.txt @@ -1,2 +1,2 @@ -elasticsearch==8.6.0 -requests-aws4auth==1.1.1 +elasticsearch==8.7.0 +requests-aws4auth==1.2.2 From e834ccdcc36f96054af67e44e012f0588b8fb5be Mon Sep 17 00:00:00 2001 From: SanDim Ciin Date: Tue, 2 May 2023 09:27:48 -0700 Subject: [PATCH 2/2] Change template link to old name --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index d160bb3..e7b3e6a 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ The following Cloudformation templates will deploy the Content Analysis front-en Region| Launch ------|----- -US East (N. 
Virginia) | [![Launch in us-east-1](docs/images/launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks/new?stackName=cas&templateURL=https://s3.amazonaws.com/solutions-reference/content-analysis-on-aws/latest/aws-content-analysis.template) -US West (Oregon) | [![Launch in us-west-2](docs/images/launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=cas&templateURL=https://s3.amazonaws.com/solutions-reference/content-analysis-on-aws/latest/aws-content-analysis.template) +US East (N. Virginia) | [![Launch in us-east-1](docs/images/launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks/new?stackName=cas&templateURL=https://s3.amazonaws.com/solutions-reference/aws-content-analysis/latest/aws-content-analysis.template) +US West (Oregon) | [![Launch in us-west-2](docs/images/launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=cas&templateURL=https://s3.amazonaws.com/solutions-reference/aws-content-analysis/latest/aws-content-analysis.template) Once the Cloud Formation stack has been created, open the URL shown in the `ContentAnalyisSolution` output of the base stack. You can also get this URL with the following AWS CLI command:
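
The command referenced here falls outside the quoted hunk, but the same stack-output lookup appears verbatim in the pr-workflow.yml removed by this patch. A minimal sketch of it, assuming WEBAPP_STACK_NAME and REGION are set as in the README's build steps shown earlier:

```shell
# Look up the web app URL from the base stack's outputs.
# "ContentAnalyisSolution" is the output key spelled exactly as it appears in this patch.
aws cloudformation describe-stacks \
  --stack-name $WEBAPP_STACK_NAME \
  --region $REGION \
  --query "Stacks[0].Outputs[?OutputKey=='ContentAnalyisSolution'].OutputValue" \
  --output text
```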