From 7fc0997794bd529f59cd75805d163f7c2c62e67e Mon Sep 17 00:00:00 2001
From: Pritish Budhiraja
Date: Mon, 16 Sep 2024 10:22:01 +0000
Subject: [PATCH] Pull request #311: chore: Jenkins File Refactor

Merge in EXC/orca-elements from jenkins-file-refactor to main-oss

Squashed commit of the following:

commit bba865d0776bdff5eb4b5834fac0d02e34781885
Author: Pritish Budhiraja
Date:   Mon Sep 16 14:58:32 2024 +0530

    chore: comments addressed

commit c6140a2ed305d0d369d05a453aa0e86a960cb7c1
Author: Pritish Budhiraja
Date:   Mon Sep 16 12:24:42 2024 +0530

    chore: Jenkins File Refactor
---
 Jenkinsfile | 141 +++++++++++++++++++++++++---------------------------
 1 file changed, 68 insertions(+), 73 deletions(-)

NOTE(review): this patch was recovered from a whitespace-mangled copy; the
per-line indentation inside the hunks is reconstructed best-effort and should
be re-verified against the original commit 7fc0997 before applying.
NOTE(review): the added `def s3Push(...)` is declared inside the `stages { }`
block — Declarative Pipeline does not allow method definitions there; it
should be moved above `pipeline {` (or into a shared library). Also `source`
in the `sh """..."""` step requires bash, not POSIX sh — confirm the agent's
default shell.

diff --git a/Jenkinsfile b/Jenkinsfile
index 748d9882d..d6bee6a72 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,7 +1,8 @@
 pipeline {
     agent {
-        label 'orca-elements'
+        label 'orca-elements'
     }
+
     environment {
         AWS_REGION_INTEG = 'eu-central-1'
         AWS_REGION_SANDBOX = 'eu-central-1'
@@ -34,16 +35,25 @@
             '''
         )}"""
     }
+
     stages {
         stage('Clone OSS Repo') {
             steps {
                 sh "echo ${env.VERSION}"
                 sh 'mkdir hyperswitch-web'
                 dir('hyperswitch-web') {
-                    checkout changelog: false, poll: false, scm: [$class: 'GitSCM', branches: [[name: "refs/tags/${env.VERSION}"]], extensions: [], userRemoteConfigs: [[credentialsId: 'hyperswitch-sdk-github-deploy-key', url: 'git@github.com:juspay/hyperswitch-web']]]
+                    checkout changelog: false, poll: false, scm: [
+                        $class: 'GitSCM',
+                        branches: [[name: "refs/tags/${env.VERSION}"]],
+                        extensions: [],
+                        userRemoteConfigs: [
+                            [credentialsId: 'hyperswitch-sdk-github-deploy-key', url: 'git@github.com:juspay/hyperswitch-web']
+                        ]
+                    ]
                 }
             }
         }
+
         stage('Apply Hacks') {
             steps {
                 dir('hyperswitch-web') {
@@ -55,97 +65,82 @@
                 }
             }
         }
-        stage('Application Init')
-        {
+        stage('Application Init') {
             steps {
-                sh 'rm -rf .husky'
-                sh 'npm install --force'
-                sh 'npm install fast-glob'
-                sh 'npm install @aws-sdk/client-cloudfront@^3.414.0'
-                sh 'npm install @aws-sdk/client-s3@^3.417.0'
-                sh 'node --version'
-                sh 'npm run re:start'
-                sh 'npm run build:integ'
-                sh 'npm run build:sandbox'
-                sh 'npm run build:prod'
+                sh """
+                    source ~/.nvm/nvm.sh
+                    nvm install v20.11.1
+                    nvm use v20.11.1
+                    npm install
+                    rm -rf .husky
+                    npm install --force
+                    npm install fast-glob
+                    npm install @aws-sdk/client-cloudfront@^3.414.0
+                    npm install @aws-sdk/client-s3@^3.417.0
+                    node --version
+                    npm run re:start
+                    npm run build:integ
+                    npm run build:sandbox
+                    npm run build:prod
+                """
             }
         }
+
+        // * Function to deploy to S3
+        def s3Push(roleArn, awsRegion, bucketName, distId, envName) {
+            script {
+                RESPONSE = sh(script: "aws sts assume-role --role-arn ${roleArn} --role-session-name s3-bucket-access", returnStdout: true).trim()
+                writeFile(file: 'temp.json', text: RESPONSE)
+
+                AWS_SECRET_ACCESS_KEY = sh(script: "jq -r '.Credentials.SecretAccessKey' temp.json", returnStdout: true).trim()
+                AWS_SESSION_TOKEN = sh(script: "jq -r '.Credentials.SessionToken' temp.json", returnStdout: true).trim()
+                AWS_ACCESS_KEY_ID = sh(script: "jq -r '.Credentials.AccessKeyId' temp.json", returnStdout: true).trim()
+
+                DATA = """
+                    export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
+                    export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
+                    export AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN}
+                    export AWS_REGION=${awsRegion}
+                    export BUCKET_NAME=${bucketName}
+                    export DIST_ID=${distId}
+                    export ENV='${envName}'
+                    npm run deploy-to-s3
+                """
+
+                writeFile(file: 'temp.sh', text: DATA)
+                sh('chmod +x temp.sh && ./temp.sh')
+                sh('rm -f temp.sh temp.json') // Clean up
+            }
+        }
+
+        // * Stage to deploy to Integ
         stage('Integ S3 Push') {
             when {
-                anyOf {
-                    branch 'main-oss'
-                }
+                branch 'main-oss'
             }
             steps {
-                script {
-                    RESPONSE = sh(
-                        script: "aws sts assume-role --role-arn ${env.ASSUMED_ROLE_INTEG} --role-session-name s3-bucket-access",
-                        returnStdout: true
-                    ).trim()
-                    writeFile(file: 'temp.json', text: RESPONSE)
-                    AWS_SECRET_ACCESS_KEY = sh(script: "jq '.Credentials.SecretAccessKey' temp.json", returnStdout: true).trim()
-                    AWS_SESSION_TOKEN = sh(script: "jq '.Credentials.SessionToken' temp.json", returnStdout: true).trim()
-                    AWS_ACCESS_KEY_ID = sh(script: "jq '.Credentials.AccessKeyId' temp.json", returnStdout: true).trim()
-                    DATA = "export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}\n export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}\n export AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN}\n export AWS_REGION=${AWS_REGION_INTEG}\n export BUCKET_NAME=${S3_INTEG_BUCKET}\n export DIST_ID=${DIST_ID_INTEG}\n export ENV='integ'\n npm run deploy-to-s3"
-                    writeFile(file: 'temp.sh', text: DATA)
-                    sh('chmod +x temp.sh && ./temp.sh')
-                }
+                s3Push(env.ASSUMED_ROLE_INTEG, env.AWS_REGION_INTEG, env.S3_INTEG_BUCKET, env.DIST_ID_INTEG, 'integ')
             }
         }
+
+        // * Stage to deploy to Sandbox
         stage('Sandbox S3 Push') {
             when {
-                anyOf {
-                    branch 'main-oss'
-                }
+                branch 'main-oss'
             }
             steps {
-                script {
-                    RESPONSE = sh(
-                        script: "aws sts assume-role --role-arn ${env.ASSUMED_ROLE_SANDBOX} --role-session-name s3-bucket-access",
-                        returnStdout: true
-                    ).trim()
-                    writeFile(file: 'temp.json', text: RESPONSE)
-                    AWS_SECRET_ACCESS_KEY = sh(script: "jq '.Credentials.SecretAccessKey' temp.json", returnStdout: true).trim()
-                    AWS_SESSION_TOKEN = sh(script: "jq '.Credentials.SessionToken' temp.json", returnStdout: true).trim()
-                    AWS_ACCESS_KEY_ID = sh(script: "jq '.Credentials.AccessKeyId' temp.json", returnStdout: true).trim()
-                    DATA = "export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}\n export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}\n export AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN}\n export AWS_REGION=${AWS_REGION_SANDBOX}\n export BUCKET_NAME=${S3_SANDBOX_BUCKET}\n export DIST_ID=${DIST_ID_SANDBOX}\n export ENV='sandbox'\n npm run deploy-to-s3"
-                    writeFile(file: 'temp.sh', text: DATA)
-                    sh('chmod +x temp.sh && ./temp.sh')
-                }
+                s3Push(env.ASSUMED_ROLE_SANDBOX, env.AWS_REGION_SANDBOX, env.S3_SANDBOX_BUCKET, env.DIST_ID_SANDBOX, 'sandbox')
             }
         }
+
+        // * Stage to deploy to Prod
         stage('Prod S3 Push') {
             when {
-                anyOf {
-                    branch 'main-oss'
-                }
+                branch 'main-oss'
             }
             steps {
-                script {
-                    RESPONSE = sh(
-                        script: "aws sts assume-role --role-arn ${env.ASSUMED_ROLE_PROD} --role-session-name s3-bucket-access",
-                        returnStdout: true
-                    ).trim()
-                    writeFile(file: 'temp.json', text: RESPONSE)
-                    AWS_SECRET_ACCESS_KEY = sh(script: "jq '.Credentials.SecretAccessKey' temp.json", returnStdout: true).trim()
-                    AWS_SESSION_TOKEN = sh(script: "jq '.Credentials.SessionToken' temp.json", returnStdout: true).trim()
-                    AWS_ACCESS_KEY_ID = sh(script: "jq '.Credentials.AccessKeyId' temp.json", returnStdout: true).trim()
-                    DATA = "export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}\n export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}\n export AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN}\n export AWS_REGION=${AWS_REGION_PROD}\n export BUCKET_NAME=${S3_PROD_BUCKET}\n export DIST_ID=${DIST_ID_PROD}\n export ENV='prod'\n npm run deploy-to-s3"
-                    writeFile(file: 'temp.sh', text: DATA)
-                    sh('chmod +x temp.sh && ./temp.sh')
-                }
+                s3Push(env.ASSUMED_ROLE_PROD, env.AWS_REGION_PROD, env.S3_PROD_BUCKET, env.DIST_ID_PROD, 'prod')
             }
         }
-        // stage('Commit')
-        // {
-        //     steps {
-        //         sh 'git add .'
-        //         sh "git commit -am 'Release Candidate ${env.VERSION}'"
-        //         sh "git tag -a ${env.VERSION}RC -m 'Release candidate for version ${env.VERSION}RC'"
-        //         withCredentials([gitUsernamePassword(credentialsId: 'bitbucket-dc-jenkins-rw')]) {
-        //             sh "git push origin ${env.VERSION}RC"
-        //         }
-        //     }
-        // }
     }
 }