diff --git a/.github/actions/core-cicd/deployment/deploy-sdk-npm/README.md b/.github/actions/core-cicd/deployment/deploy-sdk-npm/README.md
new file mode 100644
index 000000000000..d86ee5a5c833
--- /dev/null
+++ b/.github/actions/core-cicd/deployment/deploy-sdk-npm/README.md
@@ -0,0 +1,81 @@
+# GitHub Action: SDK Publish NPM Packages
+
+This GitHub Action automates publishing the dotCMS SDK libraries to the NPM registry. It performs the following tasks:
+
+1. **Checks out the specified branch of the repository**.
+2. **Sets up the required Node.js environment**.
+3. **Retrieves the next version of the SDK from the `package.json` file**.
+4. **Validates the version number against the existing version in the NPM registry**.
+5. **Publishes the SDK libraries to the NPM registry if validation passes**.
+
+## Inputs
+
+| Name | Description | Required | Default |
+|------------------|-----------------------------------|----------|---------|
+| `ref` | Branch to build from | No | `master`|
+| `npm-token` | NPM token | Yes | |
+| `npm-package-tag`| Package tag | No | `alpha` |
+| `node-version` | Node.js version | No | `19` |
+| `github-token` | GitHub Token | Yes | |
+
+## Outputs
+
+| Name | Description |
+|----------------------|---------------------------------------|
+| `npm-package-version`| SDK libs - NPM package version |
+| `published` | SDK libs - whether the libraries were published (`true`/`false`) |
+
+## Steps Overview
+
+1. **Checkout**: Checks out the specified branch of the repository.
+2. **Set up Node.js**: Sets up the Node.js environment based on the provided version.
+3. **Get Next Version**: Retrieves the next version from the `package.json` file of the SDK client library.
+4. **Validate Version**: Validates whether the next version is correct and whether it should be published.
+5. **Publish SDK into NPM Registry**: Publishes the SDK libraries to NPM if the version is validated.
+
+## Detailed Steps
+1. **Checkout**
+The action uses `actions/checkout@v4` to check out the specified branch, allowing the workflow to access the repository's contents.
+
+2. **Set Up Node.js**
+`actions/setup-node@v4` sets up the Node.js environment, which is required for running scripts and managing dependencies.
+
+3. **Get Next Version**
+This step retrieves the next version of the SDK by reading the `package.json` file from the specified directory.
+
+4. **Validate Version**
+The version retrieved in the previous step is compared against the current version in the NPM registry. The workflow checks whether the version has already been published and whether it follows the expected versioning scheme (see the sketch after the Notes section below).
+
+5. **Publish SDK into NPM Registry**
+If validation passes, the SDK libraries are published to the NPM registry. The libraries are iterated over, and each one is published using the provided NPM token and tag.
+
+### Notes
+
+- Ensure that the NPM token provided has the correct permissions to publish packages.
+- The action reads the SDK version from `core-web/libs/sdk/client/package.json` and publishes every library found under `core-web/libs/sdk/`.
+- The publish step only runs if the version validation passes, ensuring that no duplicate versions are published.
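+
+The "Validate Version" step boils down to the following minimal, standalone sketch (the package name `@dotcms/client` and the trailing build-number bump mirror the action itself; paths and messages are illustrative):
+
+```bash
+#!/usr/bin/env bash
+# Sketch: decide whether the version in package.json may be published.
+set -euo pipefail
+
+NEXT_VERSION=$(node -p "require('./core-web/libs/sdk/client/package.json').version")
+
+NPM_INFO=$(npm view '@dotcms/client' --json)
+CURRENT_VERSION=$(echo "$NPM_INFO" | jq -r '.version')
+
+# The expected version is the latest published version with its trailing
+# number incremented, e.g. 1.0.0-alpha.37 -> 1.0.0-alpha.38
+base_number=$(echo "$CURRENT_VERSION" | grep -oE '[0-9]+$')
+prefix=$(echo "$CURRENT_VERSION" | sed -E 's/[0-9]+$//')
+EXPECTED_VERSION="${prefix}$((base_number + 1))"
+
+ALREADY_PUBLISHED=$(echo "$NPM_INFO" | jq --arg v "$NEXT_VERSION" -r '.versions[] | select(. == $v)')
+
+if [ -n "$ALREADY_PUBLISHED" ]; then
+  echo "Version $NEXT_VERSION already exists in the NPM registry; skipping publish."
+elif [ "$NEXT_VERSION" != "$EXPECTED_VERSION" ]; then
+  echo "Version $NEXT_VERSION is not the expected $EXPECTED_VERSION; skipping publish."
+else
+  echo "Version $NEXT_VERSION can be published."
+fi
+```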
+
+## Usage Example
+
+Below is an example of how to use this GitHub Action in your workflow file:
+
+```yaml
+name: 'Publish SDK Libraries'
+on:
+  push:
+    branches:
+      - master
+  workflow_dispatch:
+
+jobs:
+  publish-sdk:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Publish to NPM
+        uses: ./.github/actions/core-cicd/deployment/deploy-sdk-npm
+        with:
+          ref: 'master'
+          npm-token: ${{ secrets.NPM_TOKEN }}
+          npm-package-tag: 'latest'
+          node-version: '18'
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+```
\ No newline at end of file
diff --git a/.github/actions/core-cicd/deployment/deploy-sdk-npm/action.yml b/.github/actions/core-cicd/deployment/deploy-sdk-npm/action.yml
new file mode 100644
index 000000000000..cd3210c03a4d
--- /dev/null
+++ b/.github/actions/core-cicd/deployment/deploy-sdk-npm/action.yml
@@ -0,0 +1,105 @@
+name: 'SDK Publish NPM Packages'
+description: 'Publish the dotCMS SDK libs on the NPM registry.'
+inputs:
+  ref:
+    description: 'Branch to build from'
+    required: false
+    default: 'master'
+  npm-token:
+    description: 'NPM token'
+    required: true
+  npm-package-tag:
+    description: 'Package tag'
+    required: false
+    default: 'alpha'
+  node-version:
+    description: 'Node.js version'
+    required: false
+    default: '19'
+  github-token:
+    description: 'GitHub Token'
+    required: true
+outputs:
+  npm-package-version:
+    description: 'SDK libs - NPM package version'
+    value: ${{ steps.next_version.outputs.next_version }}
+  published:
+    description: 'SDK libs - Published'
+    value: ${{ steps.validate_version.outputs.publish }}
+runs:
+  using: "composite"
+  steps:
+    - name: 'Checkout'
+      uses: actions/checkout@v4
+      with:
+        ref: ${{ inputs.ref }}
+        token: ${{ inputs.github-token }}
+
+    - name: 'Set up Node.js'
+      uses: actions/setup-node@v4
+      with:
+        node-version: ${{ inputs.node-version }}
+
+    - name: 'Get next version'
+      id: next_version
+      run: |
+        next_version=$(node -p "require('./core-web/libs/sdk/client/package.json').version")
+        echo "next_version=${next_version}" >> $GITHUB_OUTPUT
+      shell: bash
+
+    - name: 'Validate version'
+      id: validate_version
+      run: |
+        echo "::group::Validating version"
+        publish=false
+
+        NPM_INFO=$(npm view '@dotcms/client' --json)
+        VERSIONS=$(echo "$NPM_INFO" | jq '.versions')
+        CURRENT_VERSION=$(echo "$NPM_INFO" | jq -r '.version')
+
+        NEXT_VERSION=${{ steps.next_version.outputs.next_version }}
+
+        base_number=$(echo "$CURRENT_VERSION" | grep -oE '[0-9]+$')
+        next_number=$((base_number + 1))
+        prefix=$(echo "$CURRENT_VERSION" | sed -E 's/[0-9]+$//')
+
+        EXPECTED_VERSION="$prefix$next_number"
+
+        NEXT_VERSION_EXISTS=$(echo "$VERSIONS" | jq --arg item "$NEXT_VERSION" -r '.[] | select(. == $item)')
+
+        echo "::notice::EXPECTED VERSION: $EXPECTED_VERSION"
+        echo "::notice::NEXT VERSION: $NEXT_VERSION"
+
+        if [ -n "$NEXT_VERSION_EXISTS" ]; then
+          echo "Version $NEXT_VERSION already exists in NPM registry"
+        elif [[ "$NEXT_VERSION" != "$EXPECTED_VERSION" ]]; then
+          echo "Version $NEXT_VERSION is not the expected version."
+        else
+          publish=true
+        fi
+        echo "::notice::PUBLISH: $publish"
+        echo "publish=$publish" >> $GITHUB_OUTPUT
+        echo "::endgroup::"
+      shell: bash
+
+    - name: 'Publishing SDK libs into NPM registry'
+      if: ${{ steps.validate_version.outputs.publish == 'true' }}
+      working-directory: ${{ github.workspace }}/core-web/libs/sdk/
+      env:
+        NEXT_VERSION: ${{ steps.next_version.outputs.next_version }}
+        NPM_AUTH_TOKEN: ${{ inputs.npm-token }}
+        NPM_TAG: ${{ inputs.npm-package-tag }}
+      run: |
+        echo "::group::Publishing SDK packages"
+        sdks=$(ls)
+        for sdk in $sdks; do
+          echo "Publishing SDK lib [${sdk}]"
+          cd "$sdk" && pwd
+          echo "//registry.npmjs.org/:_authToken=${NPM_AUTH_TOKEN}" > ~/.npmrc
+          npm publish --access public --tag $NPM_TAG
+          # dist-tag needs the full package spec to tag the just-published version as "latest"
+          npm dist-tag add "$(node -p "require('./package.json').name")@${NEXT_VERSION}" latest
+          cd ..
+        done
+        echo "::endgroup::"
+      shell: bash
\ No newline at end of file
diff --git a/.github/actions/core-cicd/notification/notify-slack/README.md b/.github/actions/core-cicd/notification/notify-slack/README.md
new file mode 100644
index 000000000000..fcf493dd14e0
--- /dev/null
+++ b/.github/actions/core-cicd/notification/notify-slack/README.md
@@ -0,0 +1,47 @@
+# Slack Notification GitHub Action
+
+This GitHub Action sends a notification to a specified Slack channel using a provided payload in Markdown format. It's particularly useful for sending custom messages from your CI/CD pipelines directly to your Slack channels.
+
+## Inputs
+
+| Name | Description | Required | Default |
+| ---------------- | ----------------------------------------------- | -------- | ------- |
+| `channel-id` | The ID of the Slack channel to send the message to. | `true` | |
+| `payload` | The message payload in Markdown format. | `true` | |
+| `slack-bot-token`| The Slack Bot Token used for authentication. | `true` | |
+
+## Example Usage
+
+Here is an example of how to use this action in your GitHub workflow file:
+
+```yaml
+name: Notify Slack on Success
+
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  notify:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Send Slack Notification
+        uses: ./.github/actions/core-cicd/notification/notify-slack
+        with:
+          channel-id: 'C12345678'
+          payload: "Build succeeded! :tada:\n*Branch:* ${{ github.ref_name }}\n*Commit:* ${{ github.sha }}\n*Author:* ${{ github.actor }}"
+          slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
+```
+
+## Inputs Description
+**channel-id**: The Slack channel ID where the notification will be posted. Make sure to use the correct ID (e.g., C12345678 for public channels or G12345678 for private channels).
+**payload**: The content of the message, written in Markdown format. Because the action interpolates the payload into a JSON string, pass it as a single-line string and use `\n` for line breaks; standard Slack markdown is supported (see the sketch below).
+**slack-bot-token**: Your Slack bot token, which should be stored as a secret in your GitHub repository for security purposes.
+
+### Note
+> Ensure that your Slack Bot Token has the necessary permissions to post messages to the specified channel. If you encounter any issues with permissions, review your Slack app's OAuth scopes.
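+
+For reference, the action wraps the `payload` input in a single Block Kit `section` block and posts it with the bot token. A roughly equivalent `curl` call (using the hypothetical channel ID and message from the example above) looks like this:
+
+```bash
+# Sketch of what the underlying Slack API call amounts to.
+curl -sS -X POST https://slack.com/api/chat.postMessage \
+  -H "Authorization: Bearer ${SLACK_BOT_TOKEN}" \
+  -H "Content-Type: application/json; charset=utf-8" \
+  --data '{
+    "channel": "C12345678",
+    "blocks": [
+      {
+        "type": "section",
+        "text": { "type": "mrkdwn", "text": "Build succeeded! :tada:" }
+      }
+    ]
+  }'
+```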
\ No newline at end of file diff --git a/.github/actions/core-cicd/notification/notify-slack/action.yaml b/.github/actions/core-cicd/notification/notify-slack/action.yaml new file mode 100644 index 000000000000..d954ca7bd10f --- /dev/null +++ b/.github/actions/core-cicd/notification/notify-slack/action.yaml @@ -0,0 +1,34 @@ +name: 'Slack notification' +description: 'Send a notification to Slack' +inputs: + channel-id: + description: 'Channel ID to send the notification to' + required: true + payload: + description: 'Payload to send to Slack in MARKDOWN format' + required: true + slack-bot-token: + description: 'Slack Bot Token' + required: true + +runs: + using: "composite" + steps: + - name: Slack Notification + uses: slackapi/slack-github-action@v1.26.0 + with: + channel-id: ${{ inputs.channel-id }} + payload: | + { + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "${{ inputs.payload }}" + } + } + ] + } + env: + SLACK_BOT_TOKEN: ${{ inputs.slack-bot-token }} diff --git a/.github/filters.yaml b/.github/filters.yaml index c2b2e6f811b0..8aa9fe34050d 100644 --- a/.github/filters.yaml +++ b/.github/filters.yaml @@ -33,6 +33,9 @@ cli: &cli - *full_build_test - *backend +sdk_libs: &sdk_libs + - 'core-web/libs/sdk/**' + jvm_unit_test: - *backend - *cli diff --git a/.github/workflows/cicd_1-pr.yml b/.github/workflows/cicd_1-pr.yml index ee8f1154e834..239512bdc3dc 100644 --- a/.github/workflows/cicd_1-pr.yml +++ b/.github/workflows/cicd_1-pr.yml @@ -41,7 +41,7 @@ jobs: name: Initialize uses: ./.github/workflows/cicd_comp_initialize-phase.yml with: - incremental: true + validation-level: 'full' # Build job - only runs if no artifacts were found during initialization build: diff --git a/.github/workflows/cicd_3-trunk.yml b/.github/workflows/cicd_3-trunk.yml index 4488776dba0d..1d0c13796125 100644 --- a/.github/workflows/cicd_3-trunk.yml +++ b/.github/workflows/cicd_3-trunk.yml @@ -30,6 +30,10 @@ on: description: 'Run all tests' type: boolean default: false + publish-npm-sdk-libs: + description: 'Publish NPM SDKs' + type: boolean + default: false jobs: # Initialize the trunk check process @@ -39,6 +43,8 @@ jobs: with: reuse-previous-build: ${{ inputs.reuse-previous-build || github.event_name != 'workflow_dispatch' }} build-on-missing-artifacts: ${{ inputs.build-on-missing-artifacts || github.event_name != 'workflow_dispatch' }} + validation-level: 'custom' + custom-modules: 'sdk_libs' # Build job - only runs if no artifacts were found during initialization build: @@ -95,6 +101,7 @@ jobs: uses: ./.github/workflows/cicd_comp_deployment-phase.yml with: artifact-run-id: ${{ needs.initialize.outputs.artifact-run-id }} + publish-npm-sdk-libs: ${{ needs.initialize.outputs.sdk_libs != 'false' && github.event_name != 'workflow_dispatch' }} environment: trunk secrets: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} diff --git a/.github/workflows/cicd_4-nightly.yml b/.github/workflows/cicd_4-nightly.yml index 85df04ee2559..abddfd02d462 100644 --- a/.github/workflows/cicd_4-nightly.yml +++ b/.github/workflows/cicd_4-nightly.yml @@ -26,7 +26,7 @@ on: type: boolean description: 'Indicates if the workflow should build on missing artifacts' default: true - publish-npm-package: + publish-npm-cli: type: boolean description: 'Indicates if the workflow should publish the NPM package on the registry' default: false @@ -89,7 +89,7 @@ jobs: artifact-run-id: ${{ needs.initialize.outputs.artifact-run-id }} environment: nightly deploy-dev-image: true - publish-npm-package: ${{ ( 
github.event_name == 'workflow_dispatch' && inputs.publish-npm-package == true ) || github.event_name == 'schedule' }} + publish-npm-cli: ${{ ( github.event_name == 'workflow_dispatch' && inputs.publish-npm-cli == true ) || github.event_name == 'schedule' }} reuse-previous-build: ${{ inputs.reuse-previous-build || false }} secrets: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} diff --git a/.github/workflows/cicd_comp_deployment-phase.yml b/.github/workflows/cicd_comp_deployment-phase.yml index e76b5b9597ab..af49945f6a9d 100644 --- a/.github/workflows/cicd_comp_deployment-phase.yml +++ b/.github/workflows/cicd_comp_deployment-phase.yml @@ -32,9 +32,12 @@ on: reuse-previous-build: default: false type: boolean - publish-npm-package: + publish-npm-cli: default: false type: boolean + publish-npm-sdk-libs: + default: false + type: boolean secrets: DOCKER_USERNAME: required: false @@ -139,51 +142,46 @@ jobs: # Publish CLI to NPM (if required) - name: CLI Publish id: cli_publish - if: inputs.publish-npm-package + if: inputs.publish-npm-cli uses: ./.github/actions/core-cicd/deployment/deploy-cli-npm with: github-token: ${{ secrets.GITHUB_TOKEN }} npm-token: ${{ secrets.NPM_ORG_TOKEN }} cli-artifact-run-id: ${{ github.run_id }} + - name: SDKs Publish + id: sdks_publish + if: inputs.publish-npm-sdk-libs + uses: ./.github/actions/core-cicd/deployment/deploy-sdk-npm + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + npm-token: ${{ secrets.NPM_ORG_TOKEN }} + sdk-artifact-run-id: ${{ github.run_id }} + # Send Slack notification for Docker image deployment (if repository is 'dotcms/core') - name: Slack Notification (Docker image announcement) if: github.repository == 'dotcms/core' - uses: slackapi/slack-github-action@v1.26.0 + uses: ./.github/actions/core-cicd/notification/notify-slack with: channel-id: "log-docker" - payload: | - { - "blocks": [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "> :large_purple_circle: *Attention dotters:* Docker image built!\n \n>This automated script is happy to announce that a new *docker* image has been built for *${{ inputs.environment }}* with *tags:* [`${{ steps.docker_build.outputs.tags }}`] :docker:" - } - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + payload: "> :large_purple_circle: *Attention dotters:* Docker image built!\n \n>This automated script is happy to announce that a new *docker* image has been built for *${{ inputs.environment }}* with *tags:* [`${{ steps.docker_build.outputs.tags }}`] :docker:" + slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} # Send Slack notification for CLI publication (if required) - name: Slack Notification (dotCLI announcement) - if: inputs.publish-npm-package - uses: slackapi/slack-github-action@v1.26.0 + if: inputs.publish-npm-cli + uses: ./.github/actions/core-cicd/notification/notify-slack with: channel-id: "log-dotcli" - payload: | - { - "blocks": [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "> :large_purple_circle: *Attention dotters:* dotCLI published!\n \n>This automated script is happy to announce that a new *_dotCLI_* version *tagged as:* [ `${{ steps.cli_publish.outputs.npm-package-version }}, ${{ steps.cli_publish.outputs.npm-package-version-tag }}` ] is now available on the `NPM` registry :package:!\n \n>`npm i -g @dotcms/dotcli@${{ steps.cli_publish.outputs.npm-package-version-tag }}`" - } - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} \ No newline at end of file + payload: "> :large_purple_circle: *Attention dotters:* 
dotCLI published!\n \n>This automated script is happy to announce that a new *_dotCLI_* version *tagged as:* [ `${{ steps.cli_publish.outputs.npm-package-version }}, ${{ steps.cli_publish.outputs.npm-package-version-tag }}` ] is now available on the `NPM` registry :package:!\n \n>`npm i -g @dotcms/dotcli@${{ steps.cli_publish.outputs.npm-package-version-tag }}`"
+          slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
+
+      # Send Slack notification for SDK publication (if required)
+      - name: Slack Notification (SDK announcement)
+        if: success() && steps.sdks_publish.outputs.published == 'true'
+        uses: ./.github/actions/core-cicd/notification/notify-slack
+        with:
+          channel-id: "log-sdk-libs"
+          payload: "> :large_orange_circle: *Attention dotters:* SDK libs (Angular, Client, Experiments and React) published!\n \n>This automated script is happy to announce that a new *_SDK libs_* version *tagged as:* [ `${{ steps.sdks_publish.outputs.npm-package-version }}` ] is now available on the `NPM` registry :package:!"
+          slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/cicd_comp_initialize-phase.yml b/.github/workflows/cicd_comp_initialize-phase.yml
index c2430766488d..afe2267b4dc0 100644
--- a/.github/workflows/cicd_comp_initialize-phase.yml
+++ b/.github/workflows/cicd_comp_initialize-phase.yml
@@ -15,10 +15,14 @@ name: Initialize Phase
 on:
   workflow_call:
     inputs:
-      incremental:
-        description: 'Indicates if the workflow is incremental or not'
-        type: boolean
-        default: false
+      validation-level:
+        default: 'none'
+        type: string
+        description: 'Levels of validation: none, full, or custom'
+      custom-modules:
+        default: ''
+        type: string
+        description: 'Comma-separated list of custom modules to validate'
       reuse-previous-build:
         description: 'Indicates if the workflow should reuse the previous build'
         type: boolean
@@ -42,6 +46,8 @@ on:
         value: ${{ jobs.changes.outputs.jvm_unit_test }}
       cli:
         value: ${{ jobs.changes.outputs.cli }}
+      sdk_libs:
+        value: ${{ jobs.changes.outputs.sdk_libs }}

 jobs:
   # This job is used as a required check to indicate that the workflow has started and is running
@@ -113,11 +119,12 @@ jobs:
       frontend: ${{ steps.filter-rewrite.outputs.frontend }}
       jvm_unit_test: ${{ steps.filter-rewrite.outputs.jvm_unit_test }}
       cli: ${{ steps.filter-rewrite.outputs.cli }}
+      sdk_libs: ${{ steps.filter-rewrite.outputs.sdk_libs }}
     steps:
       - uses: actions/checkout@v4
-        if: ${{ inputs.incremental }}
+        if: ${{ inputs.validation-level != 'none' }}
      - uses: dorny/paths-filter@v3.0.1
-        if: ${{ inputs.incremental }}
+        if: ${{ inputs.validation-level != 'none' }}
        id: filter
        with:
          filters: .github/filters.yaml
@@ -133,6 +140,7 @@ jobs:
          backend=${{ steps.filter.outputs.backend || 'true' }}
          build=${{ steps.filter.outputs.build || 'true' }}
          jvm_unit_test=${{ steps.filter.outputs.jvm_unit_test || 'true' }}
+          sdk_libs=${{ steps.filter.outputs.sdk_libs || 'false' }}

          # Check if the commit is to the master branch
          skip_tests=${CICD_SKIP_TESTS:-false} # Use environment variable, default to 'false'
@@ -146,15 +154,43 @@ jobs:
            jvm_unit_test=false
          fi

+          # Adjust outputs based on validation-level
+          if [ "${{ inputs.validation-level }}" == "custom" ]; then
+            frontend=false
+            cli=false
+            backend=false
+            build=false
+            jvm_unit_test=false
+            sdk_libs=false
+            IFS=',' read -r -a custom_modules_list <<< "${{ inputs.custom-modules }}"
+            for module in "${custom_modules_list[@]}"; do
+              if [ "${module}" == "frontend" ]; then
+                frontend=${{ steps.filter.outputs.frontend }}
+              elif [ "${module}" == "cli" ]; then
+                cli=${{ steps.filter.outputs.cli }}
+              elif [ "${module}" == "backend" ]; then
+                backend=${{ steps.filter.outputs.backend }}
+              elif [ "${module}" == "build" ]; then
+                build=${{ steps.filter.outputs.build }}
+              elif [ "${module}" == "jvm_unit_test" ]; then
+                jvm_unit_test=${{ steps.filter.outputs.jvm_unit_test }}
+              elif [ "${module}" == "sdk_libs" ]; then
+                sdk_libs=${{ steps.filter.outputs.sdk_libs }}
+              fi
+            done
+          fi
+
          echo "build=${build}"
          echo "frontend=${frontend}"
          echo "cli=${cli}"
          echo "backend=${backend}"
          echo "jvm_unit_test=${jvm_unit_test}"
+          echo "sdk_libs=${sdk_libs}"

          # Export the outcomes as GitHub Actions outputs
          echo "frontend=${frontend}" >> $GITHUB_OUTPUT
          echo "cli=${cli}" >> $GITHUB_OUTPUT
          echo "backend=${backend}" >> $GITHUB_OUTPUT
          echo "build=${build}" >> $GITHUB_OUTPUT
-          echo "jvm_unit_test=${jvm_unit_test}" >> $GITHUB_OUTPUT
\ No newline at end of file
+          echo "jvm_unit_test=${jvm_unit_test}" >> $GITHUB_OUTPUT
+          echo "sdk_libs=${sdk_libs}" >> $GITHUB_OUTPUT
\ No newline at end of file
diff --git a/.github/workflows/cicd_manual_publish-starter.yml b/.github/workflows/cicd_manual_publish-starter.yml
index 7188612e0e5c..afd74f7114f0 100644
--- a/.github/workflows/cicd_manual_publish-starter.yml
+++ b/.github/workflows/cicd_manual_publish-starter.yml
@@ -156,20 +156,8 @@ jobs:
     environment: trunk
     steps:
       - name: Slack Notification
-        uses: slackapi/slack-github-action@v1.26.0
+        uses: ./.github/actions/core-cicd/notification/notify-slack
         with:
-          channel-id: "log-starter"
-          payload: |
-            {
-              "blocks": [
-                {
-                  "type": "section",
-                  "text": {
-                    "type": "mrkdwn",
-                    "text": "> :large_green_circle: *Attention dotters:* a new Starter published!\n \n>This automated script is happy to announce that a new *_${{ env.STARTER_TYPE }}_* :package: `${{ needs.deploy-artifacts.outputs.filename }}` is now available on `ARTIFACTORY` :frog:!\n \n>:link:${{ needs.deploy-artifacts.outputs.url }}\n>*Changelog* \n>```${{ github.event.inputs.changelog }}```"
-                  }
-                }
-              ]
-            }
-        env:
-          SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
+          channel-id: "log-starter"
+          payload: "> :large_green_circle: *Attention dotters:* a new Starter published!\n \n>This automated script is happy to announce that a new *_${{ env.STARTER_TYPE }}_* :package: `${{ needs.deploy-artifacts.outputs.filename }}` is now available on `ARTIFACTORY` :frog:!\n \n>:link:${{ needs.deploy-artifacts.outputs.url }}\n>*Changelog* \n>```${{ github.event.inputs.changelog }}```"
+          slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
diff --git a/.github/workflows/cicd_post-workflow-reporting.yml b/.github/workflows/cicd_post-workflow-reporting.yml
index bd830d3d4307..a935cd7ed5d0 100644
--- a/.github/workflows/cicd_post-workflow-reporting.yml
+++ b/.github/workflows/cicd_post-workflow-reporting.yml
@@ -266,9 +266,8 @@ jobs:
       # Send Slack notification
       - name: Post to Slack
         if: steps.workflow-data.outputs.has_json == 'true' && github.repository == 'dotcms/core' && ( steps.workflow-data.outputs.status == 'FAILURE' || ( github.event_name == 'workflow_call' && inputs.slack-only-on-failure == false ) )
-        uses: slackapi/slack-github-action@v1.24.0
+        uses: ./.github/actions/core-cicd/notification/notify-slack
         with:
           channel-id: ${{ vars.SLACK_REPORT_CHANNEL }}
           payload: ${{ steps.prepare-slack-message.outputs.payload }}
-        env:
-          SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
\ No newline at end of file
+        slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
diff --git a/core-web/.husky/pre-commit b/core-web/.husky/pre-commit
index 5ebd37ad4e87..51a091e7610f
100755 --- a/core-web/.husky/pre-commit +++ b/core-web/.husky/pre-commit @@ -16,11 +16,34 @@ NC='\033[0m' # No Color exit 1 } +# Load nvm and use the version specified in .nvmrc +load_nvm_and_use_node() { + export NVM_DIR="$HOME/.nvm" + if [ -s "$NVM_DIR/nvm.sh" ]; then + . "$NVM_DIR/nvm.sh" # This loads nvm + echo "nvm loaded." + + if [ -f ".nvmrc" ]; then + nvm use || { + echo "Failed to switch to Node version specified in .nvmrc" + exit 1 + } + echo "Using Node version from .nvmrc" + else + echo "No .nvmrc file found. Using default Node version." + fi + else + echo "nvm is not installed or could not be found." + exit 1 + fi +} + check_sdk_client_affected() { local YARN_EXEC="${root_dir}/installs/node/yarn/dist/bin/yarn" - local affected_projects=$(npx nx show projects --affected) echo "Affected projects: $affected_projects" + + # Build sdk-client if affected if echo "$affected_projects" | grep -q "sdk-client"; then echo "Building sdk-client" if ! $YARN_EXEC nx run sdk-client:build:js; then @@ -175,8 +198,6 @@ restore_untracked_files() { original_pwd=${PWD} root_dir="$(git rev-parse --show-toplevel)" - - # Check for sdkman before running other initializations # This will ensure that the correct version of node and yarn is installed @@ -201,6 +222,9 @@ fi core_web_dir="${root_dir}/core-web" cd "${core_web_dir}" || exit 1 +# Load nvm and use the node version specified in .nvmrc +load_nvm_and_use_node + staged_files=$(git diff --cached --name-only) modified_files=$(git diff --name-only) @@ -264,6 +288,22 @@ fi # Check if sdk-client is affected check_sdk_client_affected +# Check if there are changes in core-web/libs/sdk and run bump-sdk-versions.js if needed +sdk_files_staged=$(echo "$staged_files" | grep -E '^core-web/libs/sdk/' || true) +if [ -n "$sdk_files_staged" ]; then + echo "Changes detected in core-web/libs/sdk. Running bump-sdk-versions.js" + + if ! node "${root_dir}/core-web/bump-sdk-versions.js"; then + echo "Failed to update versions" + has_errors=true + else + # Add all modified package.json files to staging + git add "${root_dir}/core-web/libs/sdk/" + git add "${root_dir}/examples/" + echo "Staged updated package.json files for SDKs and examples" + fi +fi + # Restore original working directory cd "${original_pwd}" || exit 1 # Exit if the directory does not exist @@ -274,4 +314,4 @@ if [ "$has_errors" = true ]; then else echo "Commit checks completed successfully." 
exit 0 # No errors, exit normally -fi \ No newline at end of file +fi diff --git a/core-web/bump-sdk-versions.js b/core-web/bump-sdk-versions.js new file mode 100644 index 000000000000..8493a0c61921 --- /dev/null +++ b/core-web/bump-sdk-versions.js @@ -0,0 +1,116 @@ +const fs = require('fs'); +const path = require('path'); + +// Function to read and parse JSON +const readJSON = (filePath) => JSON.parse(fs.readFileSync(filePath, 'utf8')); + +// Function to write JSON to file +const writeJSON = (filePath, data) => + fs.writeFileSync(filePath, JSON.stringify(data, null, 2) + '\n'); + +// Function to bump the version +const bumpVersion = (version) => { + const match = version.match(/^(\d+\.\d+\.\d+)(-alpha\.)(\d+)?$/); + if (!match) { + throw new Error(`Invalid version format: ${version}`); + } + + const [, majorMinorPatch, alphaPrefix, buildNumber = 0] = match; + const newBuildNumber = parseInt(buildNumber) + 1; + return `${majorMinorPatch}${alphaPrefix}${newBuildNumber}`; +}; + +// Function to update the version in a package.json file +const updateVersionInPackageJson = (packageJsonPath, newVersion) => { + const packageJson = readJSON(packageJsonPath); + packageJson.version = newVersion; + writeJSON(packageJsonPath, packageJson); + console.log(`Updated version in ${packageJsonPath} to ${newVersion}`); +}; + +// Function to update peerDependencies in a package.json file +const updatePeerDependencies = (packageJsonPath, newVersion) => { + const packageJson = readJSON(packageJsonPath); + let updated = false; + + if (packageJson.peerDependencies) { + Object.keys(packageJson.peerDependencies).forEach((dep) => { + if (dep.startsWith('@dotcms/')) { + packageJson.peerDependencies[dep] = newVersion; + updated = true; + } + }); + } + + if (updated) { + writeJSON(packageJsonPath, packageJson); + console.log(`Updated peerDependencies in ${packageJsonPath} to version ${newVersion}`); + } +}; + +// Function to update dependencies in a package.json file for examples +const updateDependenciesInExamples = (packageJsonPath, sdkDependencies) => { + const packageJson = readJSON(packageJsonPath); + let updated = false; + + Object.keys(sdkDependencies).forEach((dep) => { + if (packageJson.dependencies && packageJson.dependencies[dep]) { + packageJson.dependencies[dep] = sdkDependencies[dep]; + updated = true; + } + }); + + if (updated) { + writeJSON(packageJsonPath, packageJson); + console.log(`Updated dependencies in ${packageJsonPath} to new SDK versions`); + } +}; + +// Paths +const sdkDir = path.join(__dirname, 'libs/sdk'); +const examplesDir = path.join(__dirname, '../examples'); + +// Step 1: Bump the version of the client library +const clientPackageJsonPath = path.join(sdkDir, 'client/package.json'); +const clientPackageJson = readJSON(clientPackageJsonPath); +const currentVersion = clientPackageJson.version; +const newVersion = bumpVersion(currentVersion); +console.log(`Bumping version of client from ${currentVersion} to ${newVersion}`); + +// Step 2: Update the version in all SDK libraries +const sdkLibraries = fs + .readdirSync(sdkDir) + .filter((lib) => fs.existsSync(path.join(sdkDir, lib, 'package.json'))); +sdkLibraries.forEach((lib) => { + const packageJsonPath = path.join(sdkDir, lib, 'package.json'); + updateVersionInPackageJson(packageJsonPath, newVersion); +}); + +// Step 3: Update peerDependencies in other SDK libraries +sdkLibraries.forEach((lib) => { + if (lib !== 'client') { + const packageJsonPath = path.join(sdkDir, lib, 'package.json'); + updatePeerDependencies(packageJsonPath, 
newVersion); + } +}); + +// Step 4: Dynamically build the sdkDependencies object +const sdkDependencies = sdkLibraries.reduce((deps, lib) => { + const packageJsonPath = path.join(sdkDir, lib, 'package.json'); + const packageJson = readJSON(packageJsonPath); + if (packageJson.name) { + deps[packageJson.name] = newVersion; + } + return deps; +}, {}); + +// Step 5: Update dependencies in example projects +const exampleProjects = fs + .readdirSync(examplesDir) + .filter((proj) => fs.existsSync(path.join(examplesDir, proj, 'package.json'))); +exampleProjects.forEach((proj) => { + const packageJsonPath = path.join(examplesDir, proj, 'package.json'); + updateDependenciesInExamples(packageJsonPath, sdkDependencies); +}); + +console.log(`All updates complete. New SDK version: ${newVersion}`); diff --git a/dotCMS/src/enterprise/java/com/dotcms/enterprise/priv/HostAssetsJobImpl.java b/dotCMS/src/enterprise/java/com/dotcms/enterprise/priv/HostAssetsJobImpl.java index 0899a68fa366..cf5bf2e4b7f4 100644 --- a/dotCMS/src/enterprise/java/com/dotcms/enterprise/priv/HostAssetsJobImpl.java +++ b/dotCMS/src/enterprise/java/com/dotcms/enterprise/priv/HostAssetsJobImpl.java @@ -110,6 +110,7 @@ import com.dotmarketing.util.contentet.pagination.PaginatedContentlets; import com.liferay.portal.model.User; import com.liferay.util.StringPool; +import io.vavr.Lazy; import io.vavr.control.Try; import org.apache.commons.beanutils.BeanUtils; import org.quartz.JobExecutionContext; @@ -117,7 +118,6 @@ import java.io.Serializable; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; @@ -162,6 +162,24 @@ public class HostAssetsJobImpl extends ParentProxy{ private final Host SYSTEM_HOST; private final Folder SYSTEM_FOLDER; + /** + * This feature flag allows you to enable/disable copying Content Types when you copy a Site. + */ + public static final String ENABLE_CONTENT_TYPE_COPY = "FEATURE_FLAG_ENABLE_CONTENT_TYPE_COPY"; + /** + * This property allows you to fall back to NOT copy related content whose parent is a + * Contentlet living in System Host. Such type of relationship was being ignored because + * contents living in System Host are NEVER copied, and were not taken into account when + * copying relationship data. 
+ */ + public static final String COPY_RELATED_CONTENT_IN_SYSTEM_HOST_CONTENTS = + "COPY_RELATED_CONTENT_IN_SYSTEM_HOST_CONTENTS"; + + private static final Lazy CONTENT_TYPE_COPY_FLAG = + Lazy.of(() -> Config.getBooleanProperty(ENABLE_CONTENT_TYPE_COPY, false)); + private static final Lazy COPY_RELATED_CONTENT_IN_SYSTEM_HOST_CONTENTS_FLAG = Lazy.of(() -> + Config.getBooleanProperty(COPY_RELATED_CONTENT_IN_SYSTEM_HOST_CONTENTS, true)); + private static final boolean DONT_RESPECT_FRONTEND_ROLES = Boolean.FALSE; private static final boolean RESPECT_FRONTEND_ROLES = Boolean.TRUE; @@ -255,24 +273,6 @@ public void run(final JobExecutionContext jobContext) throws JobExecutionExcepti this.sendNotification(String.format(successMsg, destinationSite.getHostname()), userId, NotificationLevel.INFO); } - private HTMLPageAssetAPI.TemplateContainersReMap getMirrorTemplateContainersReMap(Template sourceTemplate) - throws DotDataException, DotSecurityException { - - User user = userAPI.getSystemUser(); - boolean respectFrontendRoles = false; - - List sourceContainers = templateAPI.getContainersInTemplate(sourceTemplate, user, - respectFrontendRoles); - List containerMappings = new LinkedList<>(); - for (Container sourceContainer : sourceContainers) { - HTMLPageAssetAPI.TemplateContainersReMap.ContainerRemapTuple containerMapping = new HTMLPageAssetAPI.TemplateContainersReMap.ContainerRemapTuple( - sourceContainer, sourceContainer); - containerMappings.add(containerMapping); - } - return new HTMLPageAssetAPI.TemplateContainersReMap(sourceTemplate, - sourceTemplate, containerMappings); - } - /** * Performs the copy process of the user-specified elements from the source site to the * destination site. @@ -292,20 +292,20 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f try { HibernateUtil.startTransaction(); // Global Vars - double progressIncrement = 0; - double currentProgress = 0; + double progressIncrement; + double currentProgress; this.siteCopyStatus.addMessage("copying-templates"); // ====================================================================== // Copying templates and containers // ====================================================================== - final Map templatesMappingBySourceId = new HashMap<>(); + final Map copiedTemplatesBySourceId = new HashMap<>(); final Map copiedContainersBySourceId = new HashMap<>(); - final Map folderMappingsBySourceId = new HashMap<>(); - final Map contentMappingsBySourceId = new HashMap<>(); - final List contentsToCopyDependencies = new ArrayList<>(); - final Map copiedContentTypes = new HashMap<>(); - final Map copiedRelationships = new HashMap<>(); + final Map copiedFoldersBySourceId = new HashMap<>(); + final Map copiedContentsBySourceId = new HashMap<>(); + final List contentsWithRelationships = new ArrayList<>(); + final Map copiedContentTypesBySourceId = new HashMap<>(); + final Map copiedRelationshipsBySourceId = new HashMap<>(); if (copyOptions.isCopyTemplatesAndContainers()) { Logger.info(this, "----------------------------------------------------------------------"); Logger.info(this, String.format(":::: Copying Templates and Containers to new Site '%s'", destinationSite.getHostname())); @@ -338,7 +338,7 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f DONT_RESPECT_FRONTEND_ROLES); final Folder sourceFolder = this.folderAPI.find(sourceContent.getFolder(), this.SYSTEM_USER, DONT_RESPECT_FRONTEND_ROLES); // This should store the new container destination folder into the map. 
- final Folder destinationFolder = copyFolder(sourceFolder, destinationSite, folderMappingsBySourceId); + final Folder destinationFolder = copyFolder(sourceFolder, destinationSite, copiedFoldersBySourceId); Logger.debug(HostAssetsJobImpl.class, () -> String.format("---> Container-As-File destination folder path is '%s'", destinationFolder.getPath())); // now create the Copy of the container as file and all its assets @@ -350,12 +350,12 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f processedContentletsList.add( processCopyOfContentlet(asset, copyOptions, destinationSite, - contentMappingsBySourceId, - folderMappingsBySourceId, + copiedContentsBySourceId, + copiedFoldersBySourceId, copiedContainersBySourceId, - templatesMappingBySourceId, - contentsToCopyDependencies, - copiedContentTypes + copiedTemplatesBySourceId, + contentsWithRelationships, + copiedContentTypesBySourceId ) ); } @@ -400,7 +400,7 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f final Template newTemplate = copyTemplate(sourceTemplate, destinationSite, copiedContainersBySourceId); HTMLPageAssetAPI.TemplateContainersReMap templateMapping = new HTMLPageAssetAPI.TemplateContainersReMap( sourceTemplate, newTemplate, containerMappings); - templatesMappingBySourceId.put(sourceTemplate.getIdentifier(), templateMapping); + copiedTemplatesBySourceId.put(sourceTemplate.getIdentifier(), templateMapping); } catch (final Exception e) { Logger.error(this, String.format("An error occurred when copying data from Template '%s' [%s] " + "from Site '%s' to Site '%s'. The process will continue...", sourceTemplate.getTitle() @@ -446,7 +446,7 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f } final HTMLPageAssetAPI.TemplateContainersReMap templateMapping = new HTMLPageAssetAPI.TemplateContainersReMap( sourceTemplate, sourceTemplate, containerMappings); - templatesMappingBySourceId.put(sourceTemplate.getIdentifier(), templateMapping); + copiedTemplatesBySourceId.put(sourceTemplate.getIdentifier(), templateMapping); } catch (final Exception e) { Logger.error(this, String.format("An error occurred when copying data from Template '%s' [%s] " + "from Site '%s' to Site '%s'. The process will continue...", sourceTemplate.getTitle() @@ -470,7 +470,7 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f Logger.info(this, String.format("-> Copying %d Folders", allSourceFolders.size())); for (final Folder sourceFolder : allSourceFolders) { try { - copyFolder(sourceFolder, destinationSite, folderMappingsBySourceId); + copyFolder(sourceFolder, destinationSite, copiedFoldersBySourceId); } catch (final Exception e) { Logger.error(this, String.format("An error occurred when copying folder '%s' from Site '%s' to" + " Site '%s'. 
The process will continue...", sourceFolder.getPath(), sourceSite @@ -482,7 +482,7 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f this.siteCopyStatus.updateProgress(10); if (copyOptions.isCopyLinks()) { - final Collection folders = folderMappingsBySourceId.values(); + final Collection folders = copiedFoldersBySourceId.values(); // ====================================================================== // Copying Menu Links // ====================================================================== @@ -510,12 +510,12 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f if(copyOptions.isCopyTemplatesAndContainers()){ Logger.info(this, "----------------------------------------------------------------------"); Logger.info(this, String.format(":::: Pointing %d Templates to copied themes for new Site '%s'", - templatesMappingBySourceId.size(), destinationSite.getHostname())); - for (final String sourceTemplateId : templatesMappingBySourceId.keySet()) { - final Template srcTemplate = templatesMappingBySourceId.get(sourceTemplateId).getSourceTemplate(); - if(UtilMethods.isSet(srcTemplate.getTheme()) && folderMappingsBySourceId.containsKey(srcTemplate.getTheme())){ - final String destTemplateInode = templatesMappingBySourceId.get(sourceTemplateId).getDestinationTemplate().getInode(); - final String destTheme = folderMappingsBySourceId.get(srcTemplate.getTheme()).destinationFolder.getInode(); + copiedTemplatesBySourceId.size(), destinationSite.getHostname())); + for (final String sourceTemplateId : copiedTemplatesBySourceId.keySet()) { + final Template srcTemplate = copiedTemplatesBySourceId.get(sourceTemplateId).getSourceTemplate(); + if(UtilMethods.isSet(srcTemplate.getTheme()) && copiedFoldersBySourceId.containsKey(srcTemplate.getTheme())){ + final String destTemplateInode = copiedTemplatesBySourceId.get(sourceTemplateId).getDestinationTemplate().getInode(); + final String destTheme = copiedFoldersBySourceId.get(srcTemplate.getTheme()).destinationFolder.getInode(); this.templateAPI.updateThemeWithoutVersioning(destTemplateInode, destTheme); } } @@ -526,7 +526,7 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f HibernateUtil.startTransaction(); this.siteCopyStatus.updateProgress(70); - if (Config.getBooleanProperty("FEATURE_FLAG_ENABLE_CONTENT_TYPE_COPY", false) && copyOptions.isCopyContentTypes()) { + if (CONTENT_TYPE_COPY_FLAG.get() && copyOptions.isCopyContentTypes()) { Logger.info(this, "----------------------------------------------------------------------"); Logger.info(this, String.format(":::: Copying Content Types to new Site '%s'", destinationSite.getHostname())); final List sourceContentTypes = this.contentTypeAPI.search("", @@ -542,7 +542,7 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f // Copy the Content Type objects with NONE of their relationship fields final ContentType copiedContentType = this.contentTypeAPI.copyFromAndDependencies(builder.build(), destinationSite, false); - copiedContentTypes.put(sourceContentType.id(), new ContentTypeMapping(sourceContentType, copiedContentType)); + copiedContentTypesBySourceId.put(sourceContentType.id(), new ContentTypeMapping(sourceContentType, copiedContentType)); } final List childRelationships = new ArrayList<>(); // Now, copy the relationship fields back, but EXCLUDE all relationship fields that are pointing @@ -558,8 +558,8 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, 
f sourceRelationshipField.name(), sourceContentType.name())); continue; } - final ContentTypeMapping parentContentTypeMapping = copiedContentTypes.get(sourceRelationship.getParentStructure().id()); - final ContentTypeMapping childContentTypeMapping = copiedContentTypes.get(sourceRelationship.getChildStructure().id()); + final ContentTypeMapping parentContentTypeMapping = copiedContentTypesBySourceId.get(sourceRelationship.getParentStructure().id()); + final ContentTypeMapping childContentTypeMapping = copiedContentTypesBySourceId.get(sourceRelationship.getChildStructure().id()); checkNotNull(parentContentTypeMapping, "Parent Content Type ID in Relationship Field " + "'%s' in Content Type '%s' is null", sourceRelationshipField.name(), sourceContentType.name()); checkNotNull(childContentTypeMapping, "Child Content Type ID in Relationship Field " + @@ -567,21 +567,21 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f // If this Relationship Field represents the parent of the relationship, or if the relationship is between // the same Content Types, just copy the field with the new IDs and data if (this.relationshipAPI.isChildField(sourceRelationship, sourceRelationshipField) || this.relationshipAPI.sameParentAndChild(sourceRelationship)) { - final String copiedContentTypeId = copiedContentTypes.get(sourceContentType.id()).destinationContentType.id(); + final String copiedContentTypeId = copiedContentTypesBySourceId.get(sourceContentType.id()).destinationContentType.id(); final String copiedContentTypeVarName = childContentTypeMapping.destinationContentType.variable(); this.createRelationshipField(copiedContentTypeId, copiedContentTypeVarName, sourceRelationshipField, - sourceRelationship, copiedRelationships); + sourceRelationship, copiedRelationshipsBySourceId); } else { // Here, the Relationship Field is the child of the current Relationship and its parent Relationship has // already been copied. 
Therefore, we can create the child Relationship Field now and reference the // existing relationship - if (copiedRelationships.containsKey(sourceRelationship.getInode())) { - final Relationship copiedRelationship = copiedRelationships.get(sourceRelationship.getInode()).destinationRelationship; - final String copiedContentTypeId = copiedContentTypes.get(sourceContentType.id()).destinationContentType.id(); + if (copiedRelationshipsBySourceId.containsKey(sourceRelationship.getInode())) { + final Relationship copiedRelationship = copiedRelationshipsBySourceId.get(sourceRelationship.getInode()).destinationRelationship; + final String copiedContentTypeId = copiedContentTypesBySourceId.get(sourceContentType.id()).destinationContentType.id(); this.createRelationshipField(copiedContentTypeId, copiedRelationship.getRelationTypeValue(), sourceRelationshipField, - sourceRelationship, copiedRelationships); + sourceRelationship, copiedRelationshipsBySourceId); } else { // If the Relationship Field points to a relationship that hasn't been copied yet, we'll wait until // the parent Relationship is created and store its data in a list @@ -598,8 +598,8 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f for (final RelationshipMapping childRelationshipMapping : childRelationships) { this.copyChildRelationship(childRelationshipMapping.sourceRelationship, childRelationshipMapping.sourceContentType, - childRelationshipMapping.sourceField, copiedRelationships, - copiedContentTypes); + childRelationshipMapping.sourceField, copiedRelationshipsBySourceId, + copiedContentTypesBySourceId); } } HibernateUtil.closeAndCommitTransaction(); @@ -618,31 +618,33 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f Logger.info(this, "----------------------------------------------------------------------"); Logger.info(this, String.format(":::: Copying HTML Pages - but NOT their contents - to new Site '%s'", destinationSite.getHostname())); final PaginatedContentlets sourceContentlets = this.contentAPI.findContentletsPaginatedByHost(sourceSite, - Arrays.asList(BaseContentType.HTMLPAGE.getType()), null, this.SYSTEM_USER, + List.of(BaseContentType.HTMLPAGE.getType()), null, this.SYSTEM_USER, DONT_RESPECT_FRONTEND_ROLES); currentProgress = 70; progressIncrement = (95 - 70) / (double) sourceContentlets.size(); Logger.info(this, String.format("-> Copying %d HTML Pages", sourceContentlets.size())); for (final Contentlet sourceContent : sourceContentlets) { - processCopyOfContentlet(sourceContent, copyOptions, - destinationSite, contentMappingsBySourceId, folderMappingsBySourceId, - copiedContainersBySourceId, templatesMappingBySourceId, - contentsToCopyDependencies, copiedContentTypes); + if (null != sourceContent) { + this.processCopyOfContentlet(sourceContent, copyOptions, + destinationSite, copiedContentsBySourceId, copiedFoldersBySourceId, + copiedContainersBySourceId, copiedTemplatesBySourceId, + contentsWithRelationships, copiedContentTypesBySourceId); - currentProgress += progressIncrement; + currentProgress += progressIncrement; - this.siteCopyStatus.updateProgress((int) currentProgress); - if (contentCount % 100 == 0) { - HibernateUtil.closeAndCommitTransaction(); - HibernateUtil.startTransaction(); - } - contentCount++; + this.siteCopyStatus.updateProgress((int) currentProgress); + if (contentCount % 100 == 0) { + HibernateUtil.closeAndCommitTransaction(); + HibernateUtil.startTransaction(); + } + contentCount++; + } } // Copy contentlet dependencies - 
this.copyRelatedContentlets(contentsToCopyDependencies, - contentMappingsBySourceId, copiedRelationships, copyOptions); + this.copyRelatedContentlets(contentsWithRelationships, + copiedContentsBySourceId, copiedRelationshipsBySourceId, copyOptions); } // Option 2: Copy all content on site @@ -660,11 +662,11 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f Logger.info(this, "-> Copying simple contents first"); while (ite.hasNext()) { final Contentlet sourceContent = ite.next(); - if (!sourceContent.isHTMLPage()) { - processCopyOfContentlet(sourceContent, copyOptions, - destinationSite, contentMappingsBySourceId, folderMappingsBySourceId, - copiedContainersBySourceId, templatesMappingBySourceId, - contentsToCopyDependencies, copiedContentTypes); + if (null != sourceContent && !sourceContent.isHTMLPage()) { + this.processCopyOfContentlet(sourceContent, copyOptions, + destinationSite, copiedContentsBySourceId, copiedFoldersBySourceId, + copiedContainersBySourceId, copiedTemplatesBySourceId, + contentsWithRelationships, copiedContentTypesBySourceId); // Update progress ONLY if the record is processed currentProgress += progressIncrement; this.siteCopyStatus.updateProgress((int) currentProgress); @@ -683,22 +685,24 @@ private void copySiteAssets(final Host sourceSite, final Host destinationSite, f ite = sourceContentlets.iterator(); while (ite.hasNext()) { final Contentlet sourceContent = ite.next(); - processCopyOfContentlet(sourceContent, copyOptions, - destinationSite, contentMappingsBySourceId, folderMappingsBySourceId, - copiedContainersBySourceId, templatesMappingBySourceId, - contentsToCopyDependencies, copiedContentTypes); - currentProgress += progressIncrement; - siteCopyStatus.updateProgress((int) currentProgress); - if (contentCount % 100 == 0) { - HibernateUtil.closeAndCommitTransaction(); - HibernateUtil.startTransaction(); - } - contentCount++; + if (null != sourceContent && sourceContent.isHTMLPage()) { + this.processCopyOfContentlet(sourceContent, copyOptions, + destinationSite, copiedContentsBySourceId, copiedFoldersBySourceId, + copiedContainersBySourceId, copiedTemplatesBySourceId, + contentsWithRelationships, copiedContentTypesBySourceId); + currentProgress += progressIncrement; + siteCopyStatus.updateProgress((int) currentProgress); + if (contentCount % 100 == 0) { + HibernateUtil.closeAndCommitTransaction(); + HibernateUtil.startTransaction(); + } + contentCount++; + } } Logger.info(this, String.format("-> A total of %d contents have been copied", contentCount)); // Copy contentlet dependencies - this.copyRelatedContentlets(contentsToCopyDependencies, - contentMappingsBySourceId, copiedRelationships, copyOptions); + this.copyRelatedContentlets(contentsWithRelationships, + copiedContentsBySourceId, copiedRelationshipsBySourceId, copyOptions); } this.siteCopyStatus.updateProgress(95); @@ -975,63 +979,70 @@ private Folder copyFolder(final Folder sourceFolder, final Host destinationSite, * If the content to copy is a Content Page, then its multi-tree structure needs to be updated, * which involves updating the child references to point to the recently copied contentlets. *

- * - * @param sourceContent The {@link Contentlet} whose data will be copied. - * @param copyOptions The preferences selected by the user regarding what - * elements of the source site will be copied over to the new - * site. - * @param destinationSite The new {@link Host} object that will contain the - * information from the source site. - * @param copiedContentlets A {@link Map} containing the association between the - * Contentlet's Identifier in the source site and the - * Contentlet's Identifier in the destination site. This - * map keeps both the source and the destination - * {@link Contentlet} objects. - * @param copiedFolders A {@link Map} containing the association between the - * folder's Identifier from the source site with the folder's - * Identifier from the destination site. This map keeps both - * the source and the destination {@link Folder} objects. - * @param copiedContainers A {@link Map} that says what containerId from the source - * site has become what in the new copy-site - * @param copiedTemplates A {@link Map} that says what templateId from the source - * site has become what in the new copy-site - * @param contentletsWithRelationships The dependencies of the contentlet to copy. - * @param copiedContentTypes + * + * @param sourceContent The {@link Contentlet} whose data will be copied. + * @param copyOptions The preferences selected by the user regarding what + * elements of the source site will be copied over to the + * new site. + * @param destinationSite The new {@link Host} object that will contain the + * information from the source site. + * @param copiedContentletsBySourceId A {@link Map} containing the association between the + * Contentlet's Identifier in the source site and the + * Contentlet's Identifier in the destination site. This + * map keeps both the source and the destination + * {@link Contentlet} objects. + * @param copiedFoldersBySourceId A {@link Map} containing the association between the + * folder's Identifier from the source site with the + * folder's Identifier from the destination site. This map + * keeps both the source and the destination {@link Folder} + * objects. + * @param copiedContainersBySourceId A {@link Map} that says what containerId from the source + * site has become what in the new copy-site. + * @param copiedTemplatesBySourceId A {@link Map} that says what templateId from the source + * site has become what in the new copy-site. + * @param contentsWithRelationships The list of Contentlets that have other content related + * to them. + * @param copiedContentTypesBySourceId A {@link Map} containing the association between the + * Content Type's Identifier in the source site and the + * Content Type's Identifier in the destination site. This + * is relevant ONLY if the + * {@code FEATURE_FLAG_ENABLE_CONTENT_TYPE_COPY} property + * is enabled. */ private Contentlet processCopyOfContentlet(final Contentlet sourceContent, final HostCopyOptions copyOptions, final Host destinationSite, - final Map contentMappingsBySourceId, - final Map folderMappingsBySourceId, + final Map copiedContentletsBySourceId, + final Map copiedFoldersBySourceId, final Map copiedContainersBySourceId, - final Map templatesMappingBySourceId, - final List contentsToCopyDependencies, - final Map copiedContentTypes) { + final Map copiedTemplatesBySourceId, + final List contentsWithRelationships, + final Map copiedContentTypesBySourceId) { - //Since certain properties are modified here we're gonna use a defensive copy to avoid cache issue. 
+ //Since certain properties are modified here we're going to use a defensive copy to avoid cache issue. final Contentlet sourceCopy = new Contentlet(sourceContent); Contentlet newContent = null; try { - if (contentMappingsBySourceId.containsKey(sourceCopy.getIdentifier())) { + if (copiedContentletsBySourceId.containsKey(sourceCopy.getIdentifier())) { // The content has already been copied Logger.debug(HostAssetsJobImpl.class,()->String.format("---> Content identified by `%s` has been copied already.", sourceCopy.getIdentifier())); - return contentMappingsBySourceId.get(sourceCopy.getIdentifier()).destinationContent; + return copiedContentletsBySourceId.get(sourceCopy.getIdentifier()).destinationContent; } sourceCopy.getMap().put(Contentlet.DONT_VALIDATE_ME, true); sourceCopy.getMap().put(Contentlet.DISABLE_WORKFLOW, true); sourceCopy.setLowIndexPriority(true); if (copyOptions.isCopyTemplatesAndContainers() && sourceCopy.isHTMLPage()) { - //If we're dealing with pages, need pass template mappings to the copyContentlet - //such method deals with all versions of the contentlet and it needs to know about the mapping info internally. - sourceCopy.getMap().put(Contentlet.TEMPLATE_MAPPINGS, templatesMappingBySourceId); + //If we're dealing with pages, we need to pass template mappings to the copyContentlet + //such a method deals with all versions of the contentlet, and it needs to know about the mapping info internally. + sourceCopy.getMap().put(Contentlet.TEMPLATE_MAPPINGS, copiedTemplatesBySourceId); } - final ContentType destinationContentType = Try.of(() -> copiedContentTypes.get(sourceCopy.getContentTypeId()).destinationContentType).getOrNull(); + final ContentType destinationContentType = Try.of(() -> copiedContentTypesBySourceId.get(sourceCopy.getContentTypeId()).destinationContentType).getOrNull(); if (InodeUtils.isSet(sourceCopy.getFolder()) && !sourceCopy.getFolder().equals(this.SYSTEM_FOLDER.getInode())) { // The source content has a folder assigned in the source Site we copy it to the // same destination folder final Folder sourceFolder = this.folderAPI.find(sourceCopy.getFolder(), this.SYSTEM_USER, DONT_RESPECT_FRONTEND_ROLES); - final Folder destinationFolder = folderMappingsBySourceId.get(sourceFolder.getInode()) != null ? folderMappingsBySourceId + final Folder destinationFolder = copiedFoldersBySourceId.get(sourceFolder.getInode()) != null ? 
                        .get(sourceFolder.getInode()).destinationFolder : null;
                if (!copyOptions.isCopyFolders()) {
                    return null;
@@ -1063,15 +1074,15 @@ private Contentlet processCopyOfContentlet(final Contentlet sourceContent,
                    // Copy page-associated contentlets
                    final List<MultiTree> pageContents = APILocator.getMultiTreeAPI().getMultiTrees(sourceCopy.getIdentifier());
                    for (final MultiTree sourceMultiTree : pageContents) {
-                        String newChild = sourceMultiTree.getChild();
+                        String newChild = sourceMultiTree.getContentlet();
                        // Update the child reference to point to the previously copied content
-                        if (contentMappingsBySourceId.containsKey(sourceMultiTree.getChild())) {
-                            newChild = contentMappingsBySourceId.get(sourceMultiTree.getChild()).destinationContent.getIdentifier();
+                        if (copiedContentletsBySourceId.containsKey(sourceMultiTree.getContentlet())) {
+                            newChild = copiedContentletsBySourceId.get(sourceMultiTree.getContentlet()).destinationContent.getIdentifier();
                        }
-                        String newContainer = sourceMultiTree.getParent2();
-                        if(copiedContainersBySourceId.containsKey(sourceMultiTree.getParent2())){
-                            newContainer = copiedContainersBySourceId.get(sourceMultiTree.getParent2()).getIdentifier();
+                        String newContainer = sourceMultiTree.getContainer();
+                        if(copiedContainersBySourceId.containsKey(sourceMultiTree.getContainer())){
+                            newContainer = copiedContainersBySourceId.get(sourceMultiTree.getContainer()).getIdentifier();
                        }
                        final MultiTree multiTree = new MultiTree(newContent.getIdentifier(),
@@ -1086,14 +1097,12 @@ private Contentlet processCopyOfContentlet(final Contentlet sourceContent,
                }// Pages are a big deal.
-                contentMappingsBySourceId.put(sourceCopy.getIdentifier(), new ContentMapping(sourceCopy, newContent));
+                copiedContentletsBySourceId.put(sourceCopy.getIdentifier(), new ContentMapping(sourceCopy, newContent));
                final Contentlet finalNewContent = newContent;
                Logger.debug(HostAssetsJobImpl.class,()->String.format("---> Re-Mapping content: Identifier `%s` now points to `%s`.", sourceCopy.getIdentifier(), finalNewContent
                        .getIdentifier()));
-                if (doesRelatedContentExists(sourceCopy)) {
-                    contentsToCopyDependencies.add(sourceCopy);
-                }
+                this.checkRelatedContentToCopy(sourceCopy, contentsWithRelationships);
            } catch (final Exception e) {
                Logger.error(this, String.format("An error occurred when copying content '%s' from Site '%s' to Site '%s'." + " The process will continue...", sourceCopy.getIdentifier(), sourceCopy.getHost(),
@@ -1111,61 +1120,75 @@ private Contentlet processCopyOfContentlet(final Contentlet sourceContent,
     * the copied Contentlets to the new Relationship records as they reflect the new IDs for the
     * copied Content Types as well.
     *
-     * @param contentsToCopyDependencies The list of {@link Contentlet} objects that are the
-     *                                   parents of a given relationship.
-     * @param contentMappingsBySourceId  The {@link Map} containing the association between the
-     *                                   contentlet's Identifier from the source site and the
-     *                                   contentlet's Identifier from the destination site. This
-     *                                   map keeps both the source and the destination
-     *                                   {@link Contentlet} objects.
-     * @param copiedRelationships        The {@link Map} containing the copied Relationships from
-     *                                   the source Site. This way, the copied Contentlets will
-     *                                   be saved just like the original ones.
-     * @param copyOptions                The {@link HostCopyOptions} object containing what objects
-     *                                   from the source Site must be copied to the destination
-     *                                   Site.
+     * @param contentsWithRelationships     The list of {@link Contentlet} objects that are the
+     *                                      parents of a given relationship.
+     * @param copiedContentsBySourceId      The {@link Map} containing the association between the
+     *                                      contentlet's Identifier from the source site and the
+     *                                      contentlet's Identifier from the destination site. This
+     *                                      map keeps both the source and the destination
+     *                                      {@link Contentlet} objects.
+     * @param copiedRelationshipsBySourceId The {@link Map} containing the copied Relationships
+     *                                      from the source Site. This way, the copied Contentlets
+     *                                      will be saved just like the original ones.
+     * @param copyOptions                   The {@link HostCopyOptions} object containing what
+     *                                      objects from the source Site must be copied to the
+     *                                      destination Site.
     *
     * @throws DotDataException An error occurred when updating records in the database.
     * @throws DotSecurityException The {@link User} accessing the APIs doesn't have the required
     *                              permissions to perform this action.
     */
-    private void copyRelatedContentlets(final List<Contentlet> contentsToCopyDependencies,
-            final Map contentMappingsBySourceId, final Map copiedRelationships, final HostCopyOptions copyOptions) throws DotDataException, DotSecurityException {
-        for (final Contentlet sourceContent : contentsToCopyDependencies) {
-            final Contentlet destinationContent = contentMappingsBySourceId.get(sourceContent.getIdentifier()).destinationContent;
+    private void copyRelatedContentlets(final List<Contentlet> contentsWithRelationships,
+            final Map copiedContentsBySourceId, final Map copiedRelationshipsBySourceId, final HostCopyOptions copyOptions) throws DotDataException, DotSecurityException {
+        for (final Contentlet sourceContent : contentsWithRelationships) {
+            boolean isDestinationContentInSystemHost = false;
+            Contentlet destinationContent;
+            if (Host.SYSTEM_HOST.equals(sourceContent.getHost())) {
+                destinationContent = sourceContent;
+                isDestinationContentInSystemHost = true;
+            } else {
+                destinationContent = copiedContentsBySourceId.get(sourceContent.getIdentifier()).destinationContent;
+            }
            final Map<Relationship, List<Contentlet>> contentRelationships = new HashMap<>();
-            final List<Relationship> rels = this.relationshipAPI.byContentType(sourceContent.getContentType());
-            for (final Relationship r : rels) {
-                if (!contentRelationships.containsKey(r)) {
-                    contentRelationships.put(r, new ArrayList<>());
+            final List<Relationship> relationshipsInContentType = this.relationshipAPI.byContentType(sourceContent.getContentType());
+            for (final Relationship relationship : relationshipsInContentType) {
+                if (!contentRelationships.containsKey(relationship)) {
+                    contentRelationships.put(relationship, new ArrayList<>());
                }
-                final List<Contentlet> cons = this.contentAPI.getRelatedContent(sourceContent, r, this.SYSTEM_USER, RESPECT_FRONTEND_ROLES);
+                final List<Contentlet> relatedContentlets =
+                        this.contentAPI.getRelatedContent(sourceContent, relationship, this.SYSTEM_USER, RESPECT_FRONTEND_ROLES);
                List<Contentlet> records = new ArrayList<>();
-                for (final Contentlet c : cons) {
-                    records = contentRelationships.get(r);
-                    if (UtilMethods.isSet(contentMappingsBySourceId.get(c.getIdentifier()))) {
-                        final Tree relationshipData = TreeFactory.getTree(c.getIdentifier(), sourceContent.getIdentifier(), r.getRelationTypeValue());
+                for (final Contentlet relatedContentlet : relatedContentlets) {
+                    records = contentRelationships.get(relationship);
+                    if (UtilMethods.isSet(copiedContentsBySourceId.get(relatedContentlet.getIdentifier()))) {
+                        final Tree relationshipData =
+                                TreeFactory.getTree(relatedContentlet.getIdentifier(),
+                                        sourceContent.getIdentifier(), relationship.getRelationTypeValue());
                        // In self-related Relationships, we need to make sure that the related Contentlet is NOT the
                        // parent in the relationship in order to NOT create a duplicate relationship
-                        if (this.relationshipAPI.sameParentAndChild(r) && UtilMethods.isSet(relationshipData.getParent())) {
+                        if (this.relationshipAPI.sameParentAndChild(relationship) && UtilMethods.isSet(relationshipData.getParent())) {
                            continue;
                        }
-                        records.add(contentMappingsBySourceId.get(c.getIdentifier()).destinationContent);
+                        records.add(copiedContentsBySourceId.get(relatedContentlet.getIdentifier()).destinationContent);
+                        if (isDestinationContentInSystemHost) {
+                            records.add(relatedContentlet);
+                        }
                    } else {
-                        records.add(c);
+                        records.add(relatedContentlet);
                    }
                }
                if (!records.isEmpty()) {
                    ContentletRelationshipRecords related;
-                    if (!Config.getBooleanProperty("FEATURE_FLAG_ENABLE_CONTENT_TYPE_COPY", false) || !copyOptions.isCopyContentTypes()) {
+                    if (!CONTENT_TYPE_COPY_FLAG.get() || !copyOptions.isCopyContentTypes()) {
                        related = new ContentletRelationships(
-                                destinationContent).new ContentletRelationshipRecords(r, true);
+                                destinationContent).new ContentletRelationshipRecords(relationship, true);
                    } else {
-                        if (!copiedRelationships.containsKey(r.getInode())) {
+                        if (!copiedRelationshipsBySourceId.containsKey(relationship.getInode())) {
                            continue;
                        }
-                        final Relationship copiedRelationship = copiedRelationships.get(r.getInode()).destinationRelationship;
-                        related = new ContentletRelationships(destinationContent).new ContentletRelationshipRecords(copiedRelationship, true);
+                        final Relationship copiedRelationship = copiedRelationshipsBySourceId.get(relationship.getInode()).destinationRelationship;
+                        related = new ContentletRelationships(destinationContent)
+                                .new ContentletRelationshipRecords(copiedRelationship, true);
                    }
                    related.setRecords(records);
                    this.contentAPI.relateContent(destinationContent, related, this.SYSTEM_USER, DONT_RESPECT_FRONTEND_ROLES);
@@ -1180,28 +1203,36 @@ private void copyRelatedContentlets(final List contentsToCopyDepende
     * in mind that, in the case of self-related Content Types, the parent and child point to each
     * other, so the Tree data must be accessed and read accordingly.
     *
-     * @param contentlet The Contentlet whose relationships will be verified.
-     *
-     * @return If the Contentlet is the parent of one or more Contentlets, returns {@code true}.
-     *         Otherwise, returns {@code false}.
+     * @param contentlet                   The Contentlet whose relationships will be verified.
+     * @param contentletsWithRelationships The list of Contentlets that have relationships, and
+     *                                     need to be processed later.
     *
    * @throws DotDataException An error occurred when accessing the data source.
     * @throws DotSecurityException The specified user does not have the required permissions to
     *                              perform this action.
     */
-    private boolean doesRelatedContentExists(final Contentlet contentlet) throws DotDataException, DotSecurityException{
-        if(contentlet == null) {
-            return false;
+    private void checkRelatedContentToCopy(final Contentlet contentlet,
+            final List<Contentlet> contentletsWithRelationships) throws DotDataException, DotSecurityException {
+        if (contentlet == null) {
+            return;
+        }
+        final List<Relationship> relationshipsByCT =
+                this.relationshipAPI.byContentType(contentlet.getContentType());
+        for (final Relationship relationship : relationshipsByCT) {
+            final List<Contentlet> relatedContents = this.contentAPI.getRelatedContent(contentlet,
+                    relationship, this.SYSTEM_USER, DONT_RESPECT_FRONTEND_ROLES);
+            if (COPY_RELATED_CONTENT_IN_SYSTEM_HOST_CONTENTS_FLAG.get()) {
+                for (final Contentlet relatedContent : relatedContents) {
+                    if (Host.SYSTEM_HOST.equals(relatedContent.getHost())) {
+                        contentletsWithRelationships.add(relatedContent);
+                    }
+                }
+            }
+            if (!relatedContents.isEmpty() && this.relationshipAPI.isParent(relationship,
+                    contentlet.getContentType())) {
+                contentletsWithRelationships.add(contentlet);
+            }
        }
-
-        final List<Relationship> rels = APILocator.getRelationshipAPI().byContentType(contentlet.getContentType());
-        for (final Relationship r : rels) {
-            final List<Contentlet> cons = this.contentAPI.getRelatedContent(contentlet, r, this.SYSTEM_USER, DONT_RESPECT_FRONTEND_ROLES);
-            if(cons.size() > 0 && APILocator.getRelationshipAPI().isParent(r, contentlet.getContentType())){
-                return true;
-            }
-        }
-        return false;
    }

    @Override