diff --git a/.docker/alpine.docker b/.docker/alpine.docker index 77be9ff..db5bb8f 100644 --- a/.docker/alpine.docker +++ b/.docker/alpine.docker @@ -4,18 +4,17 @@ ARG REGISTRY="docker.io" ## BUILDER ##################################################################### -FROM golang:alpine3.17 as builder +FROM ${REGISTRY}/essentialkaos/golang:alpine3.18 as builder WORKDIR /go/src/github.com/essentialkaos/atlassian-cloud-backuper COPY . . -# hadolint ignore=DL3018 -RUN apk add --no-cache git make && make deps && make all +RUN make deps && make all ## FINAL IMAGE ################################################################# -FROM ${REGISTRY}/essentialkaos/alpine:3.17 +FROM ${REGISTRY}/essentialkaos/alpine:3.18 LABEL org.opencontainers.image.title="atlassian-cloud-backuper" \ org.opencontainers.image.description="Atlassian Cloud Backuper" \ diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index 2eb6195..0000000 --- a/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,51 +0,0 @@ -_Before opening an issue, search for similar bug reports or feature requests on GitHub Issues. If yes, please add a_ 👍 _reaction to the existing issue. If no similar issue can be found, fill out either the "Bug Report" or the "Feature Request" section below. Erase the other section and everything on and above this line._ - -### Bug report - -**System info:** - -* **Verbose version info (`atlassian-cloud-backuper -vv`):** -* **Install tools:** - -**Steps to reproduce:** - -1. [First Step] -2. [Second Step] -3. [and so on...] - -**Expected behavior:** - -[What you expected to happen] - -**Actual behavior:** - -[What actually happened] - -**Additional info:** - -[Include gist of relevant config, logs, etc.] - -Please run those if possible and link them from a [gist](http://gist.github.com). - ---- - -### Feature Request - -Opening a feature request kicks off a discussion. Requests may be closed if we're not actively planning to work on them. 
- -**Proposal:** - -[Description of the feature] - -**Current behavior:** - -[What currently happens] - -**Desired behavior:** - -[What you would like to happen] - -**Use case:** - -[Why is this important (helps with prioritizing requests)] - diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 0000000..1f96f54 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,64 @@ +name: ❗ Bug Report +description: File a bug report +title: "[Bug]: " +labels: ["issue • bug"] +assignees: + - andyone + +body: + - type: markdown + attributes: + value: | + > [!IMPORTANT] + > Before you open an issue, search GitHub Issues for a similar bug reports. If so, please add a 👍 reaction to the existing issue. + + - type: textarea + attributes: + label: Verbose application info + description: Output of `atlassian-cloud-backuper -vv` command + render: shell + validations: + required: true + + - type: dropdown + id: version + attributes: + label: Install tools + description: How did you install this application + options: + - From Sources + - RPM Package + - Prebuilt Binary + default: 0 + validations: + required: true + + - type: textarea + attributes: + label: Steps to reproduce + description: Short guide on how to reproduce this problem on our site + placeholder: | + 1. [First Step] + 2. [Second Step] + 3. [and so on...] + validations: + required: true + + - type: textarea + attributes: + label: Expected behavior + description: What you expected to happen + validations: + required: true + + - type: textarea + attributes: + label: Actual behavior + description: What actually happened + validations: + required: true + + - type: textarea + attributes: + label: Additional info + description: Include gist of relevant config, logs, etc. 
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..3ba13e0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1 @@ +blank_issues_enabled: false diff --git a/.github/ISSUE_TEMPLATE/question.yml b/.github/ISSUE_TEMPLATE/question.yml new file mode 100644 index 0000000..55bfc41 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/question.yml @@ -0,0 +1,26 @@ +name: ❓ Question +description: Question about application, configuration or code +title: "[Question]: " +labels: ["issue • question"] +assignees: + - andyone + +body: + - type: markdown + attributes: + value: | + > [!IMPORTANT] + > Before you open an issue, search GitHub Issues for a similar question. If so, please add a 👍 reaction to the existing issue. + + - type: textarea + attributes: + label: Question + description: Detailed question + validations: + required: true + + - type: textarea + attributes: + label: Related version application info + description: Output of `atlassian-cloud-backuper -vv` command + render: shell diff --git a/.github/ISSUE_TEMPLATE/suggestion.yml b/.github/ISSUE_TEMPLATE/suggestion.yml new file mode 100644 index 0000000..39c69e0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/suggestion.yml @@ -0,0 +1,43 @@ +name: ➕ Suggestion +description: Suggest new feature or improvement +title: "[Suggestion]: " +labels: ["issue • suggestion"] +assignees: + - andyone + +body: + - type: markdown + attributes: + value: | + > [!IMPORTANT] + > Before you open an issue, search GitHub Issues for a similar feature requests. If so, please add a 👍 reaction to the existing issue. + > + > Opening a feature request kicks off a discussion. Requests may be closed if we're not actively planning to work on them. 
+ + - type: textarea + attributes: + label: Proposal + description: Description of the feature + validations: + required: true + + - type: textarea + attributes: + label: Current behavior + description: What currently happens + validations: + required: true + + - type: textarea + attributes: + label: Desired behavior + description: What you would like to happen + validations: + required: true + + - type: textarea + attributes: + label: Use case + description: Why is this important (helps with prioritizing requests) + validations: + required: true diff --git a/.github/images/usage-container.svg b/.github/images/usage-container.svg index af4f579..506b518 100644 --- a/.github/images/usage-container.svg +++ b/.github/images/usage-container.svg @@ -1,5 +1,5 @@ - - + + Atlassian Cloud Backuper Usage @@ -14,12 +14,13 @@ text { font-family: ui-monospace, 'JetBrains Mono', 'Fira Code', 'Iosevka', SFMono-Regular, 'SF Mono', Menlo, Consolas, 'Liberation Mono', monospace; font-size: 14px; font-display: swap; white-space: pre; } tspan { white-space: pre; } - .bold { font-weight: bold; } - .italic { font-style: italic; } - .underline { text-decoration-line: underline; } - .dim { opacity: 0.75; } .terminal { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji"; font-weight: 500; font-size: 15px; } + tspan.BB { font-weight: bold; } + tspan.II { font-style: italic; } + tspan.UU { text-decoration-line: underline; } + tspan.DD { opacity: 0.75; } + text { fill: #333; } .terminal { fill: #333; } .bg { fill: #F0F0F0; stop-color: #F0F0F0; } @@ -30,8 +31,8 @@ tspan.b { fill: #0060BA; } tspan.m { fill: #BA00AD; } tspan.c { fill: #00B7BA; } - tspan.LG { fill: #696969; } - tspan.DG { fill: #AAA; } + tspan.s { fill: #696969; } + tspan.d { fill: #AAA; } tspan.R { fill: #EA4B4B; } tspan.G { fill: #88DB43; } tspan.Y { fill: #D3BA3B; } @@ -53,8 +54,8 @@ tspan.b { fill: #5C9AD8; } tspan.m { fill: #B6419E; } 
tspan.c { fill: #38AFC5; } - tspan.LG { fill: #AAA; } - tspan.DG { fill: #696969; } + tspan.s { fill: #AAA; } + tspan.d { fill: #696969; } tspan.R { fill: #EF5A56; } tspan.G { fill: #49E471; } tspan.Y { fill: #E3E342; } @@ -67,7 +68,7 @@ } - + @@ -77,44 +78,49 @@ Terminal - Usage: atlassian-cloud-backuper {options} target - Options - --config, -c file ........................ Path to configuration file - --interactive, -I ........................ Interactive mode - --no-color, -nc .......................... Disable colors in output - --help, -h ............................... Show this help message - --version, -v ............................ Show version - --access-account name .................... Account name (ACCESS_ACCOUNT) - --access-email email ..................... User email with access to API (ACCESS_EMAIL) - --access-api-key key ..................... API key (ACCESS_API_KEY) - --storage-type fs/sftp/s3 ................ Storage type (STORAGE_TYPE) - --storage-fs-path path ................... Path on system for backups (STORAGE_FS_PATH) - --storage-fs-mode mode ................... File mode on system (STORAGE_FS_MODE) - --storage-sftp-host host ................. SFTP host (STORAGE_SFTP_HOST) - --storage-sftp-user name ................. SFTP user name (STORAGE_SFTP_USER) - --storage-sftp-key key ................... Base64-encoded private key (STORAGE_SFTP_KEY) - --storage-sftp-path path ................. Path on SFTP (STORAGE_SFTP_PATH) - --storage-sftp-mode mode ................. File mode on SFTP (STORAGE_SFTP_MODE) - --storage-s3-host host ................... S3 host (STORAGE_S3_HOST) - --storage-s3-region region ............... S3 region (STORAGE_S3_REGION) - --storage-s3-access-key id ............... S3 access key ID (STORAGE_S3_ACCESS_KEY) - --storage-s3-secret-key key .............. S3 access secret key (STORAGE_S3_SECRET_KEY) - --storage-s3-bucket name ................. S3 bucket (STORAGE_S3_BUCKET) - --storage-s3-path path ................... 
Path for backups (STORAGE_S3_PATH) - --jira-output-file template .............. Jira backup output file name template - (JIRA_OUTPUT_FILE) - --jira-include-attachments yes/no ........ Include attachments to Jira backup - (JIRA_INCLUDE_ATTACHMENTS) - --jira-cloud-format yes/no ............... Create Jira backup for Cloud (JIRA_CLOUD_FORMAT) - --confluence-output-file template ........ Confluence backup output file name template - (CONFLUENCE_OUTPUT_FILE) - --confluence-include-attachments yes/no .. Include attachments to Confluence backup - (CONFLUENCE_INCLUDE_ATTACHMENTS) - --confluence-cloud-format yes/no ......... Create Confluence backup for Cloud - (CONFLUENCE_CLOUD_FORMAT) - --temp-dir path .......................... Path to directory for temporary data (TEMP_DIR) - --log-format text/json ................... Log format (LOG_FORMAT) - --log-level level ........................ Log level (LOG_LEVEL) + Usage: atlassian-cloud-backuper {options} target + Options + --config, -c file ........................ Path to configuration file + --interactive, -I ........................ Interactive mode + --server, -S ............................. Server mode + --no-color, -nc .......................... Disable colors in output + --help, -h ............................... Show this help message + --version, -v ............................ Show version + --access-account name .................... Account name (ACCESS_ACCOUNT) + --access-email email ..................... User email with access to API (ACCESS_EMAIL) + --access-api-key key ..................... API key (ACCESS_API_KEY) + --server-ip ip ........................... HTTP server IP (SERVER_IP) + --server-port port ....................... HTTP server port (SERVER_PORT) + --server-access-token token .............. HTTP access token (SERVER_ACCESS_TOKEN) + --storage-type fs/sftp/s3 ................ Storage type (STORAGE_TYPE) + --storage-encryption-key key ............. 
Data encryption key (STORAGE_ENCRYPTION_KEY) + --storage-fs-path path ................... Path on system for backups (STORAGE_FS_PATH) + --storage-fs-mode mode ................... File mode on system (STORAGE_FS_MODE) + --storage-sftp-host host ................. SFTP host (STORAGE_SFTP_HOST) + --storage-sftp-user name ................. SFTP user name (STORAGE_SFTP_USER) + --storage-sftp-key key ................... Base64-encoded private key (STORAGE_SFTP_KEY) + --storage-sftp-path path ................. Path on SFTP (STORAGE_SFTP_PATH) + --storage-sftp-mode mode ................. File mode on SFTP (STORAGE_SFTP_MODE) + --storage-s3-host host ................... S3 host (STORAGE_S3_HOST) + --storage-s3-region region ............... S3 region (STORAGE_S3_REGION) + --storage-s3-access-key id ............... S3 access key ID (STORAGE_S3_ACCESS_KEY) + --storage-s3-secret-key key .............. S3 access secret key (STORAGE_S3_SECRET_KEY) + --storage-s3-bucket name ................. S3 bucket (STORAGE_S3_BUCKET) + --storage-s3-path path ................... Path for backups (STORAGE_S3_PATH) + --jira-output-file template .............. Jira backup output file name template + (JIRA_OUTPUT_FILE) + --jira-include-attachments yes/no ........ Include attachments to Jira backup + (JIRA_INCLUDE_ATTACHMENTS) + --jira-cloud-format yes/no ............... Create Jira backup for Cloud (JIRA_CLOUD_FORMAT) + --confluence-output-file template ........ Confluence backup output file name template + (CONFLUENCE_OUTPUT_FILE) + --confluence-include-attachments yes/no .. Include attachments to Confluence backup + (CONFLUENCE_INCLUDE_ATTACHMENTS) + --confluence-cloud-format yes/no ......... Create Confluence backup for Cloud + (CONFLUENCE_CLOUD_FORMAT) + --temp-dir path .......................... Path to directory for temporary data (TEMP_DIR) + --log-format text/json ................... Log format (LOG_FORMAT) + --log-level level ........................ 
Log level (LOG_LEVEL) - + diff --git a/.github/images/usage.svg b/.github/images/usage.svg index 3f12b95..6512674 100644 --- a/.github/images/usage.svg +++ b/.github/images/usage.svg @@ -1,5 +1,5 @@ - - + + Atlassian Cloud Backuper Usage @@ -14,12 +14,13 @@ text { font-family: ui-monospace, 'JetBrains Mono', 'Fira Code', 'Iosevka', SFMono-Regular, 'SF Mono', Menlo, Consolas, 'Liberation Mono', monospace; font-size: 14px; font-display: swap; white-space: pre; } tspan { white-space: pre; } - .bold { font-weight: bold; } - .italic { font-style: italic; } - .underline { text-decoration-line: underline; } - .dim { opacity: 0.75; } .terminal { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji"; font-weight: 500; font-size: 15px; } + tspan.BB { font-weight: bold; } + tspan.II { font-style: italic; } + tspan.UU { text-decoration-line: underline; } + tspan.DD { opacity: 0.75; } + text { fill: #333; } .terminal { fill: #333; } .bg { fill: #F0F0F0; stop-color: #F0F0F0; } @@ -30,8 +31,8 @@ tspan.b { fill: #0060BA; } tspan.m { fill: #BA00AD; } tspan.c { fill: #00B7BA; } - tspan.LG { fill: #696969; } - tspan.DG { fill: #AAA; } + tspan.s { fill: #696969; } + tspan.d { fill: #AAA; } tspan.R { fill: #EA4B4B; } tspan.G { fill: #88DB43; } tspan.Y { fill: #D3BA3B; } @@ -53,8 +54,8 @@ tspan.b { fill: #5C9AD8; } tspan.m { fill: #B6419E; } tspan.c { fill: #38AFC5; } - tspan.LG { fill: #AAA; } - tspan.DG { fill: #696969; } + tspan.s { fill: #AAA; } + tspan.d { fill: #696969; } tspan.R { fill: #EF5A56; } tspan.G { fill: #49E471; } tspan.Y { fill: #E3E342; } @@ -67,7 +68,7 @@ } - + @@ -77,13 +78,14 @@ Terminal - Usage: atlassian-cloud-backuper {options} target - Options - --config, -c file .. Path to configuration file - --interactive, -I .. Interactive mode - --no-color, -nc .... Disable colors in output - --help, -h ......... Show this help message - --version, -v ...... 
Show version + Usage: atlassian-cloud-backuper {options} target + Options + --config, -c file .. Path to configuration file + --interactive, -I .. Interactive mode + --server, -S ....... Server mode + --no-color, -nc .... Disable colors in output + --help, -h ......... Show this help message + --version, -v ...... Show version - + diff --git a/.github/workflows/docker-push.yml b/.github/workflows/cd-release.yml similarity index 92% rename from .github/workflows/docker-push.yml rename to .github/workflows/cd-release.yml index 592cba8..43539a0 100644 --- a/.github/workflows/docker-push.yml +++ b/.github/workflows/cd-release.yml @@ -1,4 +1,4 @@ -name: "Docker Push" +name: CD (Release) on: release: @@ -19,15 +19,13 @@ permissions: env: IMAGE_NAME: ${{ github.repository }} + DOCKER_FILE: alpine jobs: - Docker: - name: Docker Build & Publish + BuildImage: + name: Image Build & Publish runs-on: ubuntu-latest - env: - DOCKER_FILE: alpine - steps: - name: Checkout uses: actions/checkout@v4 @@ -92,7 +90,7 @@ jobs: - name: Check if build/rebuild is required id: build_check run: | - if [[ "${{github.event_name}}" == "release" ]] ; then + if [[ "$GITHUB_EVENT_NAME" == "release" ]] ; then echo "build=true" >> $GITHUB_OUTPUT exit 0 fi @@ -106,9 +104,8 @@ jobs: echo -e "::group::\033[34mDownloading built image…\033[0m" if ! docker pull ghcr.io/${{env.IMAGE_NAME}}:latest ; then - echo "::warning::Rebuild is required (reason: new image)" - echo "build=true" >> $GITHUB_OUTPUT - exit 0 + echo "::error::Can't download image ghcr.io/${{env.IMAGE_NAME}}:latest" + exit 1 fi echo "::endgroup::" @@ -136,7 +133,7 @@ jobs: - name: Build and push Docker images (Docker) if: ${{ steps.build_check.outputs.build == 'true' }} - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: true context: . 
@@ -149,7 +146,7 @@ jobs: - name: Build and push Docker images (GHCR) if: ${{ steps.build_check.outputs.build == 'true' }} - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: true context: . diff --git a/.github/workflows/ci-pr.yml b/.github/workflows/ci-pr.yml new file mode 100644 index 0000000..0624c2f --- /dev/null +++ b/.github/workflows/ci-pr.yml @@ -0,0 +1,69 @@ +name: CI (PR) + +on: + pull_request: + branches: [master] + workflow_dispatch: + inputs: + force_run: + description: 'Force workflow run' + required: true + type: choice + options: [yes, no] + +permissions: + actions: read + contents: read + statuses: write + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + CI: + uses: ./.github/workflows/ci.yml + secrets: inherit + + ImageBuild: + name: Container Image Build Check + runs-on: ubuntu-latest + + needs: CI + + env: + REGISTRY: ghcr.io + + strategy: + matrix: + image: [ 'alpine' ] + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Login to DockerHub + uses: docker/login-action@v3 + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + if: ${{ env.DOCKERHUB_USERNAME != '' }} + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build Docker image + run: | + docker build --build-arg REGISTRY=${REGISTRY} -f .docker/${{matrix.image}}.docker -t ${{matrix.image}} . 
+ + - name: Show info about built Docker image + uses: essentialkaos/docker-info-action@v1 + with: + image: ${{matrix.image}} + show-labels: true diff --git a/.github/workflows/ci-push.yml b/.github/workflows/ci-push.yml new file mode 100644 index 0000000..65449f4 --- /dev/null +++ b/.github/workflows/ci-push.yml @@ -0,0 +1,27 @@ +name: CI (Push) + +on: + push: + branches: [master, develop] + workflow_call: + workflow_dispatch: + inputs: + force_run: + description: 'Force workflow run' + required: true + type: choice + options: [yes, no] + +permissions: + actions: read + contents: read + statuses: write + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + CI: + uses: ./.github/workflows/ci.yml + secrets: inherit diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c2eb6e3..3d1f55c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,27 +1,13 @@ name: CI on: - push: - branches: [master, develop] - pull_request: - branches: [master] - workflow_dispatch: - inputs: - force_run: - description: 'Force workflow run' - required: true - type: choice - options: [yes, no] + workflow_call: permissions: actions: read contents: read statuses: write -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - jobs: Go: name: Go @@ -94,60 +80,5 @@ jobs: uses: actions/checkout@v4 - name: Check spelling + continue-on-error: true uses: crate-ci/typos@master - - DockerBuild: - name: Docker Build Check - runs-on: ubuntu-latest - - needs: [Hadolint, Perfecto] - - env: - REGISTRY: ghcr.io - - strategy: - matrix: - image: [ 'alpine' ] - - steps: - - name: Check event type - run: | - if [[ "${{github.event_name}}" != "pull_request" ]] ; then - echo "::notice::Event type is not 'pull_request', all job actions will be skipped" - fi - - # This step is a hack for needs+if issue with actions - # More info about issue: https://github.com/actions/runner/issues/491 - - - name: 
Checkout - uses: actions/checkout@v4 - if: ${{ github.event_name == 'pull_request' }} - - - name: Login to DockerHub - uses: docker/login-action@v3 - env: - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - if: ${{ github.event_name == 'pull_request' && env.DOCKERHUB_USERNAME != '' }} - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - if: ${{ github.event_name == 'pull_request' }} - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Build Docker image - if: ${{ github.event_name == 'pull_request' }} - run: | - docker build --build-arg REGISTRY=${REGISTRY} -f .docker/${{matrix.image}}.docker -t ${{matrix.image}} . - - - name: Show info about built Docker image - uses: essentialkaos/docker-info-action@v1 - if: ${{ github.event_name == 'pull_request' }} - with: - image: ${{matrix.image}} - show-labels: true diff --git a/Makefile b/Makefile index 598a86b..02c3bad 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ ################################################################################ -# This Makefile generated by GoMakeGen 3.0.2 using next command: +# This Makefile generated by GoMakeGen 3.0.5 using next command: # gomakegen --mod . 
# # More info: https://kaos.sh/gomakegen @@ -24,11 +24,6 @@ GITREV ?= $(shell test -s $(MAKEDIR)/.git && git rev-parse --short HEAD) all: atlassian-cloud-backuper ## Build all binaries -pack: clean ## Create zip file with YC function - @echo "Packing YC function to zip…" - @go build cloudfunc/ycfunc.go && rm -f ycfunc - @zip atlassian-cloud-backuper -r "app" "backuper" "cloudfunc" "uploader" - atlassian-cloud-backuper: @echo "Building atlassian-cloud-backuper…" @go build $(VERBOSE_FLAG) -ldflags="-X main.gitrev=$(GITREV)" atlassian-cloud-backuper.go @@ -114,6 +109,6 @@ help: ## Show this info | sed 's/ifdef //' \ | awk 'BEGIN {FS = " .*?## "}; {printf " \033[32m%-11s\033[0m %s\n", $$1, $$2}' @echo -e '' - @echo -e '\033[90mGenerated by GoMakeGen 3.0.2\033[0m\n' + @echo -e '\033[90mGenerated by GoMakeGen 3.0.5\033[0m\n' ################################################################################ diff --git a/README.md b/README.md index 91f98cc..db76463 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,8 @@

- GitHub Actions CI Status Code Climate Maintainability - Codebeat badge + GitHub Actions CI Status GitHub Actions CodeQL Status

@@ -17,8 +16,8 @@ #### From [ESSENTIAL KAOS Public Repository](https://kaos.sh/kaos-repo) ```bash -sudo yum install -y https://pkgs.kaos.st/kaos-repo-latest.el$(grep 'CPE_NAME' /etc/os-release | tr -d '"' | cut -d':' -f5).noarch.rpm -sudo yum install atlassian-cloud-backuper +sudo dnf install -y https://pkgs.kaos.st/kaos-repo-latest.el$(grep 'CPE_NAME' /etc/os-release | tr -d '"' | cut -d':' -f5).noarch.rpm +sudo dnf install atlassian-cloud-backuper ``` #### Prebuilt binaries @@ -33,10 +32,6 @@ bash <(curl -fsSL https://apps.kaos.st/get) atlassian-cloud-backuper The latest version of `atlassian-cloud-backuper` also available as container image on [GitHub Container Registry](https://kaos.sh/p/atlassian-cloud-backuper) and [Docker Hub](https://kaos.sh/d/atlassian-cloud-backuper). -#### Cloud/serverless function - -You can use `atlassian-cloud-backuper` as a serverless function on Yandex.Cloud. More information about function configuration can be found in [this documentation](cloudfunc/README.md). 
- ### Usage #### Standalone diff --git a/app/app.go b/app/app.go index e2c2ef0..e95231c 100644 --- a/app/app.go +++ b/app/app.go @@ -8,49 +8,33 @@ package app // ////////////////////////////////////////////////////////////////////////////////// // import ( - "encoding/base64" "fmt" "os" "strings" - "time" - - "github.com/essentialkaos/ek/v12/errutil" - "github.com/essentialkaos/ek/v12/events" - "github.com/essentialkaos/ek/v12/fmtc" - "github.com/essentialkaos/ek/v12/fmtutil" - "github.com/essentialkaos/ek/v12/fsutil" - "github.com/essentialkaos/ek/v12/knf" - "github.com/essentialkaos/ek/v12/log" - "github.com/essentialkaos/ek/v12/options" - "github.com/essentialkaos/ek/v12/path" - "github.com/essentialkaos/ek/v12/req" - "github.com/essentialkaos/ek/v12/spinner" - "github.com/essentialkaos/ek/v12/support" - "github.com/essentialkaos/ek/v12/support/deps" - "github.com/essentialkaos/ek/v12/system/container" - "github.com/essentialkaos/ek/v12/terminal/tty" - "github.com/essentialkaos/ek/v12/timeutil" - "github.com/essentialkaos/ek/v12/tmp" - "github.com/essentialkaos/ek/v12/usage" - "github.com/essentialkaos/ek/v12/usage/completion/bash" - "github.com/essentialkaos/ek/v12/usage/completion/fish" - "github.com/essentialkaos/ek/v12/usage/completion/zsh" - "github.com/essentialkaos/ek/v12/usage/man" - "github.com/essentialkaos/ek/v12/usage/update" - - knfu "github.com/essentialkaos/ek/v12/knf/united" - knfv "github.com/essentialkaos/ek/v12/knf/validators" - knff "github.com/essentialkaos/ek/v12/knf/validators/fs" - knfn "github.com/essentialkaos/ek/v12/knf/validators/network" - - "github.com/essentialkaos/atlassian-cloud-backuper/backuper" - "github.com/essentialkaos/atlassian-cloud-backuper/backuper/confluence" - "github.com/essentialkaos/atlassian-cloud-backuper/backuper/jira" - - "github.com/essentialkaos/atlassian-cloud-backuper/uploader" - "github.com/essentialkaos/atlassian-cloud-backuper/uploader/fs" - "github.com/essentialkaos/atlassian-cloud-backuper/uploader/s3" 
- "github.com/essentialkaos/atlassian-cloud-backuper/uploader/sftp" + + "github.com/essentialkaos/ek/v13/errutil" + "github.com/essentialkaos/ek/v13/fmtc" + "github.com/essentialkaos/ek/v13/knf" + "github.com/essentialkaos/ek/v13/log" + "github.com/essentialkaos/ek/v13/options" + "github.com/essentialkaos/ek/v13/req" + "github.com/essentialkaos/ek/v13/support" + "github.com/essentialkaos/ek/v13/support/deps" + "github.com/essentialkaos/ek/v13/system/container" + "github.com/essentialkaos/ek/v13/terminal" + "github.com/essentialkaos/ek/v13/terminal/tty" + "github.com/essentialkaos/ek/v13/tmp" + "github.com/essentialkaos/ek/v13/usage" + "github.com/essentialkaos/ek/v13/usage/completion/bash" + "github.com/essentialkaos/ek/v13/usage/completion/fish" + "github.com/essentialkaos/ek/v13/usage/completion/zsh" + "github.com/essentialkaos/ek/v13/usage/man" + "github.com/essentialkaos/ek/v13/usage/update" + + knfu "github.com/essentialkaos/ek/v13/knf/united" + knfv "github.com/essentialkaos/ek/v13/knf/validators" + knff "github.com/essentialkaos/ek/v13/knf/validators/fs" + knfn "github.com/essentialkaos/ek/v13/knf/validators/network" ) // ////////////////////////////////////////////////////////////////////////////////// // @@ -58,7 +42,7 @@ import ( // Basic utility info const ( APP = "Atlassian Cloud Backuper" - VER = "0.0.3" + VER = "0.1.0" DESC = "Tool for backuping Atlassian cloud services (Jira and Confluence)" ) @@ -68,6 +52,7 @@ const ( const ( OPT_CONFIG = "c:config" OPT_INTERACTIVE = "I:interactive" + OPT_SERVER = "S:server" OPT_NO_COLOR = "nc:no-color" OPT_HELP = "h:help" OPT_VER = "v:version" @@ -81,7 +66,11 @@ const ( ACCESS_ACCOUNT = "access:account" ACCESS_EMAIL = "access:email" ACCESS_API_KEY = "access:api-key" + SERVER_IP = "server:ip" + SERVER_PORT = "server:port" + SERVER_ACCESS_TOKEN = "server:access-token" STORAGE_TYPE = "storage:type" + STORAGE_ENCRYPTION_KEY = "storage:encryption-key" STORAGE_FS_PATH = "storage-fs:path" STORAGE_FS_MODE = 
"storage-fs:mode" STORAGE_SFTP_HOST = "storage-sftp:host" @@ -109,17 +98,26 @@ const ( LOG_LEVEL = "log:level" ) +// ////////////////////////////////////////////////////////////////////////////////// // + const ( TARGET_JIRA = "jira" TARGET_CONFLUENCE = "confluence" ) +const ( + STORAGE_FS = "fs" + STORAGE_SFTP = "sftp" + STORAGE_S3 = "s3" +) + // ////////////////////////////////////////////////////////////////////////////////// // // optMap contains information about all supported options var optMap = options.Map{ OPT_CONFIG: {Value: "/etc/atlassian-cloud-backuper.knf"}, OPT_INTERACTIVE: {Type: options.BOOL}, + OPT_SERVER: {Type: options.BOOL}, OPT_NO_COLOR: {Type: options.BOOL}, OPT_HELP: {Type: options.MIXED}, OPT_VER: {Type: options.MIXED}, @@ -144,8 +142,9 @@ func Run(gitRev string, gomod []byte) { args, errs := options.Parse(optMap) - if len(errs) != 0 { - printError(errs[0].Error()) + if !errs.IsEmpty() { + terminal.Error("Options parsing errors:") + terminal.Error(errs.String()) os.Exit(1) } @@ -170,7 +169,8 @@ func Run(gitRev string, gomod []byte) { WithChecks(getServiceStatus("Confluence")). 
Print() os.Exit(0) - case options.GetB(OPT_HELP) || len(args) == 0: + case options.GetB(OPT_HELP) || + (!options.Has(OPT_SERVER) && len(args) == 0): genUsage(options.GetS(OPT_HELP)).Print() os.Exit(0) } @@ -179,18 +179,39 @@ func Run(gitRev string, gomod []byte) { loadConfig, validateConfig, setupLogger, - setupTemp, ) if err != nil { - printError(err.Error()) + terminal.Error(err) os.Exit(1) } log.Divider() log.Aux("%s %s starting…", APP, VER) - if !process(args.Get(0).String()) { + err = errutil.Chain( + setupTemp, + setupReq, + ) + + if err != nil { + log.Crit(err.Error()) + os.Exit(1) + } + + if options.GetB(OPT_SERVER) { + err = startServer() + } else { + err = startApp(args) + } + + if err != nil { + if options.GetB(OPT_INTERACTIVE) { + terminal.Error(err) + } + + log.Crit(err.Error()) + os.Exit(1) } } @@ -199,7 +220,7 @@ func Run(gitRev string, gomod []byte) { // preConfigureUI preconfigures UI based on information about user terminal func preConfigureUI() { - if !tty.IsTTY() { + if !tty.IsTTY() || tty.IsSystemd() || container.GetEngine() == container.YANDEX { fmtc.DisableColors = true } @@ -222,7 +243,8 @@ func addExtraOptions(m options.Map) { knfu.AddOptions(m, ACCESS_ACCOUNT, ACCESS_EMAIL, ACCESS_API_KEY, - STORAGE_TYPE, + SERVER_IP, SERVER_PORT, SERVER_ACCESS_TOKEN, + STORAGE_TYPE, STORAGE_ENCRYPTION_KEY, STORAGE_FS_PATH, STORAGE_FS_MODE, STORAGE_SFTP_HOST, STORAGE_SFTP_USER, STORAGE_SFTP_KEY, STORAGE_SFTP_PATH, STORAGE_SFTP_MODE, @@ -240,8 +262,6 @@ func configureUI() { if options.GetB(OPT_NO_COLOR) { fmtc.DisableColors = true } - - req.SetUserAgent("AtlassianCloudBackuper", VER) } // loadConfig loads configuration file @@ -258,7 +278,8 @@ func loadConfig() error { knfu.CombineSimple( config, ACCESS_ACCOUNT, ACCESS_EMAIL, ACCESS_API_KEY, - STORAGE_TYPE, + SERVER_IP, SERVER_PORT, SERVER_ACCESS_TOKEN, + STORAGE_TYPE, STORAGE_ENCRYPTION_KEY, STORAGE_FS_PATH, STORAGE_FS_MODE, STORAGE_SFTP_HOST, STORAGE_SFTP_USER, STORAGE_SFTP_KEY, STORAGE_SFTP_PATH, 
STORAGE_SFTP_MODE, @@ -277,43 +298,56 @@ func loadConfig() error { // validateConfig validates configuration file values func validateConfig() error { validators := []*knf.Validator{ - {ACCESS_ACCOUNT, knfv.Empty, nil}, - {ACCESS_EMAIL, knfv.Empty, nil}, - {ACCESS_API_KEY, knfv.Empty, nil}, + {ACCESS_ACCOUNT, knfv.Set, nil}, + {ACCESS_EMAIL, knfv.Set, nil}, + {ACCESS_API_KEY, knfv.Set, nil}, {ACCESS_EMAIL, knfn.Mail, nil}, - {STORAGE_TYPE, knfv.NotContains, []string{ - "fs", "sftp", "s3", + {STORAGE_TYPE, knfv.SetToAnyIgnoreCase, []string{ + STORAGE_FS, STORAGE_SFTP, STORAGE_S3, }}, - {LOG_FORMAT, knfv.NotContains, []string{ + {LOG_FORMAT, knfv.SetToAnyIgnoreCase, []string{ "", "text", "json", }}, - {LOG_LEVEL, knfv.NotContains, []string{ + {LOG_LEVEL, knfv.SetToAnyIgnoreCase, []string{ "", "debug", "info", "warn", "error", "crit", }}, - {TEMP_DIR, knff.Perms, "DW"}, + {TEMP_DIR, knff.Perms, "DWX"}, } - switch knfu.GetS(STORAGE_TYPE) { - case "fs": + switch strings.ToLower(knfu.GetS(STORAGE_TYPE)) { + case STORAGE_FS: validators = append(validators, &knf.Validator{STORAGE_FS_PATH, knff.Perms, "DRW"}, ) - case "sftp": + case STORAGE_SFTP: + validators = append(validators, + &knf.Validator{STORAGE_SFTP_HOST, knfv.Set, nil}, + &knf.Validator{STORAGE_SFTP_USER, knfv.Set, nil}, + &knf.Validator{STORAGE_SFTP_KEY, knfv.Set, nil}, + &knf.Validator{STORAGE_SFTP_PATH, knfv.Set, nil}, + ) + + case STORAGE_S3: validators = append(validators, - &knf.Validator{STORAGE_SFTP_HOST, knfv.Empty, nil}, - &knf.Validator{STORAGE_SFTP_USER, knfv.Empty, nil}, - &knf.Validator{STORAGE_SFTP_KEY, knfv.Empty, nil}, - &knf.Validator{STORAGE_SFTP_PATH, knfv.Empty, nil}, + &knf.Validator{STORAGE_S3_HOST, knfv.Set, nil}, + &knf.Validator{STORAGE_S3_ACCESS_KEY, knfv.Set, nil}, + &knf.Validator{STORAGE_S3_SECRET_KEY, knfv.Set, nil}, + &knf.Validator{STORAGE_S3_BUCKET, knfv.Set, nil}, ) + } - case "s3": + if options.GetB(OPT_SERVER) { validators = append(validators, - &knf.Validator{STORAGE_S3_HOST, 
knfv.Empty, nil}, - &knf.Validator{STORAGE_S3_ACCESS_KEY, knfv.Empty, nil}, - &knf.Validator{STORAGE_S3_SECRET_KEY, knfv.Empty, nil}, - &knf.Validator{STORAGE_S3_BUCKET, knfv.Empty, nil}, - &knf.Validator{STORAGE_S3_PATH, knfv.Empty, nil}, + &knf.Validator{SERVER_IP, knfn.IP, nil}, + &knf.Validator{SERVER_PORT, knfn.Port, nil}, + ) + } + + if knfu.GetS(STORAGE_ENCRYPTION_KEY) != "" { + validators = append(validators, + &knf.Validator{STORAGE_ENCRYPTION_KEY, knfv.LenGreater, 16}, + &knf.Validator{STORAGE_ENCRYPTION_KEY, knfv.LenLess, 96}, ) } @@ -331,7 +365,7 @@ func setupLogger() error { var err error if knfu.GetS(LOG_FILE) != "" { - err = log.Set(knfu.GetS(LOG_FILE), knfu.GetM(LOG_MODE, 640)) + err = log.Set(knfu.GetS(LOG_FILE), knfu.GetM(LOG_MODE, 0640)) if err != nil { return err @@ -347,7 +381,7 @@ func setupLogger() error { if knfu.GetS(LOG_FORMAT) == "" && container.IsContainer() { log.Global.UseJSON = true } else { - switch knfu.GetS(LOG_FORMAT) { + switch strings.ToLower(knfu.GetS(LOG_FORMAT)) { case "json": log.Global.UseJSON = true case "text", "": @@ -369,218 +403,10 @@ func setupTemp() error { return err } -// process starts backup creation -func process(target string) bool { - var dispatcher *events.Dispatcher - - if options.GetB(OPT_INTERACTIVE) { - dispatcher = events.NewDispatcher() - addEventsHandlers(dispatcher) - } - - defer temp.Clean() - - bkpr, err := getBackuper(target) - - if err != nil { - log.Crit("Can't start backuping process: %v", err) - return false - } - - bkpr.SetDispatcher(dispatcher) - - outputFileName := getOutputFileName(target) - tmpFile := path.Join(temp.MkName(".zip"), outputFileName) - - err = bkpr.Backup(tmpFile) - - if err != nil { - spinner.Done(false) - log.Crit("Error while backuping process: %v", err) - return false - } - - log.Info("Backup process successfully finished!") - - updr, err := getUploader(target) - - if err != nil { - log.Crit("Can't start uploading process: %v", err) - return false - } - - 
updr.SetDispatcher(dispatcher) - - err = updr.Upload(tmpFile, outputFileName) - - if err != nil { - spinner.Done(false) - log.Crit("Error while uploading process: %v", err) - return false - } - - return true -} - -// getBackuper returns backuper instances -func getBackuper(target string) (backuper.Backuper, error) { - var err error - var bkpr backuper.Backuper - - bkpConfig, err := getBackuperConfig(target) - - if err != nil { - return nil, err - } - - switch target { - case TARGET_JIRA: - bkpr, err = jira.NewBackuper(bkpConfig) - case TARGET_CONFLUENCE: - bkpr, err = confluence.NewBackuper(bkpConfig) - } - - return bkpr, nil -} - -// getOutputFileName returns name for backup output file -func getOutputFileName(target string) string { - var template string - - switch target { - case TARGET_JIRA: - template = knfu.GetS(JIRA_OUTPUT_FILE, `jira-backup-%Y-%m-%d`) + ".zip" - case TARGET_CONFLUENCE: - template = knfu.GetS(JIRA_OUTPUT_FILE, `confluence-backup-%Y-%m-%d`) + ".zip" - } - - return timeutil.Format(time.Now(), template) -} - -// getBackuperConfig returns configuration for backuper -func getBackuperConfig(target string) (*backuper.Config, error) { - switch target { - case TARGET_JIRA: - return &backuper.Config{ - Account: knfu.GetS(ACCESS_ACCOUNT), - Email: knfu.GetS(ACCESS_EMAIL), - APIKey: knfu.GetS(ACCESS_API_KEY), - WithAttachments: knfu.GetB(JIRA_INCLUDE_ATTACHMENTS), - ForCloud: knfu.GetB(JIRA_CLOUD_FORMAT), - }, nil - - case TARGET_CONFLUENCE: - return &backuper.Config{ - Account: knfu.GetS(ACCESS_ACCOUNT), - Email: knfu.GetS(ACCESS_EMAIL), - APIKey: knfu.GetS(ACCESS_API_KEY), - WithAttachments: knfu.GetB(CONFLUENCE_INCLUDE_ATTACHMENTS), - ForCloud: knfu.GetB(CONFLUENCE_CLOUD_FORMAT), - }, nil - } - - return nil, fmt.Errorf("Unknown target %q", target) -} - -// getUploader returns uploader instance -func getUploader(target string) (uploader.Uploader, error) { - var err error - var updr uploader.Uploader - - switch knfu.GetS(STORAGE_TYPE) { - case "fs": - 
updr, err = fs.NewUploader(&fs.Config{ - Path: path.Join(knfu.GetS(STORAGE_FS_PATH), target), - Mode: knfu.GetM(STORAGE_FS_MODE, 0600), - }) - - case "sftp": - keyData, err := readPrivateKeyData() - - if err != nil { - return nil, err - } - - updr, err = sftp.NewUploader(&sftp.Config{ - Host: knfu.GetS(STORAGE_SFTP_HOST), - User: knfu.GetS(STORAGE_SFTP_USER), - Key: keyData, - Path: path.Join(knfu.GetS(STORAGE_SFTP_PATH), target), - Mode: knfu.GetM(STORAGE_SFTP_MODE, 0600), - }) - - case "s3": - updr, err = s3.NewUploader(&s3.Config{ - Host: knfu.GetS(STORAGE_S3_HOST), - Region: knfu.GetS(STORAGE_S3_REGION), - AccessKeyID: knfu.GetS(STORAGE_S3_ACCESS_KEY), - SecretKey: knfu.GetS(STORAGE_S3_SECRET_KEY), - Bucket: knfu.GetS(STORAGE_S3_BUCKET), - Path: path.Join(knfu.GetS(STORAGE_S3_PATH), target), - }) - } - - return updr, err -} - -// readPrivateKeyData reads private key data -func readPrivateKeyData() ([]byte, error) { - if fsutil.IsExist(knfu.GetS(STORAGE_SFTP_KEY)) { - return os.ReadFile(knfu.GetS(STORAGE_SFTP_KEY)) - } - - return base64.StdEncoding.DecodeString(knfu.GetS(STORAGE_SFTP_KEY)) -} - -// addEventsHandlers registers events handlers -func addEventsHandlers(dispatcher *events.Dispatcher) { - dispatcher.AddHandler(backuper.EVENT_BACKUP_STARTED, func(payload any) { - fmtc.NewLine() - spinner.Show("Starting downloading process") - }) - - dispatcher.AddHandler(backuper.EVENT_BACKUP_PROGRESS, func(payload any) { - p := payload.(*backuper.ProgressInfo) - spinner.Update("[%d%%] %s", p.Progress, p.Message) - }) - - dispatcher.AddHandler(backuper.EVENT_BACKUP_SAVING, func(payload any) { - spinner.Done(true) - spinner.Show("Fetching backup file") - }) - - dispatcher.AddHandler(backuper.EVENT_BACKUP_DONE, func(payload any) { - spinner.Done(true) - }) - - dispatcher.AddHandler(uploader.EVENT_UPLOAD_STARTED, func(payload any) { - spinner.Show("Uploading backup file to %s storage", payload) - }) - - dispatcher.AddHandler(uploader.EVENT_UPLOAD_PROGRESS, func(payload 
any) { - p := payload.(*uploader.ProgressInfo) - spinner.Update( - "[%s] Uploading file (%s/%s)", - fmtutil.PrettyPerc(p.Progress), - fmtutil.PrettySize(p.Current), - fmtutil.PrettySize(p.Total), - ) - }) - - dispatcher.AddHandler(uploader.EVENT_UPLOAD_DONE, func(payload any) { - spinner.Update("Uploading file") - spinner.Done(true) - fmtc.NewLine() - }) -} - -// printError prints error message to console -func printError(f string, a ...interface{}) { - if len(a) == 0 { - fmtc.Fprintln(os.Stderr, "{r}"+f+"{!}") - } else { - fmtc.Fprintf(os.Stderr, "{r}"+f+"{!}\n", a...) - } +// setupReq configures HTTP request engine +func setupReq() error { + req.SetUserAgent("AtlassianCloudBackuper", VER) + return nil } // ////////////////////////////////////////////////////////////////////////////////// // @@ -671,6 +497,7 @@ func genUsage(section string) *usage.Info { info.AddOption(OPT_CONFIG, "Path to configuration file", "file") info.AddOption(OPT_INTERACTIVE, "Interactive mode") + info.AddOption(OPT_SERVER, "Server mode") info.AddOption(OPT_NO_COLOR, "Disable colors in output") info.AddOption(OPT_HELP, "Show this help message") info.AddOption(OPT_VER, "Show version") @@ -679,7 +506,11 @@ func genUsage(section string) *usage.Info { addUnitedOption(info, ACCESS_ACCOUNT, "Account name", "name") addUnitedOption(info, ACCESS_EMAIL, "User email with access to API", "email") addUnitedOption(info, ACCESS_API_KEY, "API key", "key") + addUnitedOption(info, SERVER_IP, "HTTP server IP", "ip") + addUnitedOption(info, SERVER_PORT, "HTTP server port", "port") + addUnitedOption(info, SERVER_ACCESS_TOKEN, "HTTP access token", "token") addUnitedOption(info, STORAGE_TYPE, "Storage type", "fs/sftp/s3") + addUnitedOption(info, STORAGE_ENCRYPTION_KEY, "Data encryption key", "key") addUnitedOption(info, STORAGE_FS_PATH, "Path on system for backups", "path") addUnitedOption(info, STORAGE_FS_MODE, "File mode on system", "mode") addUnitedOption(info, STORAGE_SFTP_HOST, "SFTP host", "host") diff 
--git a/app/basic.go b/app/basic.go new file mode 100644 index 0000000..876078b --- /dev/null +++ b/app/basic.go @@ -0,0 +1,134 @@ +package app + +// ////////////////////////////////////////////////////////////////////////////////// // +// // +// Copyright (c) 2024 ESSENTIAL KAOS // +// Apache License, Version 2.0 // +// // +// ////////////////////////////////////////////////////////////////////////////////// // + +import ( + "fmt" + + "github.com/essentialkaos/ek/v13/events" + "github.com/essentialkaos/ek/v13/fmtc" + "github.com/essentialkaos/ek/v13/fmtutil" + "github.com/essentialkaos/ek/v13/log" + "github.com/essentialkaos/ek/v13/options" + "github.com/essentialkaos/ek/v13/path" + "github.com/essentialkaos/ek/v13/spinner" + "github.com/essentialkaos/ek/v13/terminal" + + knfu "github.com/essentialkaos/ek/v13/knf/united" + + "github.com/essentialkaos/atlassian-cloud-backuper/backuper" + "github.com/essentialkaos/atlassian-cloud-backuper/uploader" +) + +// ////////////////////////////////////////////////////////////////////////////////// // + +// startApp starts app in basic mode +func startApp(args options.Arguments) error { + var dispatcher *events.Dispatcher + + target := args.Get(0).String() + + if options.GetB(OPT_INTERACTIVE) { + dispatcher = events.NewDispatcher() + addEventsHandlers(dispatcher) + } + + if knfu.GetS(STORAGE_ENCRYPTION_KEY) != "" { + fmtc.NewLine() + terminal.Warn("▲ Backup will be encrypted while uploading. You will not be able to use the") + terminal.Warn(" backup if you lose the encryption key. 
Keep it in a safe place.") + } + + defer temp.Clean() + + fmtc.NewLine() + + bkpr, err := getBackuper(target) + + if err != nil { + return fmt.Errorf("Can't start backuping process: %w", err) + } + + updr, err := getUploader(target) + + if err != nil { + return fmt.Errorf("Can't start uploading process: %w", err) + } + + bkpr.SetDispatcher(dispatcher) + updr.SetDispatcher(dispatcher) + + outputFileName := getOutputFileName(target) + tmpDir, err := temp.MkDir() + + if err != nil { + spinner.Done(false) + return fmt.Errorf("Can't create temporary directory: %w", err) + } + + tmpFile := path.Join(tmpDir, outputFileName) + + err = bkpr.Backup(tmpFile) + + if err != nil { + spinner.Done(false) + return fmt.Errorf("Error while backuping process: %w", err) + } + + log.Info("Backup process successfully finished!") + + err = updr.Upload(tmpFile, outputFileName) + + if err != nil { + spinner.Done(false) + return fmt.Errorf("Error while uploading process: %w", err) + } + + return nil +} + +// addEventsHandlers registers events handlers +func addEventsHandlers(dispatcher *events.Dispatcher) { + dispatcher.AddHandler(backuper.EVENT_BACKUP_STARTED, func(payload any) { + spinner.Show("Starting downloading process") + }) + + dispatcher.AddHandler(backuper.EVENT_BACKUP_PROGRESS, func(payload any) { + p := payload.(*backuper.ProgressInfo) + spinner.Update("{s}(%d%%){!} %s", p.Progress, p.Message) + }) + + dispatcher.AddHandler(backuper.EVENT_BACKUP_SAVING, func(payload any) { + spinner.Done(true) + spinner.Show("Fetching backup file") + }) + + dispatcher.AddHandler(backuper.EVENT_BACKUP_DONE, func(payload any) { + spinner.Done(true) + }) + + dispatcher.AddHandler(uploader.EVENT_UPLOAD_STARTED, func(payload any) { + spinner.Show("Uploading backup file to %s storage", payload) + }) + + dispatcher.AddHandler(uploader.EVENT_UPLOAD_PROGRESS, func(payload any) { + p := payload.(*uploader.ProgressInfo) + spinner.Update( + "{s}(%5s){!} Uploading file {s-}(%7s | %7s){!}", + 
fmtutil.PrettyPerc(p.Progress), + fmtutil.PrettySize(p.Current), + fmtutil.PrettySize(p.Total), + ) + }) + + dispatcher.AddHandler(uploader.EVENT_UPLOAD_DONE, func(payload any) { + spinner.Update("Uploading file") + spinner.Done(true) + fmtc.NewLine() + }) +} diff --git a/app/common.go b/app/common.go new file mode 100644 index 0000000..4fe873a --- /dev/null +++ b/app/common.go @@ -0,0 +1,150 @@ +package app + +// ////////////////////////////////////////////////////////////////////////////////// // +// // +// Copyright (c) 2024 ESSENTIAL KAOS // +// Apache License, Version 2.0 // +// // +// ////////////////////////////////////////////////////////////////////////////////// // + +import ( + "encoding/base64" + "fmt" + "os" + "strings" + "time" + + "github.com/essentialkaos/ek/v13/fsutil" + "github.com/essentialkaos/ek/v13/path" + "github.com/essentialkaos/ek/v13/timeutil" + + "github.com/essentialkaos/katana" + + knfu "github.com/essentialkaos/ek/v13/knf/united" + + "github.com/essentialkaos/atlassian-cloud-backuper/backuper" + "github.com/essentialkaos/atlassian-cloud-backuper/backuper/confluence" + "github.com/essentialkaos/atlassian-cloud-backuper/backuper/jira" + "github.com/essentialkaos/atlassian-cloud-backuper/uploader" + "github.com/essentialkaos/atlassian-cloud-backuper/uploader/fs" + "github.com/essentialkaos/atlassian-cloud-backuper/uploader/s3" + "github.com/essentialkaos/atlassian-cloud-backuper/uploader/sftp" +) + +// ////////////////////////////////////////////////////////////////////////////////// // + +// getBackuper returns backuper instances +func getBackuper(target string) (backuper.Backuper, error) { + var err error + var bkpr backuper.Backuper + + bkpConfig, err := getBackuperConfig(target) + + if err != nil { + return nil, err + } + + switch target { + case TARGET_JIRA: + bkpr, err = jira.NewBackuper(bkpConfig) + case TARGET_CONFLUENCE: + bkpr, err = confluence.NewBackuper(bkpConfig) + } + + return bkpr, err +} + +// getOutputFileName returns 
name for backup output file +func getOutputFileName(target string) string { + var template string + + switch target { + case TARGET_JIRA: + template = knfu.GetS(JIRA_OUTPUT_FILE, `jira-backup-%Y-%m-%d`) + ".zip" + case TARGET_CONFLUENCE: + template = knfu.GetS(CONFLUENCE_OUTPUT_FILE, `confluence-backup-%Y-%m-%d`) + ".zip" + } + + return timeutil.Format(time.Now(), template) +} + +// getBackuperConfig returns configuration for backuper +func getBackuperConfig(target string) (*backuper.Config, error) { + switch target { + case TARGET_JIRA: + return &backuper.Config{ + Account: knfu.GetS(ACCESS_ACCOUNT), + Email: knfu.GetS(ACCESS_EMAIL), + APIKey: knfu.GetS(ACCESS_API_KEY), + WithAttachments: knfu.GetB(JIRA_INCLUDE_ATTACHMENTS), + ForCloud: knfu.GetB(JIRA_CLOUD_FORMAT), + }, nil + + case TARGET_CONFLUENCE: + return &backuper.Config{ + Account: knfu.GetS(ACCESS_ACCOUNT), + Email: knfu.GetS(ACCESS_EMAIL), + APIKey: knfu.GetS(ACCESS_API_KEY), + WithAttachments: knfu.GetB(CONFLUENCE_INCLUDE_ATTACHMENTS), + ForCloud: knfu.GetB(CONFLUENCE_CLOUD_FORMAT), + }, nil + } + + return nil, fmt.Errorf("Unknown target %q", target) +} + +// getUploader returns uploader instance +func getUploader(target string) (uploader.Uploader, error) { + var secret *katana.Secret + + if knfu.GetS(STORAGE_ENCRYPTION_KEY) != "" { + secret = katana.NewSecret(knfu.GetS(STORAGE_ENCRYPTION_KEY)) + } + + switch strings.ToLower(knfu.GetS(STORAGE_TYPE)) { + case STORAGE_FS: + return fs.NewUploader(&fs.Config{ + Path: path.Join(knfu.GetS(STORAGE_FS_PATH), target), + Mode: knfu.GetM(STORAGE_FS_MODE, 0600), + Secret: secret, + }) + + case STORAGE_SFTP: + keyData, err := readPrivateKeyData() + + if err != nil { + return nil, err + } + + return sftp.NewUploader(&sftp.Config{ + Host: knfu.GetS(STORAGE_SFTP_HOST), + User: knfu.GetS(STORAGE_SFTP_USER), + Key: keyData, + Path: path.Join(knfu.GetS(STORAGE_SFTP_PATH), target), + Mode: knfu.GetM(STORAGE_SFTP_MODE, 0600), + Secret: secret, + }) + + case STORAGE_S3: + return 
s3.NewUploader(&s3.Config{ + Host: knfu.GetS(STORAGE_S3_HOST), + Region: knfu.GetS(STORAGE_S3_REGION), + AccessKeyID: knfu.GetS(STORAGE_S3_ACCESS_KEY), + SecretKey: knfu.GetS(STORAGE_S3_SECRET_KEY), + Bucket: knfu.GetS(STORAGE_S3_BUCKET), + Path: path.Join(knfu.GetS(STORAGE_S3_PATH), target), + Secret: secret, + }) + } + + return nil, fmt.Errorf("Unknown storage type %q", knfu.GetS(STORAGE_TYPE)) +} + +// readPrivateKeyData reads private key data +func readPrivateKeyData() ([]byte, error) { + if fsutil.IsExist(knfu.GetS(STORAGE_SFTP_KEY)) { + return os.ReadFile(knfu.GetS(STORAGE_SFTP_KEY)) + } + + return base64.StdEncoding.DecodeString(knfu.GetS(STORAGE_SFTP_KEY)) +} diff --git a/app/server.go b/app/server.go new file mode 100644 index 0000000..8d25121 --- /dev/null +++ b/app/server.go @@ -0,0 +1,224 @@ +package app + +// ////////////////////////////////////////////////////////////////////////////////// // +// // +// Copyright (c) 2024 ESSENTIAL KAOS // +// Apache License, Version 2.0 // +// // +// ////////////////////////////////////////////////////////////////////////////////// // + +import ( + "fmt" + "net/http" + "os" + "strings" + "time" + + "github.com/essentialkaos/ek/v13/log" + "github.com/essentialkaos/ek/v13/strutil" + + knfu "github.com/essentialkaos/ek/v13/knf/united" +) + +// ////////////////////////////////////////////////////////////////////////////////// // + +// startServer starts app in server mode +func startServer() error { + port := strutil.Q(os.Getenv("PORT"), knfu.GetS(SERVER_PORT)) + ip := knfu.GetS(SERVER_IP) + + log.Info( + "Starting HTTP server", + log.F{"server-ip", strutil.Q(ip, "localhost")}, + log.F{"server-port", port}, + ) + + mux := http.NewServeMux() + + server := &http.Server{ + Addr: ip + ":" + port, + Handler: mux, + ReadTimeout: 3 * time.Second, + WriteTimeout: 3 * time.Second, + } + + mux.HandleFunc("/create", createBackupHandler) + mux.HandleFunc("/download", downloadBackupHandler) + + return server.ListenAndServe() +} + +// 
////////////////////////////////////////////////////////////////////////////////// // + +// createBackupHandler is handler for backup creation requests +func createBackupHandler(rw http.ResponseWriter, r *http.Request) { + updateResponseHeaders(rw) + + log.Info("Got create request", getConfigurationFields()) + + target := strings.ToLower(r.URL.Query().Get("target")) + token := r.URL.Query().Get("token") + + err := validateRequestQuery(target, token) + + if err != nil { + log.Error("Invalid request query: %v", err.Error()) + rw.WriteHeader(http.StatusBadRequest) + return + } + + bkpr, err := getBackuper(target) + + if err != nil { + log.Error("Can't create backuper instance: %v", err) + rw.WriteHeader(http.StatusInternalServerError) + return + } + + taskID, err := bkpr.Start() + + if err != nil { + log.Error("Can't create backup: %v", err) + rw.WriteHeader(http.StatusBadGateway) + return + } + + log.Info("Backup request successfully created", log.F{"task-id", taskID}) + + rw.WriteHeader(http.StatusOK) +} + +// downloadBackupHandler is handler for downloading a backup and uploading it to storage +func downloadBackupHandler(rw http.ResponseWriter, r *http.Request) { + var lf log.Fields + + updateResponseHeaders(rw) + + log.Info("Got download request", getConfigurationFields()) + + target := strings.ToLower(r.URL.Query().Get("target")) + token := r.URL.Query().Get("token") + + err := validateRequestQuery(target, token) + + if err != nil { + log.Error("Invalid request query: %v", err.Error()) + rw.WriteHeader(http.StatusBadRequest) + return + } + + bkpr, err := getBackuper(target) + + if err != nil { + log.Error("Can't create backuper instance: %v", err) + rw.WriteHeader(http.StatusInternalServerError) + return + } + + backupFile, err := bkpr.GetBackupFile() + + if err != nil { + log.Error("Can't find backup file: %v", err) + rw.WriteHeader(http.StatusBadGateway) + return + } + + log.Info("Starting downloading of backup", log.F{"backup-file", backupFile}) + + br, err := bkpr.GetReader(backupFile) + + if err != nil 
{ + log.Error("Can't get reader for backup file: %v", err) + rw.WriteHeader(http.StatusInternalServerError) + return + } + + updr, err := getUploader(target) + + if err != nil { + log.Error("Can't create uploader instance: %v", err) + rw.WriteHeader(http.StatusInternalServerError) + return + } + + outputFile := getOutputFileName(target) + + lf.Add( + log.F{"backup-file", backupFile}, + log.F{"output-file", outputFile}, + ) + + log.Info("Uploading backup to storage", lf) + + err = updr.Write(br, outputFile, 0) + + if err != nil { + log.Error("Can't upload backup file: %v", err, lf) + rw.WriteHeader(http.StatusInternalServerError) + return + } + + log.Info("Backup successfully uploaded", lf) + + rw.WriteHeader(http.StatusOK) +} + +// ////////////////////////////////////////////////////////////////////////////////// // + +// validateRequestQuery validates request query arguments +func validateRequestQuery(target, token string) error { + switch { + case target == "": + return fmt.Errorf("target is empty") + case knfu.GetS(SERVER_ACCESS_TOKEN) != "" && token == "": + return fmt.Errorf("token is empty") + case target != TARGET_JIRA && target != TARGET_CONFLUENCE: + return fmt.Errorf("Unknown target %q", target) + case knfu.GetS(SERVER_ACCESS_TOKEN) != "" && token != knfu.GetS(SERVER_ACCESS_TOKEN): + return fmt.Errorf("Invalid access token") + } + + return nil +} + +// getConfigurationFields returns log fields +func getConfigurationFields() *log.Fields { + lf := &log.Fields{} + + lf.Add( + log.Field{"access-account", knfu.GetS(ACCESS_ACCOUNT)}, + log.Field{"access-email", knfu.GetS(ACCESS_EMAIL)}, + log.Field{"access-key", knfu.GetS(ACCESS_API_KEY) != ""}, + log.Field{"storage-type", knfu.GetS(STORAGE_TYPE)}, + ) + + switch strings.ToLower(knfu.GetS(STORAGE_TYPE)) { + case STORAGE_FS: + lf.Add( + log.Field{"storage-fs-path", knfu.GetS(STORAGE_FS_PATH)}, + ) + + case STORAGE_SFTP: + lf.Add( + log.Field{"storage-sftp-host", knfu.GetS(STORAGE_SFTP_HOST)}, + 
log.Field{"storage-sftp-user", knfu.GetS(STORAGE_SFTP_USER)}, + log.Field{"storage-sftp-path", knfu.GetS(STORAGE_SFTP_PATH)}, + ) + + case STORAGE_S3: + lf.Add( + log.Field{"storage-s3-host", knfu.GetS(STORAGE_S3_HOST)}, + log.Field{"storage-s3-bucket", knfu.GetS(STORAGE_S3_BUCKET)}, + log.Field{"storage-s3-path", knfu.GetS(STORAGE_S3_PATH)}, + log.Field{"storage-s3-key-id", knfu.GetS(STORAGE_S3_ACCESS_KEY)}, + ) + } + + return lf +} + +// updateResponseHeaders updates response headers +func updateResponseHeaders(rw http.ResponseWriter) { + rw.Header().Set("X-Powered-By", "EK|"+APP) + rw.Header().Set("X-App-Version", VER) +} diff --git a/backuper/backuper.go b/backuper/backuper.go index 5b00eaa..a7f8821 100644 --- a/backuper/backuper.go +++ b/backuper/backuper.go @@ -11,7 +11,7 @@ import ( "fmt" "io" - "github.com/essentialkaos/ek/v12/events" + "github.com/essentialkaos/ek/v13/events" ) // ////////////////////////////////////////////////////////////////////////////////// // diff --git a/backuper/confluence/confluence-backuper.go b/backuper/confluence/confluence-backuper.go index 6f05568..e445c70 100644 --- a/backuper/confluence/confluence-backuper.go +++ b/backuper/confluence/confluence-backuper.go @@ -16,11 +16,11 @@ import ( "strings" "time" - "github.com/essentialkaos/ek/v12/events" - "github.com/essentialkaos/ek/v12/fmtutil" - "github.com/essentialkaos/ek/v12/fsutil" - "github.com/essentialkaos/ek/v12/log" - "github.com/essentialkaos/ek/v12/req" + "github.com/essentialkaos/ek/v13/events" + "github.com/essentialkaos/ek/v13/fmtutil" + "github.com/essentialkaos/ek/v13/fsutil" + "github.com/essentialkaos/ek/v13/log" + "github.com/essentialkaos/ek/v13/req" "github.com/essentialkaos/atlassian-cloud-backuper/backuper" ) @@ -98,8 +98,17 @@ func (b *ConfluenceBackuper) Start() (string, error) { info, _ := b.getBackupProgress() if info != nil && !info.IsOutdated { - log.Info("Found previously created backup task") + log.Info( + "Found previously created backup task", + 
log.F{"backup-status", info.CurrentStatus}, + log.F{"backup-perc", info.AlternativePercentage}, + log.F{"backup-size", info.Size}, + log.F{"backup-file", info.Filename}, + log.F{"backup-outdated", info.IsOutdated}, + ) } else { + log.Info("No previously created backup task or task is outdated, starting new backup…") + err := b.startBackup() if err != nil { @@ -142,14 +151,14 @@ func (b *ConfluenceBackuper) Progress(taskID string) (string, error) { if progressInfo.Size == 0 && progressInfo.AlternativePercentage >= lastProgress { log.Info( - "(%s) Backup in progress: %s", + "(%s%%) Backup in progress: %s", progressInfo.AlternativePercentage, progressInfo.CurrentStatus, ) lastProgress = progressInfo.AlternativePercentage } - if progressInfo.Size != 0 && progressInfo.Filename != "" { + if progressInfo.Filename != "" { backupFileURL = progressInfo.Filename break } diff --git a/backuper/jira/jira-backuper.go b/backuper/jira/jira-backuper.go index 5bebb9f..a189d56 100644 --- a/backuper/jira/jira-backuper.go +++ b/backuper/jira/jira-backuper.go @@ -14,11 +14,11 @@ import ( "os" "time" - "github.com/essentialkaos/ek/v12/events" - "github.com/essentialkaos/ek/v12/fmtutil" - "github.com/essentialkaos/ek/v12/fsutil" - "github.com/essentialkaos/ek/v12/log" - "github.com/essentialkaos/ek/v12/req" + "github.com/essentialkaos/ek/v13/events" + "github.com/essentialkaos/ek/v13/fmtutil" + "github.com/essentialkaos/ek/v13/fsutil" + "github.com/essentialkaos/ek/v13/log" + "github.com/essentialkaos/ek/v13/req" "github.com/essentialkaos/atlassian-cloud-backuper/backuper" ) @@ -104,7 +104,7 @@ func (b *JiraBackuper) Start() (string, error) { if backupTaskID != "" { log.Info("Found previously created backup task with ID %s", backupTaskID) } else { - log.Info("No previously created task found, run backup…") + log.Info("No previously created task found, starting new backup…") backupTaskID, err = b.startBackup() diff --git a/cloudfunc/README.md b/cloudfunc/README.md deleted file mode 100644 
index bb4d796..0000000 --- a/cloudfunc/README.md +++ /dev/null @@ -1,93 +0,0 @@ -## Cloud function - -> [!IMPORTANT] -> Currently we support only [Yandex.Cloud serverless/cloud functions](https://yandex.cloud/en/docs/functions/lang/golang/). AWS Lambda is not supported yet. - -### Targets - -- `jira` — Create backup of Jira instance; -- `confluence` — Create backup of Confluence instance. - -### Stages - -- `create` — generate backup file for given target; -- `download` — download backup file for given target. - -### Recommended roles for service account (Yandex.Cloud) - -- `storage.uploader` — for uploading data to S3; -- `lockbox.payloadViewer` — for viewing secrets in Lockbox; -- `functions.functionInvoker` — for working with triggers. - -### Handlers - -- `cloudfunc/ycfunc.Request` — handler for HTTP requests; -- `cloudfunc/ycfunc.Trigger` — handler for events from timer trigger. - -#### `Request` configuration - -You must pass `target` and `stage` using query string. - -Example: - -```bash -# Create Jira backup (asynchronously) -curl 'https://functions.yandexcloud.net/abcdeabcdeabcdeabcde?target=jira&stage=create' - -# Download Jira backup -curl 'https://functions.yandexcloud.net/abcdeabcdeabcdeabcde?target=jira&stage=download' - -# Create Confluence backup (asynchronously) -curl 'https://functions.yandexcloud.net/abcdeabcdeabcdeabcde?target=confluence&stage=create' - -# Download Confluence backup -curl 'https://functions.yandexcloud.net/abcdeabcdeabcdeabcde?target=confluence&stage=download' -``` - -HTTP status codes: - -- `200` — request successfully processed; -- `400` — query validation error; -- `500` — configuration validation error. - -#### `Trigger` configuration - -You must pass `target` and `stage` using event payload using ';' as a separator. 
- -Supported payload values: - -``` -jira;create -jira;download -confluence;create -confluence;download -``` - -### Environment variables - -| Env | Type | Required | Description | -|-----|------|----------|-------------| -| `ACCESS_ACCOUNT` | sᴛʀɪɴɢ | Yes | _Account name_ | -| `ACCESS_EMAIL` | sᴛʀɪɴɢ | Yes | _User email with access to API_ | -| `ACCESS_API_KEY` | sᴛʀɪɴɢ | Yes | _API key_ | -| `STORAGE_TYPE` | sᴛʀɪɴɢ | Yes | _Storage type (fs/sftp/s3)_ | -| `STORAGE_FS_PATH` | sᴛʀɪɴɢ | No | _Path on system for backups_ | -| `STORAGE_FS_MODE` | sᴛʀɪɴɢ | No | _File mode on system_ | -| `STORAGE_SFTP_HOST` | sᴛʀɪɴɢ | No | _SFTP host_ | -| `STORAGE_SFTP_USER` | sᴛʀɪɴɢ | No | _SFTP user name_ | -| `STORAGE_SFTP_KEY` | sᴛʀɪɴɢ | No | _Base64-encoded private key_ | -| `STORAGE_SFTP_PATH` | sᴛʀɪɴɢ | No | _Path on SFTP_ | -| `STORAGE_SFTP_MODE` | sᴛʀɪɴɢ | No | _File mode on SFTP_ | -| `STORAGE_S3_HOST` | sᴛʀɪɴɢ | No | _S3 host_ | -| `STORAGE_S3_REGION` | sᴛʀɪɴɢ | No | _S3 region_ | -| `STORAGE_S3_ACCESS_KEY` | sᴛʀɪɴɢ | No | _S3 access key ID_ | -| `STORAGE_S3_SECRET_KEY` | sᴛʀɪɴɢ | No | _S3 access secret key_ | -| `STORAGE_S3_BUCKET` | sᴛʀɪɴɢ | No | _S3 bucket_ | -| `STORAGE_S3_PATH` | sᴛʀɪɴɢ | No | _Path for backups_ | -| `JIRA_OUTPUT_FILE` | sᴛʀɪɴɢ | No | _Jira backup output file name template_ | -| `JIRA_INCLUDE_ATTACHMENTS` | ʙᴏᴏʟᴇᴀɴ | No | _Include attachments to Jira backup_ | -| `JIRA_CLOUD_FORMAT` | ʙᴏᴏʟᴇᴀɴ | No | _Create Jira backup for Cloud_ | -| `CONFLUENCE_OUTPUT_FILE` | sᴛʀɪɴɢ | No | _Confluence backup output file name template_ | -| `CONFLUENCE_INCLUDE_ATTACHMENTS` | ʙᴏᴏʟᴇᴀɴ | No | _Include attachments to Confluence backup_ | -| `CONFLUENCE_CLOUD_FORMAT` | ʙᴏᴏʟᴇᴀɴ | No | _Create Confluence backup for Cloud_ | -| `LOG_LEVEL` | sᴛʀɪɴɢ | No | _Log level (debug,info,warn,error)_ | diff --git a/cloudfunc/ycfunc.go b/cloudfunc/ycfunc.go deleted file mode 100644 index b3f4acd..0000000 --- a/cloudfunc/ycfunc.go +++ /dev/null @@ -1,535 +0,0 @@ -package main - -// 
////////////////////////////////////////////////////////////////////////////////// // -// // -// Copyright (c) 2024 ESSENTIAL KAOS // -// Apache License, Version 2.0 // -// // -// ////////////////////////////////////////////////////////////////////////////////// // - -import ( - "context" - "encoding/base64" - "fmt" - "net/http" - "os" - "path" - "strconv" - "strings" - "time" - - "github.com/essentialkaos/ek/v12/log" - "github.com/essentialkaos/ek/v12/req" - "github.com/essentialkaos/ek/v12/strutil" - "github.com/essentialkaos/ek/v12/timeutil" - - knfu "github.com/essentialkaos/ek/v12/knf/united" - - "github.com/essentialkaos/atlassian-cloud-backuper/app" - - "github.com/essentialkaos/atlassian-cloud-backuper/backuper" - "github.com/essentialkaos/atlassian-cloud-backuper/backuper/confluence" - "github.com/essentialkaos/atlassian-cloud-backuper/backuper/jira" - - "github.com/essentialkaos/atlassian-cloud-backuper/uploader" - "github.com/essentialkaos/atlassian-cloud-backuper/uploader/fs" - "github.com/essentialkaos/atlassian-cloud-backuper/uploader/s3" - "github.com/essentialkaos/atlassian-cloud-backuper/uploader/sftp" -) - -// ////////////////////////////////////////////////////////////////////////////////// // - -const ( - STAGE_CREATE = "create" - STAGE_DOWNLOAD = "download" -) - -// ////////////////////////////////////////////////////////////////////////////////// // - -type Data struct { - Messages []*Message `json:"messages"` -} - -type Message struct { - Metadata *Metadata `json:"event_metadata"` - Details *Details `json:"details"` -} - -type Metadata struct { - EventType string `json:"event_type"` -} - -type Details struct { - TriggerID string `json:"trigger_id"` - Payload string `json:"payload"` -} - -// ////////////////////////////////////////////////////////////////////////////////// // - -// main is used for compilation errors -func main() { - return -} - -// ////////////////////////////////////////////////////////////////////////////////// // - -// 
Request is handler for HTTP requests -func Request(rw http.ResponseWriter, r *http.Request) { - req.SetUserAgent("AtlassianCloudBackuper|YCFunction", app.VER) - rw.Header().Set("X-Version", app.VER) - - log.Global.UseJSON = true - log.Global.WithCaller = true - - defer log.Flush() - - if !validateConfiguration() { - rw.WriteHeader(500) - return - } - - if !validateRequest(r) { - rw.WriteHeader(400) - return - } - - target := strings.ToLower(r.URL.Query().Get("target")) - stage := strings.ToLower(r.URL.Query().Get("stage")) - - log.Info("Got backup request", log.F{"target", target}, log.F{"stage", stage}) - - var ok bool - - switch stage { - case STAGE_CREATE: - ok = createBackupRequest(target) - case STAGE_DOWNLOAD: - ok = downloadBackupData(target) - } - - if ok { - rw.WriteHeader(200) - } else { - rw.WriteHeader(500) - } -} - -// Trigger is handler for timer trigger -func Trigger(ctx context.Context, data *Data) error { - log.Global.UseJSON = true - log.Global.WithCaller = true - - defer log.Flush() - - if !validatePayload(data) { - return fmt.Errorf("Error while trigger event validation") - } - - target, stage, _ := data.GetPayload() - - log.Info("Got trigger event", log.F{"target", target}, log.F{"stage", stage}) - - var ok bool - - switch stage { - case STAGE_CREATE: - ok = createBackupRequest(target) - case STAGE_DOWNLOAD: - ok = downloadBackupData(target) - } - - if !ok { - return fmt.Errorf("Can't handle event") - } - - return nil -} - -// ////////////////////////////////////////////////////////////////////////////////// // - -// GetPayload extracts target and stage from trigger payload -func (d *Data) GetPayload() (string, string, bool) { - payload := d.Messages[0].Details.Payload - return strings.Cut(payload, ";") -} - -// ////////////////////////////////////////////////////////////////////////////////// // - -// validateRequest validates request data -func validateRequest(r *http.Request) bool { - if r.Method != req.GET { - log.Error("Invalid request: 
Unsupported method") - return false - } - - target := strings.ToLower(r.URL.Query().Get("target")) - stage := strings.ToLower(r.URL.Query().Get("stage")) - - switch target { - case app.TARGET_JIRA, app.TARGET_CONFLUENCE: - // ok - - case "": - log.Error("Invalid request: Target is empty") - return false - - default: - log.Error("Invalid request: Unsupported target", log.F{"target", target}) - return false - } - - switch stage { - case STAGE_CREATE, STAGE_DOWNLOAD: - // ok - - case "": - log.Error("Invalid request: Stage is empty") - return false - - default: - log.Error("Invalid request: Unsupported stage", log.F{"stage", stage}) - return false - } - - return true -} - -// validatePayload validates trigger payload -func validatePayload(data *Data) bool { - switch { - case data == nil: - log.Error("Trigger data is nil") - return false - - case len(data.Messages) == 0: - log.Error("No messages in trigger event") - return false - - case data.Messages[0].Metadata == nil: - log.Error("No metadata in message #0") - return false - - case data.Messages[0].Metadata.EventType != "yandex.cloud.events.serverless.triggers.TimerMessage": - log.Error("Unsupported event type", log.F{"event-type", data.Messages[0].Metadata.EventType}) - return false - - case data.Messages[0].Details == nil: - log.Error("No details in message #0") - return false - - case data.Messages[0].Details.Payload == "": - log.Error("Payload is empty") - return false - - case !strings.Contains(data.Messages[0].Details.Payload, ";"): - log.Error("Payload doesn't have ';' separator", log.F{"payload", data.Messages[0].Details.Payload}) - return false - } - - target, stage, _ := data.GetPayload() - - switch target { - case app.TARGET_JIRA, app.TARGET_CONFLUENCE: - // ok - - case "": - log.Error("Invalid trigger payload: Target is empty") - return false - - default: - log.Error("Invalid trigger payload: Unsupported target", log.F{"target", target}) - return false - } - - switch stage { - case STAGE_CREATE, 
STAGE_DOWNLOAD: - // ok - - case "": - log.Error("Invalid trigger payload: Stage is empty") - return false - - default: - log.Error("Invalid trigger payload: Unsupported stage", log.F{"stage", stage}) - return false - } - - return true -} - -// validateConfiguration validates configuration -func validateConfiguration() bool { - switch { - case getEnvVar(app.ACCESS_ACCOUNT) == "": - log.Error("Invalid configuration: ACCESS_ACCOUNT is empty") - return false - - case getEnvVar(app.ACCESS_EMAIL) == "": - log.Error("Invalid configuration: ACCESS_EMAIL is empty") - return false - - case getEnvVar(app.ACCESS_API_KEY) == "": - log.Error("Invalid configuration: ACCESS_API_KEY is empty") - return false - - case getEnvVar(app.STORAGE_TYPE) == "": - log.Error("Invalid configuration: STORAGE_TYPE is empty") - return false - } - - switch getEnvVar(app.STORAGE_TYPE) { - case "fs", "sftp", "s3": - // ok - default: - log.Error("Invalid configuration: invalid STORAGE_TYPE value %q", getEnvVar(app.STORAGE_TYPE)) - return false - } - - if getEnvVar(app.STORAGE_TYPE) == "s3" { - switch { - case getEnvVar(app.STORAGE_S3_ACCESS_KEY) == "": - log.Error("Invalid configuration: STORAGE_S3_ACCESS_KEY is empty") - return false - case getEnvVar(app.STORAGE_S3_SECRET_KEY) == "": - log.Error("Invalid configuration: STORAGE_S3_SECRET_KEY is empty") - return false - case getEnvVar(app.STORAGE_S3_BUCKET) == "": - log.Error("Invalid configuration: STORAGE_S3_BUCKET is empty") - return false - } - } else if getEnvVar(app.STORAGE_TYPE) == "sftp" { - switch { - case getEnvVar(app.STORAGE_SFTP_HOST) == "": - log.Error("Invalid configuration: STORAGE_SFTP_HOST is empty") - return false - case getEnvVar(app.STORAGE_SFTP_USER) == "": - log.Error("Invalid configuration: STORAGE_SFTP_USER is empty") - return false - case getEnvVar(app.STORAGE_SFTP_KEY) == "": - log.Error("Invalid configuration: STORAGE_SFTP_KEY is empty") - return false - case getEnvVar(app.STORAGE_SFTP_PATH) == "": - log.Error("Invalid 
configuration: STORAGE_SFTP_PATH is empty") - return false - } - } else { - if getEnvVar(app.STORAGE_FS_PATH) == "" { - log.Error("Invalid configuration: STORAGE_FS_PATH is empty") - return false - } - } - - return true -} - -// createBackupRequest sends request for creating backup -func createBackupRequest(target string) bool { - bkpr, err := getBackuper(target) - - if err != nil { - log.Error("Can't create backuper instance: %v", err) - return false - } - - taskID, err := bkpr.Start() - - if err != nil { - log.Error("Can't create backup: %v", err) - return false - } - - log.Info("Backup request successfully created", log.F{"task-id", taskID}) - - return true -} - -// downloadBackupData downloads backup data and upload it to storage -func downloadBackupData(target string) bool { - bkpr, err := getBackuper(target) - - if err != nil { - log.Error("Can't create backuper instance: %v", err) - return false - } - - backupFile, err := bkpr.GetBackupFile() - - if err != nil { - log.Error("Can't find backup file: %v", err) - return false - } - - log.Info("Start downloading of backup", log.F{"backup-file", backupFile}) - - r, err := bkpr.GetReader(backupFile) - - if err != nil { - log.Error("Can't get reader for backup file: %v", err) - return false - } - - updr, err := getUploader(target) - - if err != nil { - log.Error("Can't create uploader instance: %v", err) - return false - } - - outputFile := getOutputFile(target) - - log.Info( - "Uploading backup to storage", - log.F{"backup-file", backupFile}, - log.F{"output-file", outputFile}, - ) - - err = updr.Write(r, outputFile) - - if err != nil { - log.Error( - "Can't upload backup file: %v", err, - log.F{"backup-file", backupFile}, - log.F{"output-file", outputFile}, - ) - return false - } - - return true -} - -// getBackuper returns backuper instance -func getBackuper(target string) (backuper.Backuper, error) { - var err error - var bkpr backuper.Backuper - - config, err := getBackuperConfig(target) - - if err != nil { - 
return nil, err - } - - switch target { - case app.TARGET_JIRA: - bkpr, err = jira.NewBackuper(config) - case app.TARGET_CONFLUENCE: - bkpr, err = confluence.NewBackuper(config) - default: - return nil, fmt.Errorf("Unknown or unsupported target %q", target) - } - - return bkpr, err -} - -// getBackuperConfig returns configuration for backuper -func getBackuperConfig(target string) (*backuper.Config, error) { - switch target { - case app.TARGET_JIRA: - return &backuper.Config{ - Account: getEnvVar(app.ACCESS_ACCOUNT), - Email: getEnvVar(app.ACCESS_EMAIL), - APIKey: getEnvVar(app.ACCESS_API_KEY), - WithAttachments: getEnvVarFlag(app.JIRA_INCLUDE_ATTACHMENTS, true), - ForCloud: getEnvVarFlag(app.JIRA_CLOUD_FORMAT, true), - }, nil - - case app.TARGET_CONFLUENCE: - return &backuper.Config{ - Account: getEnvVar(app.ACCESS_ACCOUNT), - Email: getEnvVar(app.ACCESS_EMAIL), - APIKey: getEnvVar(app.ACCESS_API_KEY), - WithAttachments: getEnvVarFlag(app.CONFLUENCE_INCLUDE_ATTACHMENTS, true), - ForCloud: getEnvVarFlag(app.CONFLUENCE_CLOUD_FORMAT, true), - }, nil - } - - return nil, fmt.Errorf("Unknown or unsupported target %q", target) -} - -// getUploader returns uploader instance -func getUploader(target string) (uploader.Uploader, error) { - var err error - var updr uploader.Uploader - - switch getEnvVar(app.STORAGE_TYPE) { - case "fs": - updr, err = fs.NewUploader(&fs.Config{ - Path: path.Join(getEnvVar(app.STORAGE_FS_PATH), target), - Mode: parseMode(getEnvVar(app.STORAGE_FS_MODE, "0640")), - }) - - case "sftp": - key, err := base64.StdEncoding.DecodeString(getEnvVar(app.STORAGE_SFTP_KEY)) - - if err != nil { - return nil, err - } - - updr, err = sftp.NewUploader(&sftp.Config{ - Host: getEnvVar(app.STORAGE_SFTP_HOST), - User: getEnvVar(app.STORAGE_SFTP_USER), - Key: key, - Path: path.Join(getEnvVar(app.STORAGE_SFTP_PATH), target), - Mode: parseMode(getEnvVar(app.STORAGE_SFTP_MODE, "0640")), - }) - - case "s3": - updr, err = s3.NewUploader(&s3.Config{ - Host: 
getEnvVar(app.STORAGE_S3_HOST, "storage.yandexcloud.net"), - Region: getEnvVar(app.STORAGE_S3_REGION, "ru-central1"), - AccessKeyID: getEnvVar(app.STORAGE_S3_ACCESS_KEY), - SecretKey: getEnvVar(app.STORAGE_S3_SECRET_KEY), - Bucket: getEnvVar(app.STORAGE_S3_BUCKET), - Path: path.Join(getEnvVar(app.STORAGE_S3_PATH), target), - }) - } - - return updr, err -} - -// getOutputFile returns name of output file -func getOutputFile(target string) string { - var template string - - switch target { - case app.TARGET_JIRA: - template = strutil.Q(getEnvVar(app.JIRA_OUTPUT_FILE), `jira-backup-%Y-%m-%d`) + ".zip" - case app.TARGET_CONFLUENCE: - template = strutil.Q(getEnvVar(app.CONFLUENCE_OUTPUT_FILE), `confluence-backup-%Y-%m-%d`) + ".zip" - } - - return timeutil.Format(time.Now(), template) -} - -// getEnvVar reads environment variable -func getEnvVar(name string, defs ...string) string { - value := os.Getenv(knfu.ToEnvVar(name)) - - if value == "" && len(defs) > 0 { - return defs[0] - } - - return value -} - -// getEnvVarFlag reads environment variable with flag -func getEnvVarFlag(name string, def bool) bool { - switch strings.ToLower(getEnvVar(name)) { - case "n", "no", "false", "0": - return false - case "y", "yes", "true", "1": - return true - } - - return def -} - -// parseMode parses file mode -func parseMode(v string) os.FileMode { - m, err := strconv.ParseUint(v, 8, 32) - - if err != nil { - return 0600 - } - - return os.FileMode(m) -} diff --git a/common/atlassian-cloud-backuper-container.knf b/common/atlassian-cloud-backuper-container.knf index 384dd85..ea8d82f 100644 --- a/common/atlassian-cloud-backuper-container.knf +++ b/common/atlassian-cloud-backuper-container.knf @@ -9,11 +9,25 @@ # API key api-key: +[server] + + # HTTP server IP + ip: + + # HTTP server port + port: 8080 + + # Unique token for requests + access-token: + [storage] # Storage type (fs/sftp/s3) type: + # Katana encryption key + encryption-key: + [storage-fs] # Path to directory with backups diff 
--git a/common/atlassian-cloud-backuper.knf b/common/atlassian-cloud-backuper.knf index 87bcd2a..fa40a4a 100644 --- a/common/atlassian-cloud-backuper.knf +++ b/common/atlassian-cloud-backuper.knf @@ -9,11 +9,25 @@ # API key api-key: +[server] + + # HTTP server IP + ip: + + # HTTP server port + port: 8080 + + # Unique token for requests + access-token: + [storage] # Storage type (fs/sftp/s3) type: + # Katana encryption key + encryption-key: + [storage-fs] # Path to directory with backups diff --git a/common/atlassian-cloud-backuper.spec b/common/atlassian-cloud-backuper.spec index 827e07c..b2bef66 100644 --- a/common/atlassian-cloud-backuper.spec +++ b/common/atlassian-cloud-backuper.spec @@ -10,7 +10,7 @@ Summary: Tool for backuping Atlassian cloud services Name: atlassian-cloud-backuper -Version: 0.0.3 +Version: 0.1.0 Release: 0%{?dist} Group: Applications/System License: Apache License, Version 2.0 @@ -22,7 +22,7 @@ Source100: checksum.sha512 BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) -BuildRequires: golang >= 1.21 +BuildRequires: golang >= 1.22 Provides: %{name} = %{version}-%{release} @@ -40,7 +40,10 @@ Tool for backuping Atlassian cloud services (Jira and Confluence). %build if [[ ! 
-d "%{name}/vendor" ]] ; then - echo "This package requires vendored dependencies" + echo -e "----\nThis package requires vendored dependencies\n----" + exit 1 +elif [[ -f "%{name}/%{name}" ]] ; then + echo -e "----\nSources must not contain precompiled binaries\n----" exit 1 fi @@ -110,6 +113,12 @@ rm -rf %{buildroot} ################################################################################ %changelog +* Tue Jul 23 2024 Anton Novojilov - 0.1.0-0 +- Added data encryption feature +- Added server mode +- Code refactoring +- Dependencies update + * Wed Jun 12 2024 Anton Novojilov - 0.0.3-0 - Dependencies update diff --git a/go.mod b/go.mod index 6624f11..7043e3f 100644 --- a/go.mod +++ b/go.mod @@ -3,25 +3,27 @@ module github.com/essentialkaos/atlassian-cloud-backuper go 1.19 require ( - github.com/aws/aws-sdk-go-v2 v1.27.2 - github.com/aws/aws-sdk-go-v2/credentials v1.17.18 - github.com/aws/aws-sdk-go-v2/service/s3 v1.55.1 - github.com/essentialkaos/ek/v12 v12.126.1 + github.com/aws/aws-sdk-go-v2 v1.30.3 + github.com/aws/aws-sdk-go-v2/credentials v1.17.27 + github.com/aws/aws-sdk-go-v2/service/s3 v1.58.2 + github.com/essentialkaos/ek/v13 v13.2.0 + github.com/essentialkaos/katana v0.2.0 github.com/pkg/sftp v1.13.6 - golang.org/x/crypto v0.24.0 + golang.org/x/crypto v0.25.0 ) require ( - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 // indirect - github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.9 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.11 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.9 // indirect - github.com/aws/smithy-go v1.20.2 // indirect + 
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.15 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.17 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.15 // indirect + github.com/aws/smithy-go v1.20.3 // indirect github.com/essentialkaos/depsy v1.3.0 // indirect + github.com/essentialkaos/sio v1.0.0 // indirect github.com/kr/fs v0.1.0 // indirect - golang.org/x/sys v0.21.0 // indirect + golang.org/x/sys v0.22.0 // indirect ) diff --git a/go.sum b/go.sum index 993a585..b04bb0d 100644 --- a/go.sum +++ b/go.sum @@ -1,35 +1,39 @@ -github.com/aws/aws-sdk-go-v2 v1.27.2 h1:pLsTXqX93rimAOZG2FIYraDQstZaaGVVN4tNw65v0h8= -github.com/aws/aws-sdk-go-v2 v1.27.2/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 h1:x6xsQXGSmW6frevwDA+vi/wqhp1ct18mVXYN08/93to= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2/go.mod h1:lPprDr1e6cJdyYeGXnRaJoP4Md+cDBvi2eOj00BlGmg= -github.com/aws/aws-sdk-go-v2/credentials v1.17.18 h1:D/ALDWqK4JdY3OFgA2thcPO1c9aYTT5STS/CvnkqY1c= -github.com/aws/aws-sdk-go-v2/credentials v1.17.18/go.mod h1:JuitCWq+F5QGUrmMPsk945rop6bB57jdscu+Glozdnc= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 h1:cy8ahBJuhtM8GTTSyOkfy6WVPV1IE+SS5/wfXUYuulw= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9/go.mod h1:CZBXGLaJnEZI6EVNcPd7a6B5IC5cA/GkRWtu9fp3S6Y= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 h1:A4SYk07ef04+vxZToz9LWvAXl9LW0NClpPpMsi31cz0= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9/go.mod 
h1:5jJcHuwDagxN+ErjQ3PU3ocf6Ylc/p9x+BLO/+X4iXw= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.9 h1:vHyZxoLVOgrI8GqX7OMHLXp4YYoxeEsrjweXKpye+ds= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.9/go.mod h1:z9VXZsWA2BvZNH1dT0ToUYwMu/CR9Skkj/TBX+mceZw= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.11 h1:4vt9Sspk59EZyHCAEMaktHKiq0C09noRTQorXD/qV+s= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.11/go.mod h1:5jHR79Tv+Ccq6rwYh+W7Nptmw++WiFafMfR42XhwNl8= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 h1:o4T+fKxA3gTMcluBNZZXE9DNaMkJuUL1O3mffCUjoJo= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11/go.mod h1:84oZdJ+VjuJKs9v1UTC9NaodRZRseOXCTgku+vQJWR8= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.9 h1:TE2i0A9ErH1YfRSvXfCr2SQwfnqsoJT9nPQ9kj0lkxM= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.9/go.mod h1:9TzXX3MehQNGPwCZ3ka4CpwQsoAMWSF48/b+De9rfVM= -github.com/aws/aws-sdk-go-v2/service/s3 v1.55.1 h1:UAxBuh0/8sFJk1qOkvOKewP5sWeWaTPDknbQz0ZkDm0= -github.com/aws/aws-sdk-go-v2/service/s3 v1.55.1/go.mod h1:hWjsYGjVuqCgfoveVcVFPXIWgz0aByzwaxKlN1StKcM= -github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= -github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY= +github.com/aws/aws-sdk-go-v2 v1.30.3/go.mod h1:nIQjQVp5sfpQcTc9mPSr1B0PaWK5ByX9MOoDadSN4lc= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 h1:tW1/Rkad38LA15X4UQtjXZXNKsCgkshC3EbmcUmghTg= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3/go.mod h1:UbnqO+zjqk3uIt9yCACHJ9IVNhyhOCnYk8yA19SAWrM= 
+github.com/aws/aws-sdk-go-v2/credentials v1.17.27 h1:2raNba6gr2IfA0eqqiP2XiQ0UVOpGPgDSi0I9iAP+UI= +github.com/aws/aws-sdk-go-v2/credentials v1.17.27/go.mod h1:gniiwbGahQByxan6YjQUMcW4Aov6bLC3m+evgcoN4r4= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15 h1:SoNJ4RlFEQEbtDcCEt+QG56MY4fm4W8rYirAmq+/DdU= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15/go.mod h1:U9ke74k1n2bf+RIgoX1SXFed1HLs51OgUSs+Ph0KJP8= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15 h1:C6WHdGnTDIYETAm5iErQUiVNsclNx9qbJVPIt03B6bI= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15/go.mod h1:ZQLZqhcu+JhSrA9/NXRm8SkDvsycE+JkV3WGY41e+IM= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.15 h1:Z5r7SycxmSllHYmaAZPpmN8GviDrSGhMS6bldqtXZPw= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.15/go.mod h1:CetW7bDE00QoGEmPUoZuRog07SGVAUVW6LFpNP0YfIg= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 h1:dT3MqvGhSoaIhRseqw2I0yH81l7wiR2vjs57O51EAm8= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3/go.mod h1:GlAeCkHwugxdHaueRr4nhPuY+WW+gR8UjlcqzPr1SPI= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.17 h1:YPYe6ZmvUfDDDELqEKtAd6bo8zxhkm+XEFEzQisqUIE= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.17/go.mod h1:oBtcnYua/CgzCWYN7NZ5j7PotFDaFSUjCYVTtfyn7vw= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17 h1:HGErhhrxZlQ044RiM+WdoZxp0p+EGM62y3L6pwA4olE= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17/go.mod h1:RkZEx4l0EHYDJpWppMJ3nD9wZJAa8/0lq9aVC+r2UII= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.15 h1:246A4lSTXWJw/rmlQI+TT2OcqeDMKBdyjEQrafMaQdA= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.15/go.mod h1:haVfg3761/WF7YPuJOER2MP0k4UAXyHaLclKXB6usDg= +github.com/aws/aws-sdk-go-v2/service/s3 v1.58.2 h1:sZXIzO38GZOU+O0C+INqbH7C2yALwfMWpd64tONS/NE= +github.com/aws/aws-sdk-go-v2/service/s3 v1.58.2/go.mod 
h1:Lcxzg5rojyVPU/0eFwLtcyTaek/6Mtic5B1gJo7e/zE= +github.com/aws/smithy-go v1.20.3 h1:ryHwveWzPV5BIof6fyDvor6V3iUL7nTfiTKXHiW05nE= +github.com/aws/smithy-go v1.20.3/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/essentialkaos/check v1.4.0 h1:kWdFxu9odCxUqo1NNFNJmguGrDHgwi3A8daXX1nkuKk= github.com/essentialkaos/depsy v1.3.0 h1:CN7bRgBU2jGTHSkg/Sh38eDUn7cvmaTp2sxFt2HpFeU= github.com/essentialkaos/depsy v1.3.0/go.mod h1:kpiTAV17dyByVnrbNaMcZt2jRwvuXClUYOzpyJQwtG8= -github.com/essentialkaos/ek/v12 v12.126.1 h1:K4tCpu9T3k5Bv6hJ1lmW7Ou0GxjgjBIKRsoZgAGdEhE= -github.com/essentialkaos/ek/v12 v12.126.1/go.mod h1:71IJ7m82hgjrvWnhL+z0vIhguxz47/rfVma5/CeI5Fw= +github.com/essentialkaos/ek/v13 v13.2.0 h1:Ra6segoyFYjtdz5eh0mQxJMeIso7h61A7IyG9B4R6bI= +github.com/essentialkaos/ek/v13 v13.2.0/go.mod h1:RVf1NpNyK04xkBJ3NTUD1wNLWemY9/naVD4iEVjU2fA= +github.com/essentialkaos/katana v0.2.0 h1:LRnKyEHFET9P45L718DI704oUBHcOjW+/bWBstPb9qg= +github.com/essentialkaos/katana v0.2.0/go.mod h1:B0IUikFvR6Iutx93iSu3xezHfHvIuIgXJSO6Agujp+0= +github.com/essentialkaos/sio v1.0.0 h1:+VZg0Z7+Cx8F/FmlczzTJYM6rq/LhTR45Rsditmu0Ec= +github.com/essentialkaos/sio v1.0.0/go.mod h1:lKaW6IPMJ8GAEAiXe175zcEld370u3Nr546c22Kw5C8= github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -48,8 +52,8 @@ github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5t golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod 
h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= -golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= -golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30= +golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= @@ -63,12 +67,12 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= -golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= +golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.21.0 h1:WVXCp+/EBEHOj53Rvu+7KiT/iElMrO8ACK16SMZ3jaA= +golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk= golang.org/x/text v0.3.0/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= diff --git a/uploader/fs/fs.go b/uploader/fs/fs.go index 9e7e810..f9166b2 100644 --- a/uploader/fs/fs.go +++ b/uploader/fs/fs.go @@ -12,11 +12,15 @@ import ( "fmt" "io" "os" + "time" - "github.com/essentialkaos/ek/v12/events" - "github.com/essentialkaos/ek/v12/fsutil" - "github.com/essentialkaos/ek/v12/log" - "github.com/essentialkaos/ek/v12/path" + "github.com/essentialkaos/ek/v13/events" + "github.com/essentialkaos/ek/v13/fsutil" + "github.com/essentialkaos/ek/v13/log" + "github.com/essentialkaos/ek/v13/passthru" + "github.com/essentialkaos/ek/v13/path" + + "github.com/essentialkaos/katana" "github.com/essentialkaos/atlassian-cloud-backuper/uploader" ) @@ -25,8 +29,9 @@ import ( // Config is configuration for FS uploader type Config struct { - Path string - Mode os.FileMode + Path string + Mode os.FileMode + Secret *katana.Secret } // FSUploader is FS uploader instance @@ -64,10 +69,6 @@ func (u *FSUploader) SetDispatcher(d *events.Dispatcher) { // Upload uploads given file to storage func (u *FSUploader) Upload(file, fileName string) error { - log.Info("Copying backup file to %s…", u.config.Path) - - u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_STARTED, "FS") - err := fsutil.ValidatePerms("FRS", file) if err != nil { @@ -78,43 +79,89 @@ func (u *FSUploader) Upload(file, fileName string) error { err = os.MkdirAll(u.config.Path, 0750) if err != nil { - return fmt.Errorf("Can't create directory for backup: %v", err) + return fmt.Errorf("Can't create directory for backup: %w", err) } } - err = fsutil.CopyFile(file, path.Join(u.config.Path, fileName), u.config.Mode) + fd, err := os.Open(file) - u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_DONE, "FS") + if err != nil { + return fmt.Errorf("Can't open backup file: %w", err) + } - log.Info("Backup 
successfully copied to %s", u.config.Path) + defer fd.Close() + + err = u.Write(fd, fileName, fsutil.GetSize(file)) + + if err != nil { + return fmt.Errorf("Can't save backup file: %w", err) + } return err } // Write writes data from given reader to given file -func (u *FSUploader) Write(r io.ReadCloser, fileName string) error { +func (u *FSUploader) Write(r io.ReadCloser, fileName string, fileSize int64) error { u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_STARTED, "FS") - fd, err := os.OpenFile( - path.Join(u.config.Path, fileName), - os.O_CREATE|os.O_TRUNC|os.O_WRONLY, u.config.Mode, - ) + var w io.Writer + + lastUpdate := time.Now() + outputFile := path.Join(u.config.Path, fileName) + + log.Info("Copying backup file to %s…", u.config.Path) + + fd, err := os.OpenFile(outputFile, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, u.config.Mode) if err != nil { return err } - defer fd.Close() - defer r.Close() + w = fd + + if u.config.Secret != nil { + sw, err := u.config.Secret.NewWriter(fd) + + if err != nil { + return fmt.Errorf("Can't create encrypted writer: %w", err) + } + + defer sw.Close() + + w = sw + } - w := bufio.NewWriter(fd) - _, err = io.Copy(w, r) + if fileSize > 0 { + pw := passthru.NewWriter(w, fileSize) + + pw.Update = func(n int) { + if time.Since(lastUpdate) < 3*time.Second { + return + } + + u.dispatcher.Dispatch( + uploader.EVENT_UPLOAD_PROGRESS, + &uploader.ProgressInfo{ + Progress: pw.Progress(), + Current: pw.Current(), + Total: pw.Total(), + }, + ) + + lastUpdate = time.Now() + } + + w = pw + } + + _, err = io.Copy(bufio.NewWriter(w), r) if err != nil { return fmt.Errorf("File writing error: %w", err) } u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_DONE, "FS") + log.Info("Backup successfully copied to %s", u.config.Path) return nil } diff --git a/uploader/s3/s3.go b/uploader/s3/s3.go index 8a363b9..669682b 100644 --- a/uploader/s3/s3.go +++ b/uploader/s3/s3.go @@ -15,16 +15,18 @@ import ( "strings" "time" - 
"github.com/essentialkaos/ek/v12/events" - "github.com/essentialkaos/ek/v12/fsutil" - "github.com/essentialkaos/ek/v12/log" - "github.com/essentialkaos/ek/v12/passthru" - "github.com/essentialkaos/ek/v12/path" + "github.com/essentialkaos/ek/v13/events" + "github.com/essentialkaos/ek/v13/fsutil" + "github.com/essentialkaos/ek/v13/log" + "github.com/essentialkaos/ek/v13/passthru" + "github.com/essentialkaos/ek/v13/path" "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/credentials" "github.com/aws/aws-sdk-go-v2/service/s3" + "github.com/essentialkaos/katana" + "github.com/essentialkaos/atlassian-cloud-backuper/uploader" ) @@ -38,6 +40,7 @@ type Config struct { SecretKey string Bucket string Path string + Secret *katana.Secret } // S3Uploader is S3 uploader instance @@ -75,87 +78,87 @@ func (u *S3Uploader) SetDispatcher(d *events.Dispatcher) { // Upload uploads given file to S3 storage func (u *S3Uploader) Upload(file, fileName string) error { - u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_STARTED, "S3") - - lastUpdate := time.Now() - fileSize := fsutil.GetSize(file) - outputFile := path.Join(u.config.Path, fileName) - - log.Info( - "Uploading backup file to %s:%s (%s/%s)", - u.config.Bucket, u.config.Path, u.config.Host, u.config.Region, - ) - - client := s3.New(s3.Options{ - Region: "ru-central1", - BaseEndpoint: aws.String("https://storage.yandexcloud.net"), - Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider( - u.config.AccessKeyID, u.config.SecretKey, "", - )), - }) - - inputFD, err := os.OpenFile(file, os.O_RDONLY, 0) + fd, err := os.Open(file) if err != nil { return fmt.Errorf("Can't open backup file for reading: %v", err) } - defer inputFD.Close() - - r := passthru.NewReader(inputFD, fileSize) + defer fd.Close() - r.Update = func(n int) { - if time.Since(lastUpdate) < 3*time.Second { - return - } - - u.dispatcher.Dispatch( - uploader.EVENT_UPLOAD_PROGRESS, - &uploader.ProgressInfo{Progress: r.Progress(), 
Current: r.Current(), Total: r.Total()}, - ) - - lastUpdate = time.Now() - } - - _, err = client.PutObject(context.TODO(), &s3.PutObjectInput{ - Bucket: aws.String(u.config.Bucket), - Key: aws.String(outputFile), - Body: r, - }) + err = u.Write(fd, fileName, fsutil.GetSize(file)) if err != nil { - return fmt.Errorf("Can't upload file to S3: %v", err) + return fmt.Errorf("Can't save backup: %w", err) } - log.Info("File successfully uploaded to S3!") - u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_DONE, "S3") - return nil } // Write writes data from given reader to given file -func (u *S3Uploader) Write(r io.ReadCloser, fileName string) error { +func (u *S3Uploader) Write(r io.ReadCloser, fileName string, fileSize int64) error { u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_STARTED, "S3") - outputFile := path.Join(u.config.Path, fileName) + var rr io.Reader + var err error + + lastUpdate := time.Now() + outputFile := fileName + + if u.config.Path != "" { + outputFile = path.Join(u.config.Path, fileName) + } log.Info( "Uploading backup file to %s:%s (%s/%s)", u.config.Bucket, u.config.Path, u.config.Host, u.config.Region, ) + rr = r + + if u.config.Secret != nil { + sr, err := u.config.Secret.NewReader(r, katana.MODE_ENCRYPT) + + if err != nil { + return fmt.Errorf("Can't create encrypted reader: %w", err) + } + + rr = sr + } + + if fileSize > 0 { + pr := passthru.NewReader(rr, fileSize) + + pr.Update = func(n int) { + if time.Since(lastUpdate) < 3*time.Second { + return + } + + u.dispatcher.Dispatch( + uploader.EVENT_UPLOAD_PROGRESS, + &uploader.ProgressInfo{ + Progress: pr.Progress(), + Current: pr.Current(), + Total: pr.Total(), + }, + ) + } + + rr = pr + } + client := s3.New(s3.Options{ - Region: "ru-central1", - BaseEndpoint: aws.String("https://storage.yandexcloud.net"), + Region: u.config.Region, + BaseEndpoint: aws.String("https://" + u.config.Host), Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider( u.config.AccessKeyID, 
u.config.SecretKey, "", )), }) - _, err := client.PutObject(context.TODO(), &s3.PutObjectInput{ + _, err = client.PutObject(context.TODO(), &s3.PutObjectInput{ Bucket: aws.String(u.config.Bucket), Key: aws.String(outputFile), - Body: r, + Body: rr, }) if err != nil { diff --git a/uploader/sftp/sftp.go b/uploader/sftp/sftp.go index ea2541a..08e7ee1 100644 --- a/uploader/sftp/sftp.go +++ b/uploader/sftp/sftp.go @@ -14,14 +14,16 @@ import ( "strings" "time" + "github.com/essentialkaos/ek/v13/events" + "github.com/essentialkaos/ek/v13/fsutil" + "github.com/essentialkaos/ek/v13/log" + "github.com/essentialkaos/ek/v13/passthru" + "github.com/essentialkaos/ek/v13/path" + "github.com/pkg/sftp" "golang.org/x/crypto/ssh" - "github.com/essentialkaos/ek/v12/events" - "github.com/essentialkaos/ek/v12/fsutil" - "github.com/essentialkaos/ek/v12/log" - "github.com/essentialkaos/ek/v12/passthru" - "github.com/essentialkaos/ek/v12/path" + "github.com/essentialkaos/katana" "github.com/essentialkaos/atlassian-cloud-backuper/uploader" ) @@ -30,11 +32,12 @@ import ( // Config is configuration for SFTP uploader type Config struct { - Host string - User string - Key []byte - Path string - Mode os.FileMode + Host string + User string + Key []byte + Path string + Mode os.FileMode + Secret *katana.Secret } // SFTPUploader is SFTP uploader instance @@ -58,7 +61,7 @@ func NewUploader(config *Config) (*SFTPUploader, error) { return nil, err } - return &SFTPUploader{config, nil}, nil + return &SFTPUploader{config: config}, nil } // ////////////////////////////////////////////////////////////////////////////////// // @@ -72,88 +75,30 @@ func (u *SFTPUploader) SetDispatcher(d *events.Dispatcher) { // Upload uploads given file to SFTP storage func (u *SFTPUploader) Upload(file, fileName string) error { - u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_STARTED, "SFTP") - - lastUpdate := time.Now() - fileSize := fsutil.GetSize(file) - outputFile := path.Join(u.config.Path, fileName) - - log.Info( 
- "Uploading backup file to %s@%s~%s/%s…", - u.config.User, u.config.Host, u.config.Path, fileName, - ) - - sftpClient, err := u.connectToSFTP() - - if err != nil { - return fmt.Errorf("Can't connect to SFTP: %v", err) - } - - defer sftpClient.Close() - - _, err = sftpClient.Stat(u.config.Path) - - if err != nil { - err = sftpClient.MkdirAll(u.config.Path) - - if err != nil { - return fmt.Errorf("Can't create directory for backup: %v", err) - } - } - - outputFD, err := sftpClient.OpenFile(outputFile, os.O_CREATE|os.O_TRUNC|os.O_WRONLY) - - if err != nil { - return fmt.Errorf("Can't create file of SFTP: %v", err) - } - - defer outputFD.Close() - - inputFD, err := os.OpenFile(file, os.O_RDONLY, 0) + fd, err := os.Open(file) if err != nil { return fmt.Errorf("Can't open backup file for reading: %v", err) } - defer inputFD.Close() - - w := passthru.NewWriter(outputFD, fileSize) + defer fd.Close() - w.Update = func(n int) { - if time.Since(lastUpdate) < 3*time.Second { - return - } - - u.dispatcher.Dispatch( - uploader.EVENT_UPLOAD_PROGRESS, - &uploader.ProgressInfo{Progress: w.Progress(), Current: w.Current(), Total: w.Total()}, - ) - - lastUpdate = time.Now() - } - - _, err = io.Copy(w, inputFD) + err = u.Write(fd, fileName, fsutil.GetSize(file)) if err != nil { - return fmt.Errorf("Can't upload file to SFTP: %v", err) + return fmt.Errorf("Can't save backup: %w", err) } - err = sftpClient.Chmod(outputFile, u.config.Mode) - - if err != nil { - log.Error("Can't change file mode for uploaded file: %v", err) - } - - log.Info("File successfully uploaded to SFTP!") - u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_DONE, "SFTP") - return nil } // Write writes data from given reader to given file -func (u *SFTPUploader) Write(r io.ReadCloser, fileName string) error { +func (u *SFTPUploader) Write(r io.ReadCloser, fileName string, fileSize int64) error { u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_STARTED, "SFTP") + var w io.Writer + + lastUpdate := time.Now() 
outputFile := path.Join(u.config.Path, fileName) log.Info( @@ -179,16 +124,50 @@ func (u *SFTPUploader) Write(r io.ReadCloser, fileName string) error { } } - outputFD, err := sftpClient.OpenFile(outputFile, os.O_CREATE|os.O_TRUNC|os.O_WRONLY) + fd, err := sftpClient.OpenFile(outputFile, os.O_CREATE|os.O_TRUNC|os.O_WRONLY) if err != nil { return fmt.Errorf("Can't create file of SFTP: %v", err) } - defer outputFD.Close() - defer r.Close() + w = fd + + if u.config.Secret != nil { + sw, err := u.config.Secret.NewWriter(fd) + + if err != nil { + return fmt.Errorf("Can't create encrypted writer: %w", err) + } + + defer sw.Close() + + w = sw + } + + if fileSize > 0 { + pw := passthru.NewWriter(w, fileSize) + + pw.Update = func(n int) { + if time.Since(lastUpdate) < 3*time.Second { + return + } + + u.dispatcher.Dispatch( + uploader.EVENT_UPLOAD_PROGRESS, + &uploader.ProgressInfo{ + Progress: pw.Progress(), + Current: pw.Current(), + Total: pw.Total(), + }, + ) + + lastUpdate = time.Now() + } + + w = pw + } - _, err = io.Copy(outputFD, r) + _, err = io.Copy(w, r) if err != nil { return fmt.Errorf("Can't upload file to SFTP: %v", err) diff --git a/uploader/uploader.go b/uploader/uploader.go index 0cfe9f4..92de1f6 100644 --- a/uploader/uploader.go +++ b/uploader/uploader.go @@ -10,7 +10,7 @@ package uploader import ( "io" - "github.com/essentialkaos/ek/v12/events" + "github.com/essentialkaos/ek/v13/events" ) // ////////////////////////////////////////////////////////////////////////////////// // @@ -33,12 +33,14 @@ type ProgressInfo struct { // Uploader is generic uploader interface type Uploader interface { - // Upload uploads given file to storage - Upload(file, fileName string) error - // SetDispatcher sets events dispatcher SetDispatcher(d *events.Dispatcher) + // Upload uploads given file to storage + Upload(file, fileName string) error + // Write writes data from given reader to given file - Write(r io.ReadCloser, fileName string) error + Write(r io.ReadCloser, fileName 
string, fileSize int64) error } + +// ////////////////////////////////////////////////////////////////////////////////// //