diff --git a/.codebeatsettings b/.codebeatsettings
new file mode 100644
index 0000000..8e5dd08
--- /dev/null
+++ b/.codebeatsettings
@@ -0,0 +1,11 @@
+{
+ "GOLANG": {
+ "ABC": [15, 25, 50, 70],
+ "LOC": [30, 45, 70, 100],
+ "BLOCK_NESTING": [4, 6, 8, 10],
+ "TOO_MANY_IVARS": [50, 75, 100, 150],
+ "TOO_MANY_FUNCTIONS": [64, 128, 256, 512],
+ "TOTAL_LOC": [1500, 3000, 5000, 10000],
+ "TOTAL_COMPLEXITY": [350, 600, 900, 1200]
+ }
+}
diff --git a/.codeclimate.yml b/.codeclimate.yml
new file mode 100644
index 0000000..16f4324
--- /dev/null
+++ b/.codeclimate.yml
@@ -0,0 +1,39 @@
+version: "2"
+
+checks:
+ argument-count:
+ enabled: true
+ config:
+ threshold: 6
+ complex-logic:
+ enabled: true
+ config:
+ threshold: 6
+ file-lines:
+ enabled: true
+ config:
+ threshold: 1000
+ method-complexity:
+ enabled: true
+ config:
+ threshold: 8
+ method-count:
+ enabled: true
+ config:
+ threshold: 20
+ method-lines:
+ enabled: true
+ config:
+ threshold: 100
+ nested-control-flow:
+ enabled: true
+ config:
+ threshold: 6
+ return-statements:
+ enabled: true
+ config:
+ threshold: 6
+ similar-code:
+ enabled: false
+ identical-code:
+ enabled: false
diff --git a/.docker/alpine.docker b/.docker/alpine.docker
new file mode 100644
index 0000000..77be9ff
--- /dev/null
+++ b/.docker/alpine.docker
@@ -0,0 +1,36 @@
+## REGISTRY CONFIGURATION ######################################################
+
+ARG REGISTRY="docker.io"
+
+## BUILDER #####################################################################
+
+FROM golang:alpine3.17 as builder
+
+WORKDIR /go/src/github.com/essentialkaos/atlassian-cloud-backuper
+
+COPY . .
+
+# hadolint ignore=DL3018
+RUN apk add --no-cache git make && make deps && make all
+
+## FINAL IMAGE #################################################################
+
+FROM ${REGISTRY}/essentialkaos/alpine:3.17
+
+LABEL org.opencontainers.image.title="atlassian-cloud-backuper" \
+ org.opencontainers.image.description="Atlassian Cloud Backuper" \
+ org.opencontainers.image.vendor="ESSENTIAL KAOS" \
+ org.opencontainers.image.authors="Anton Novojilov" \
+ org.opencontainers.image.licenses="Apache-2.0" \
+ org.opencontainers.image.url="https://kaos.sh/atlassian-cloud-backuper" \
+ org.opencontainers.image.source="https://github.com/essentialkaos/atlassian-cloud-backuper"
+
+COPY --from=builder /go/src/github.com/essentialkaos/atlassian-cloud-backuper/atlassian-cloud-backuper /usr/bin/
+COPY --from=builder /go/src/github.com/essentialkaos/atlassian-cloud-backuper/common/atlassian-cloud-backuper-container.knf /etc/atlassian-cloud-backuper.knf
+
+# hadolint ignore=DL3018
+RUN apk add --no-cache ca-certificates
+
+ENTRYPOINT ["atlassian-cloud-backuper"]
+
+################################################################################
diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..45abf3d
--- /dev/null
+++ b/.github/CODE_OF_CONDUCT.md
@@ -0,0 +1,42 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of
+fostering an open and welcoming community, we pledge to respect all people who
+contribute through reporting issues, posting feature requests, updating
+documentation, submitting pull requests or patches, and other activities.
+
+We are committed to making participation in this project a harassment-free
+experience for everyone, regardless of level of experience, gender, gender
+identity and expression, sexual orientation, disability, personal appearance,
+body size, race, ethnicity, age, religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery
+* Personal attacks
+* Trolling or insulting/derogatory comments
+* Public or private harassment
+* Publishing others' private information, such as physical or electronic
+ addresses, without explicit permission
+* Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to
+fairly and consistently applying these principles to every aspect of managing
+this project. Project maintainers who do not follow or enforce the Code of
+Conduct may be permanently removed from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting a project maintainer at `conduct@essentialkaos.com`. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. Maintainers are
+obligated to maintain confidentiality with regard to the reporter of an
+incident.
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
new file mode 100644
index 0000000..d7da3fb
--- /dev/null
+++ b/.github/CONTRIBUTING.md
@@ -0,0 +1,19 @@
+# Contributing Guidelines
+
+**IMPORTANT! Contribute your code only if you have an excellent understanding of the project idea and the entire existing code base. Otherwise, a nicely formatted issue will be more helpful to us.**
+
+### Issues
+
+1. Provide the product version in which the problem was found;
+2. Provide info about your environment;
+3. Provide detailed info about your problem;
+4. Provide steps to reproduce the problem;
+5. Provide actual and expected results.
+
+### Code
+
+1. Check your code **before** creating a pull request;
+2. If the project has tests, add tests for your code;
+3. Add inline documentation for your code;
+4. Apply the code style used throughout the project;
+5. Create your pull request against the `develop` branch (_pull requests to other branches are not allowed_).
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000..2eb6195
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,51 @@
+_Before opening an issue, search for similar bug reports or feature requests on GitHub Issues. If a similar issue already exists, please add a 👍 reaction to it. If no similar issue can be found, fill out either the "Bug Report" or the "Feature Request" section below. Erase the other section and everything on and above this line._
+
+### Bug report
+
+**System info:**
+
+* **Verbose version info (`atlassian-cloud-backuper -vv`):**
+* **Install tools:**
+
+**Steps to reproduce:**
+
+1. [First Step]
+2. [Second Step]
+3. [and so on...]
+
+**Expected behavior:**
+
+[What you expected to happen]
+
+**Actual behavior:**
+
+[What actually happened]
+
+**Additional info:**
+
+[Include gist of relevant config, logs, etc.]
+
+If possible, put relevant configs, logs, and command output in a [gist](http://gist.github.com) and link it here.
+
+---
+
+### Feature Request
+
+Opening a feature request kicks off a discussion. Requests may be closed if we're not actively planning to work on them.
+
+**Proposal:**
+
+[Description of the feature]
+
+**Current behavior:**
+
+[What currently happens]
+
+**Desired behavior:**
+
+[What you would like to happen]
+
+**Use case:**
+
+[Why is this important (helps with prioritizing requests)]
+
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..e7814b8
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,24 @@
+### What did you implement:
+
+Closes #XXXXX
+
+### How did you implement it:
+
+...
+
+### How can we verify it:
+
+...
+
+### TODO's:
+
+- [ ] Write tests
+- [ ] Write documentation
+- [ ] Check that there aren't other open pull requests for the same issue/feature
+- [ ] Format your source code by `make fmt`
+- [ ] Provide verification config / commands
+- [ ] Enable "Allow edits from maintainers" for this PR
+- [ ] Update the messages below
+
+**Is this ready for review?:** No
+**Is it a breaking change?:** No
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..fc51337
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,30 @@
+version: 2
+
+updates:
+ - package-ecosystem: "gomod"
+ directory: "/"
+ target-branch: "develop"
+ schedule:
+ interval: "daily"
+ timezone: "Europe/London"
+ time: "03:00"
+ labels:
+ - "PR ā¢ MAINTENANCE"
+ assignees:
+ - "andyone"
+ reviewers:
+ - "andyone"
+
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ target-branch: "develop"
+ schedule:
+ interval: "daily"
+ timezone: "Europe/London"
+ time: "04:00"
+ labels:
+ - "PR ā¢ MAINTENANCE"
+ assignees:
+ - "andyone"
+ reviewers:
+ - "andyone"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..c2eb6e3
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,153 @@
+name: CI
+
+on:
+ push:
+ branches: [master, develop]
+ pull_request:
+ branches: [master]
+ workflow_dispatch:
+ inputs:
+ force_run:
+ description: 'Force workflow run'
+ required: true
+ type: choice
+ options: [yes, no]
+
+permissions:
+ actions: read
+ contents: read
+ statuses: write
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ Go:
+ name: Go
+ runs-on: ubuntu-latest
+
+ strategy:
+ matrix:
+ go: [ '1.21.x', '1.22.x' ]
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Set up Go
+ uses: actions/setup-go@v5
+ with:
+ go-version: ${{ matrix.go }}
+
+ - name: Download dependencies
+ run: make deps
+
+ - name: Run tests
+ run: make all
+
+ Perfecto:
+ name: Perfecto
+ runs-on: ubuntu-latest
+
+ needs: Go
+
+ steps:
+ - name: Code checkout
+ uses: actions/checkout@v4
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Check specs with Perfecto
+ uses: essentialkaos/perfecto-action@v2
+ with:
+ files: common/atlassian-cloud-backuper.spec
+
+ Hadolint:
+ name: Hadolint
+ runs-on: ubuntu-latest
+
+ needs: Go
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Check dockerfiles with Hadolint
+ uses: essentialkaos/hadolint-action@v1
+ with:
+ files: .docker/*.docker
+
+ Typos:
+ name: Typos
+ runs-on: ubuntu-latest
+
+ needs: Go
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Check spelling
+ uses: crate-ci/typos@master
+
+ DockerBuild:
+ name: Docker Build Check
+ runs-on: ubuntu-latest
+
+ needs: [Hadolint, Perfecto]
+
+ env:
+ REGISTRY: ghcr.io
+
+ strategy:
+ matrix:
+ image: [ 'alpine' ]
+
+ steps:
+ - name: Check event type
+ run: |
+ if [[ "${{github.event_name}}" != "pull_request" ]] ; then
+ echo "::notice::Event type is not 'pull_request', all job actions will be skipped"
+ fi
+
+ # This step is a hack for needs+if issue with actions
+ # More info about issue: https://github.com/actions/runner/issues/491
+
+ - name: Checkout
+ uses: actions/checkout@v4
+ if: ${{ github.event_name == 'pull_request' }}
+
+ - name: Login to DockerHub
+ uses: docker/login-action@v3
+ env:
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ if: ${{ github.event_name == 'pull_request' && env.DOCKERHUB_USERNAME != '' }}
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ if: ${{ github.event_name == 'pull_request' }}
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build Docker image
+ if: ${{ github.event_name == 'pull_request' }}
+ run: |
+ docker build --build-arg REGISTRY=${REGISTRY} -f .docker/${{matrix.image}}.docker -t ${{matrix.image}} .
+
+ - name: Show info about built Docker image
+ uses: essentialkaos/docker-info-action@v1
+ if: ${{ github.event_name == 'pull_request' }}
+ with:
+ image: ${{matrix.image}}
+ show-labels: true
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 0000000..bfc4df5
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,33 @@
+name: "CodeQL"
+
+on:
+ push:
+ branches: [master, develop]
+ pull_request:
+ branches: [master]
+ schedule:
+ - cron: '0 3 * * */2'
+
+permissions:
+ security-events: write
+ actions: read
+ contents: read
+
+jobs:
+ analyse:
+ name: Analyse
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 2
+
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v3
+ with:
+ languages: go
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/docker-push.yml b/.github/workflows/docker-push.yml
new file mode 100644
index 0000000..be2c235
--- /dev/null
+++ b/.github/workflows/docker-push.yml
@@ -0,0 +1,168 @@
+name: "Docker Push"
+
+on:
+ release:
+ types: [published]
+ workflow_dispatch:
+ inputs:
+ force_rebuild:
+ description: 'Force container rebuild'
+ required: true
+ type: choice
+ options: [yes, no]
+ schedule:
+ - cron: '30 12 * * *'
+
+permissions:
+ packages: write
+ contents: read
+
+env:
+ IMAGE_NAME: ${{ github.repository }}
+
+jobs:
+ Docker:
+ name: Docker Build & Publish
+ runs-on: ubuntu-latest
+
+ env:
+ DOCKER_FILE: alpine
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+
+ - name: Login to DockerHub
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v2
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Checkout the latest tag
+ run: |
+ rev=$(git rev-list --tags --max-count=1)
+ tag=$(git describe --tags "$rev")
+
+ if [[ -z "$tag" ]] ; then
+ echo "::error::Can't find the latest tag"
+ exit 1
+ fi
+
+ echo -e "\033[34mRev:\033[0m $rev"
+ echo -e "\033[34mTag:\033[0m $tag"
+
+ git checkout "$tag"
+
+ - name: Prepare metadata for build
+ id: metadata
+ run: |
+ rev=$(git rev-list --tags --max-count=1)
+ version=$(git describe --tags "$rev" | tr -d 'v')
+
+ if [[ -z "$version" ]] ; then
+ echo "::error::Can't find version info"
+ exit 1
+ fi
+
+ docker_file=".docker/${{env.DOCKER_FILE}}.docker"
+ base_image=$(grep 'FROM ' $docker_file | grep -v 'builder' | sed 's#${REGISTRY}/##' | tail -1 | cut -f2 -d' ')
+
+ if [[ -z "$base_image" ]] ; then
+ echo "::error::Can't extract base image info"
+ exit 1
+ fi
+
+ echo "version=$version" >> $GITHUB_OUTPUT
+ echo "dockerfile=$docker_file" >> $GITHUB_OUTPUT
+ echo "baseimage=$base_image" >> $GITHUB_OUTPUT
+
+ echo -e "\033[34mVersion:\033[0m $version"
+ echo -e "\033[34mDockerfile:\033[0m $docker_file"
+ echo -e "\033[34mBase image:\033[0m $base_image"
+
+ - name: Check if build/rebuild is required
+ id: build_check
+ run: |
+ if [[ "${{github.event_name}}" == "release" ]] ; then
+ echo "build=true" >> $GITHUB_OUTPUT
+ exit 0
+ fi
+
+ if [[ "${{ github.event.inputs.force_rebuild }}" == "true" ]] ; then
+ echo "::warning::Rebuild is required (reason: forced rebuild)"
+ echo "build=true" >> $GITHUB_OUTPUT
+ exit 0
+ fi
+
+ echo -e "::group::\033[34mDownloading built imageā¦\033[0m"
+
+ if ! docker pull ghcr.io/${{env.IMAGE_NAME}}:latest ; then
+ echo "::warning::Rebuild is required (reason: new image)"
+ echo "build=true" >> $GITHUB_OUTPUT
+ exit 0
+ fi
+
+ echo "::endgroup::"
+ echo -e "::group::\033[34mDownloading base imageā¦\033[0m"
+
+ if ! docker pull ${{steps.metadata.outputs.baseimage}} ; then
+ echo "::error::Can't download image ${{steps.metadata.outputs.baseimage}}"
+ exit 1
+ fi
+
+ echo "::endgroup::"
+
+ base_layer=$(docker inspect "${{steps.metadata.outputs.baseimage}}" | jq -r '.[0].RootFS.Layers[-1]')
+
+ if [[ -z "$base_layer" ]] ; then
+ echo "::error::Can't extract layers info from base image"
+ exit 1
+ fi
+
+ if ! docker inspect "ghcr.io/${{env.IMAGE_NAME}}:latest" | jq -r '.[0].RootFS.Layers' | grep -q "$base_layer" ; then
+ echo "::warning::Rebuild image (reason: base image rebuilt)"
+ echo "build=true" >> $GITHUB_OUTPUT
+ exit 0
+ fi
+
+ - name: Build and push Docker images (Docker)
+ if: ${{ steps.build_check.outputs.build == 'true' }}
+ uses: docker/build-push-action@v3
+ with:
+ push: true
+ context: .
+ file: ${{steps.metadata.outputs.dockerfile}}
+ build-args: |
+ REGISTRY=docker.io
+ tags: |
+ ${{env.IMAGE_NAME}}:${{steps.metadata.outputs.version}}
+ ${{env.IMAGE_NAME}}:latest
+
+ - name: Build and push Docker images (GHCR)
+ if: ${{ steps.build_check.outputs.build == 'true' }}
+ uses: docker/build-push-action@v3
+ with:
+ push: true
+ context: .
+ file: ${{steps.metadata.outputs.dockerfile}}
+ build-args: |
+ REGISTRY=ghcr.io
+ tags: |
+ ghcr.io/${{env.IMAGE_NAME}}:${{steps.metadata.outputs.version}}
+ ghcr.io/${{env.IMAGE_NAME}}:latest
+
+ - name: Show info about built Docker image
+ if: ${{ steps.build_check.outputs.build == 'true' }}
+ uses: essentialkaos/docker-info-action@v1
+ with:
+ image: ghcr.io/${{env.IMAGE_NAME}}:latest
+ show-labels: true
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..91f4d39
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+atlassian-cloud-backuper
+vendor
diff --git a/.typos.toml b/.typos.toml
new file mode 100644
index 0000000..55aead8
--- /dev/null
+++ b/.typos.toml
@@ -0,0 +1,2 @@
+[files]
+extend-exclude = ["go.sum"]
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..12fb719
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2020 ESSENTIAL KAOS
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..6357e54
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,100 @@
+################################################################################
+
+# This Makefile was generated by GoMakeGen 2.3.0 using the following command:
+# gomakegen --mod .
+#
+# More info: https://kaos.sh/gomakegen
+
+################################################################################
+
+export GO111MODULE=on
+
+ifdef VERBOSE ## Print verbose information (Flag)
+VERBOSE_FLAG = -v
+endif
+
+COMPAT ?= 1.18
+MAKEDIR = $(dir $(realpath $(firstword $(MAKEFILE_LIST))))
+GITREV ?= $(shell test -s $(MAKEDIR)/.git && git rev-parse --short HEAD)
+
+################################################################################
+
+.DEFAULT_GOAL := help
+.PHONY: fmt vet all clean deps update init vendor mod-init mod-update mod-download mod-vendor help
+
+################################################################################
+
+all: atlassian-cloud-backuper ## Build all binaries
+
+atlassian-cloud-backuper:
+ go build $(VERBOSE_FLAG) -ldflags="-X main.gitrev=$(GITREV)" atlassian-cloud-backuper.go
+
+install: ## Install all binaries
+ cp atlassian-cloud-backuper /usr/bin/atlassian-cloud-backuper
+
+uninstall: ## Uninstall all binaries
+ rm -f /usr/bin/atlassian-cloud-backuper
+
+init: mod-init ## Initialize new module
+
+deps: mod-download ## Download dependencies
+
+update: mod-update ## Update dependencies to the latest versions
+
+vendor: mod-vendor ## Make vendored copy of dependencies
+
+mod-init:
+ifdef MODULE_PATH ## Module path for initialization (String)
+ go mod init $(MODULE_PATH)
+else
+ go mod init
+endif
+
+ifdef COMPAT ## Compatible Go version (String)
+ go mod tidy $(VERBOSE_FLAG) -compat=$(COMPAT) -go=$(COMPAT)
+else
+ go mod tidy $(VERBOSE_FLAG)
+endif
+
+mod-update:
+ifdef UPDATE_ALL ## Update all dependencies (Flag)
+ go get -u $(VERBOSE_FLAG) all
+else
+ go get -u $(VERBOSE_FLAG) ./...
+endif
+
+ifdef COMPAT
+ go mod tidy $(VERBOSE_FLAG) -compat=$(COMPAT)
+else
+ go mod tidy $(VERBOSE_FLAG)
+endif
+
+ test -d vendor && rm -rf vendor && go mod vendor $(VERBOSE_FLAG) || :
+
+mod-download:
+ go mod download
+
+mod-vendor:
+ rm -rf vendor && go mod vendor $(VERBOSE_FLAG)
+
+fmt: ## Format source code with gofmt
+ find . -name "*.go" -exec gofmt -s -w {} \;
+
+vet: ## Runs 'go vet' over sources
+ go vet -composites=false -printfuncs=LPrintf,TLPrintf,TPrintf,log.Debug,log.Info,log.Warn,log.Error,log.Critical,log.Print ./...
+
+clean: ## Remove generated files
+ rm -f atlassian-cloud-backuper
+
+help: ## Show this info
+ @echo -e '\n\033[1mTargets:\033[0m\n'
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) \
+ | awk 'BEGIN {FS = ":.*?## "}; {printf " \033[33m%-26s\033[0m %s\n", $$1, $$2}'
+ @echo -e '\n\033[1mVariables:\033[0m\n'
+ @grep -E '^ifdef [A-Z_]+ .*?## .*$$' $(abspath $(lastword $(MAKEFILE_LIST))) \
+ | sed 's/ifdef //' \
+ | awk 'BEGIN {FS = " .*?## "}; {printf " \033[32m%-14s\033[0m %s\n", $$1, $$2}'
+ @echo -e ''
+ @echo -e '\033[90mGenerated by GoMakeGen 2.3.0\033[0m\n'
+
+################################################################################
diff --git a/README.md b/README.md
index 7e333d7..fe59450 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,108 @@
-# atlassian-cloud-backuper
-Tool for backuping Atlassian cloud services (Jira and Confluence)
+
+
+
+
+
+
+
+
+
+
+
+
+`atlassian-cloud-backuper` is a tool for backing up Atlassian Cloud services (_Jira and Confluence_).
+
+### Installation
+
+#### From [ESSENTIAL KAOS Public Repository](https://kaos.sh/kaos-repo)
+
+```bash
+sudo yum install -y https://pkgs.kaos.st/kaos-repo-latest.el$(grep 'CPE_NAME' /etc/os-release | tr -d '"' | cut -d':' -f5).noarch.rpm
+sudo yum install atlassian-cloud-backuper
+```
+
+#### Prebuilt binaries
+
+You can download prebuilt binaries for Linux from [EK Apps Repository](https://apps.kaos.st/atlassian-cloud-backuper/latest):
+
+```bash
+bash <(curl -fsSL https://apps.kaos.st/get) atlassian-cloud-backuper
+```
+
+#### Container Image
+
+The latest version of `atlassian-cloud-backuper` is also available as a container image on [GitHub Container Registry](https://kaos.sh/p/atlassian-cloud-backuper) and [Docker Hub](https://kaos.sh/d/atlassian-cloud-backuper).
+
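+For example, you can pull and run the image like this (an illustrative sketch; the image name matches the publishing workflow in this repository, and the mounted file overrides the default configuration at `/etc/atlassian-cloud-backuper.knf`):
+
+```bash
+docker pull ghcr.io/essentialkaos/atlassian-cloud-backuper:latest
+
+docker run --rm \
+  -v $(pwd)/atlassian-cloud-backuper.knf:/etc/atlassian-cloud-backuper.knf:ro \
+  ghcr.io/essentialkaos/atlassian-cloud-backuper:latest jira
+```
+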
+### Usage
+
+#### Standalone
+```
+Usage: atlassian-cloud-backuper {options} target
+
+Options
+
+ --config, -c file Path to configuration file
+ --interactive, -I Interactive mode
+ --no-color, -nc Disable colors in output
+ --help, -h Show this help message
+ --version, -v Show version
+```
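+
+For example, to create a Jira backup using a custom configuration file (an illustrative invocation; the configuration path is just a placeholder):
+
+```bash
+atlassian-cloud-backuper --config /path/to/atlassian-cloud-backuper.knf jira
+```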
+
+#### Container
+
+When `atlassian-cloud-backuper` runs inside a container, it supports united configuration (_knf file + options + environment variables_).
+
+```
+Usage: atlassian-cloud-backuper {options} target
+
+Options
+
+ --config, -c file Path to configuration file
+ --interactive, -I Interactive mode
+ --no-color, -nc Disable colors in output
+ --help, -h Show this help message
+ --version, -v Show version
+
+ --access-account name Account name (ACCESS_ACCOUNT)
+ --access-email email User email with access to API (ACCESS_EMAIL)
+ --access-api-key key API key (ACCESS_API_KEY)
+ --storage-type fs/sftp/s3 Storage type (STORAGE_TYPE)
+ --storage-fs-path path Path on system for backups (STORAGE_FS_PATH)
+ --storage-fs-mode mode File mode on system (STORAGE_FS_MODE)
+ --storage-sftp-host host SFTP host (STORAGE_SFTP_HOST)
+ --storage-sftp-user name SFTP user name (STORAGE_SFTP_USER)
+ --storage-sftp-key key Base64-encoded private key (STORAGE_SFTP_KEY)
+ --storage-sftp-path path Path on SFTP (STORAGE_SFTP_PATH)
+ --storage-sftp-mode mode File mode on SFTP (STORAGE_SFTP_MODE)
+ --storage-s3-host host S3 host (STORAGE_S3_HOST)
+ --storage-s3-access-key id S3 access key ID (STORAGE_S3_ACCESS_KEY)
+ --storage-s3-secret-key key S3 access secret key (STORAGE_S3_SECRET_KEY)
+ --storage-s3-bucket name S3 bucket (STORAGE_S3_BUCKET)
+ --storage-s3-path path Path for backups (STORAGE_S3_PATH)
+ --jira-output-file template Jira backup output file name template (JIRA_OUTPUT_FILE)
+ --jira-include-attachments yes/no Include attachments to Jira backup (JIRA_INCLUDE_ATTACHMENTS)
+ --jira-cloud-format yes/no Create Jira backup for Cloud (JIRA_CLOUD_FORMAT)
+ --confluence-output-file template Confluence backup output file name template (CONFLUENCE_OUTPUT_FILE)
+ --confluence-include-attachments yes/no Include attachments to Confluence backup (CONFLUENCE_INCLUDE_ATTACHMENTS)
+ --confluence-cloud-format yes/no Create Confluence backup for Cloud (CONFLUENCE_CLOUD_FORMAT)
+ --temp-dir path Path to directory for temporary data (TEMP_DIR)
+ --log-format text/json Log format (LOG_FORMAT)
+ --log-level level Log level (LOG_LEVEL)
+```
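+
+For example, the container can be configured entirely via environment variables (an illustrative sketch; the account, e-mail, API key, and paths are placeholders):
+
+```bash
+docker run --rm \
+  -e ACCESS_ACCOUNT=mycompany \
+  -e ACCESS_EMAIL=admin@example.com \
+  -e ACCESS_API_KEY=xxxxxxxxxxxx \
+  -e STORAGE_TYPE=fs \
+  -e STORAGE_FS_PATH=/backups \
+  -v $(pwd)/backups:/backups \
+  ghcr.io/essentialkaos/atlassian-cloud-backuper:latest jira
+```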
+
+### CI Status
+
+| Branch | Status |
+|--------|----------|
+| `master` | [![CI](https://kaos.sh/w/atlassian-cloud-backuper/ci.svg?branch=master)](https://kaos.sh/w/atlassian-cloud-backuper/ci?query=branch:master) |
+| `develop` | [![CI](https://kaos.sh/w/atlassian-cloud-backuper/ci.svg?branch=develop)](https://kaos.sh/w/atlassian-cloud-backuper/ci?query=branch:develop) |
+
+### Contributing
+
+Before contributing to this project please read our [Contributing Guidelines](https://github.com/essentialkaos/contributing-guidelines#contributing-guidelines).
+
+### License
+
+[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0)
+
+
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..f42c71e
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,36 @@
+# Security Policies and Procedures
+
+This document outlines security procedures and general policies for all
+ESSENTIAL KAOS projects.
+
+ * [Reporting a Bug](#reporting-a-bug)
+ * [Disclosure Policy](#disclosure-policy)
+
+## Reporting a Bug
+
+The ESSENTIAL KAOS team and community take all security bugs in our projects
+very seriously. Thank you for improving the security of our project. We
+appreciate your efforts and responsible disclosure and will make every effort
+to acknowledge your contributions.
+
+Report security bugs by emailing our security team at security@essentialkaos.com.
+
+The security team will acknowledge your email within 48 hours and will send a
+more detailed response within 48 hours, indicating the next steps in handling
+your report. After the initial reply to your report, the security team will
+endeavor to keep you informed of the progress towards a fix and full
+announcement, and may ask for additional information or guidance.
+
+Report security bugs in third-party dependencies to the person or team
+maintaining the dependencies.
+
+## Disclosure Policy
+
+When the security team receives a security bug report, they will assign it to a
+primary handler. This person will coordinate the fix and release process,
+involving the following steps:
+
+ * Confirm the problem and determine the affected versions;
+ * Audit code to find any similar potential problems;
+ * Prepare fixes for all releases still under maintenance. These fixes will be
+ released as fast as possible.
diff --git a/atlassian-cloud-backuper.go b/atlassian-cloud-backuper.go
new file mode 100644
index 0000000..94d0bd7
--- /dev/null
+++ b/atlassian-cloud-backuper.go
@@ -0,0 +1,28 @@
+package main
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+// //
+// Copyright (c) 2024 ESSENTIAL KAOS //
+// Apache License, Version 2.0 //
+// //
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+import (
+ _ "embed"
+
+ CLI "github.com/essentialkaos/atlassian-cloud-backuper/cli"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+//go:embed go.mod
+var gomod []byte
+
+// gitrev is the short hash of the latest git commit
+var gitrev string
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+func main() {
+ CLI.Run(gitrev, gomod)
+}
diff --git a/backuper/backuper.go b/backuper/backuper.go
new file mode 100644
index 0000000..856b7eb
--- /dev/null
+++ b/backuper/backuper.go
@@ -0,0 +1,83 @@
+package backuper
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+// //
+// Copyright (c) 2024 ESSENTIAL KAOS //
+// Apache License, Version 2.0 //
+// //
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+import (
+ "fmt"
+
+ "github.com/essentialkaos/ek/v12/events"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+const (
+ EVENT_BACKUP_STARTED = "backup-started"
+ EVENT_BACKUP_PROGRESS = "backup-progress"
+ EVENT_BACKUP_SAVING = "backup-saving"
+ EVENT_BACKUP_DONE = "backup-done"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Backuper is a generic backuper interface
+type Backuper interface {
+ // Backup starts backup process
+ Backup() error
+
+ // SetDispatcher sets events dispatcher
+ SetDispatcher(d *events.Dispatcher)
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Config is the backuper configuration struct
+type Config struct {
+ Account string
+ Email string
+ APIKey string
+ OutputFile string
+ WithAttachments bool
+ ForCloud bool
+}
+
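+// ProgressInfo contains info about the progress of the backup process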
+type ProgressInfo struct {
+ Message string
+ Progress int
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+var (
+ ErrEmptyAccount = fmt.Errorf("Configuration validation error: account is empty")
+ ErrEmptyEmail = fmt.Errorf("Configuration validation error: email is empty")
+ ErrEmptyAPIKey = fmt.Errorf("Configuration validation error: API key is empty")
+ ErrEmptyOutputFile = fmt.Errorf("Configuration validation error: output file is empty")
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Validate validates configuration struct
+func (c Config) Validate() error {
+ switch {
+ case c.Account == "":
+ return ErrEmptyAccount
+ case c.Email == "":
+ return ErrEmptyEmail
+ case c.APIKey == "":
+ return ErrEmptyAPIKey
+ case c.OutputFile == "":
+ return ErrEmptyOutputFile
+ }
+
+ return nil
+}
+
+// AccountURL returns the URL of the account
+func (c Config) AccountURL() string {
+ return "https://" + c.Account + ".atlassian.net"
+}
diff --git a/backuper/confluence/confluence-backuper.go b/backuper/confluence/confluence-backuper.go
new file mode 100644
index 0000000..54e9538
--- /dev/null
+++ b/backuper/confluence/confluence-backuper.go
@@ -0,0 +1,265 @@
+package confluence
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+// //
+// Copyright (c) 2024 ESSENTIAL KAOS //
+// Apache License, Version 2.0 //
+// //
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/essentialkaos/ek/v12/events"
+ "github.com/essentialkaos/ek/v12/fmtutil"
+ "github.com/essentialkaos/ek/v12/fsutil"
+ "github.com/essentialkaos/ek/v12/log"
+ "github.com/essentialkaos/ek/v12/req"
+
+ "github.com/essentialkaos/atlassian-cloud-backuper/backuper"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
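+// ConfluenceBackuper is a backuper instance for Confluence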
+type ConfluenceBackuper struct {
+ config *backuper.Config
+ dispatcher *events.Dispatcher
+}
+
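+// BackupPrefs contains backup preferences sent to the Confluence API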
+type BackupPrefs struct {
+ WithAttachments bool `json:"cbAttachments"`
+ ForCloud bool `json:"exportToCloud"`
+}
+
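+// BackupProgressInfo contains backup progress info returned by the Confluence API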
+type BackupProgressInfo struct {
+ CurrentStatus string `json:"currentStatus"`
+ AlternativePercentage string `json:"alternativePercentage"`
+ Filename string `json:"fileName"`
+ Size int `json:"size"`
+ Time int `json:"time"`
+ ConcurrentBackupInProgress bool `json:"concurrentBackupInProgress"`
+ IsOutdated bool `json:"isOutdated"`
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
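+// NewBackuper creates a new Confluence backuper instance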
+func NewBackuper(config *backuper.Config) (*ConfluenceBackuper, error) {
+ err := config.Validate()
+
+ if err != nil {
+ return nil, err
+ }
+
+ return &ConfluenceBackuper{config, nil}, nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// SetDispatcher sets events dispatcher
+func (b *ConfluenceBackuper) SetDispatcher(d *events.Dispatcher) {
+ if b != nil {
+ b.dispatcher = d
+ }
+}
+
+// Backup starts backup process
+func (b *ConfluenceBackuper) Backup() error {
+ var err error
+ var backupFile string
+
+ log.Info("Starting Confluence backup process for account %sā¦", b.config.Account)
+ log.Info("Checking for existing backup taskā¦")
+
+ start := time.Now()
+ info, _ := b.getBackupProgress()
+
+ if info != nil && !info.IsOutdated {
+ log.Info("Found previously created backup task")
+ } else {
+ err = b.startBackup()
+
+ if err != nil {
+ return fmt.Errorf("Can't start backup: %w", err)
+ }
+ }
+
+ b.dispatcher.DispatchAndWait(backuper.EVENT_BACKUP_STARTED, nil)
+
+ errNum := 0
+ lastProgress := ""
+
+ for range time.NewTicker(15 * time.Second).C {
+ progressInfo, err := b.getBackupProgress()
+
+ if err != nil {
+ log.Error("Got error while checking progress: %v", err)
+ errNum++
+
+ if errNum > 10 {
+ return fmt.Errorf("Can't download backup: too many errors")
+ }
+
+ // progressInfo is nil when an error occurred, so skip the rest of this iteration
+ continue
+ }
+
+ errNum = 0
+
+ if time.Since(start) > 6*time.Hour {
+ return fmt.Errorf("Can't download backup: backup task took too much time")
+ }
+
+ b.dispatcher.Dispatch(backuper.EVENT_BACKUP_PROGRESS, b.convertProgressInfo(progressInfo))
+
+ if progressInfo.Size == 0 && progressInfo.AlternativePercentage >= lastProgress {
+ log.Info(
+ "(%s) Backup in progress: %s",
+ progressInfo.AlternativePercentage,
+ progressInfo.CurrentStatus,
+ )
+ lastProgress = progressInfo.AlternativePercentage
+ }
+
+ if progressInfo.Size != 0 && progressInfo.Filename != "" {
+ backupFile = progressInfo.Filename
+ break
+ }
+ }
+
+ log.Info("Backup is ready for download, fetching fileā¦")
+ log.Info("Writing backup file into %s", b.config.OutputFile)
+
+ b.dispatcher.DispatchAndWait(backuper.EVENT_BACKUP_SAVING, nil)
+
+ err = b.downloadBackup(backupFile)
+
+ if err != nil {
+ return err
+ }
+
+ b.dispatcher.DispatchAndWait(backuper.EVENT_BACKUP_DONE, nil)
+
+ log.Info(
+ "Backup successfully saved (size: %s)",
+ fmtutil.PrettySize(fsutil.GetSize(b.config.OutputFile)),
+ )
+
+ return nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// startBackup starts backup process
+func (b *ConfluenceBackuper) startBackup() error {
+ resp, err := req.Request{
+ URL: b.config.AccountURL() + "/wiki/rest/obm/1.0/runbackup",
+ BasicAuthUsername: b.config.Email,
+ BasicAuthPassword: b.config.APIKey,
+ Accept: req.CONTENT_TYPE_JSON,
+ ContentType: req.CONTENT_TYPE_JSON,
+ Body: &BackupPrefs{
+ WithAttachments: b.config.WithAttachments,
+ ForCloud: b.config.ForCloud,
+ },
+ }.Post()
+
+ if err != nil {
+ return err
+ }
+
+ if resp.StatusCode != 200 {
+ return fmt.Errorf("API returned non-ok status code (%d)", resp.StatusCode)
+ }
+
+ return nil
+}
+
+// getBackupProgress returns backup progress info
+func (b *ConfluenceBackuper) getBackupProgress() (*BackupProgressInfo, error) {
+ resp, err := req.Request{
+ URL: b.config.AccountURL() + "/wiki/rest/obm/1.0/getprogress",
+ BasicAuthUsername: b.config.Email,
+ BasicAuthPassword: b.config.APIKey,
+ Accept: req.CONTENT_TYPE_JSON,
+ ContentType: req.CONTENT_TYPE_JSON,
+ AutoDiscard: true,
+ }.Get()
+
+ if err != nil {
+ return nil, err
+ }
+
+ if resp.StatusCode != 200 {
+ return nil, fmt.Errorf("API returned non-ok status code (%d)", resp.StatusCode)
+ }
+
+ progressInfo := &BackupProgressInfo{}
+ err = resp.JSON(progressInfo)
+
+ if err != nil {
+ return nil, fmt.Errorf("Can't decode API response: %v", err)
+ }
+
+ // Remove useless dot from the end of current status message
+ progressInfo.CurrentStatus = strings.TrimRight(progressInfo.CurrentStatus, ".")
+
+ return progressInfo, nil
+}
+
+// convertProgressInfo converts progress info from internal format to general backuper format
+func (b *ConfluenceBackuper) convertProgressInfo(i *BackupProgressInfo) *backuper.ProgressInfo {
+ perc, err := strconv.Atoi(strings.TrimRight(i.AlternativePercentage, "%"))
+
+ if err != nil {
+ return &backuper.ProgressInfo{Message: "Unknown status", Progress: 0}
+ }
+
+ return &backuper.ProgressInfo{
+ Message: i.CurrentStatus,
+ Progress: perc,
+ }
+}
+
+// downloadBackup downloads backup and saves it as a file
+func (b *ConfluenceBackuper) downloadBackup(backupFile string) error {
+ backupFileURL := b.config.AccountURL() + "/wiki/download/" + backupFile
+
+ log.Debug("Downloading file from %s", backupFileURL)
+
+ resp, err := req.Request{
+ URL: backupFileURL,
+ BasicAuthUsername: b.config.Email,
+ BasicAuthPassword: b.config.APIKey,
+ AutoDiscard: true,
+ }.Get()
+
+ if err != nil {
+ return err
+ }
+
+ if resp.StatusCode != 200 {
+ return fmt.Errorf("API returned non-ok status code (%d)", resp.StatusCode)
+ }
+
+ fd, err := os.OpenFile(b.config.OutputFile, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0600)
+
+ if err != nil {
+ return fmt.Errorf("Can't open file for saving data: %w", err)
+ }
+
+ defer fd.Close()
+
+ w := bufio.NewWriter(fd)
+ _, err = io.Copy(w, resp.Body)
+
+ if err != nil {
+ return fmt.Errorf("File writing error: %w", err)
+ }
+
+ return w.Flush()
+}
diff --git a/backuper/jira/jira-backuper.go b/backuper/jira/jira-backuper.go
new file mode 100644
index 0000000..831eb8b
--- /dev/null
+++ b/backuper/jira/jira-backuper.go
@@ -0,0 +1,280 @@
+package jira
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+// //
+// Copyright (c) 2024 ESSENTIAL KAOS //
+// Apache License, Version 2.0 //
+// //
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os"
+ "time"
+
+ "github.com/essentialkaos/ek/v12/events"
+ "github.com/essentialkaos/ek/v12/fmtutil"
+ "github.com/essentialkaos/ek/v12/fsutil"
+ "github.com/essentialkaos/ek/v12/log"
+ "github.com/essentialkaos/ek/v12/req"
+
+ "github.com/essentialkaos/atlassian-cloud-backuper/backuper"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
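+// JiraBackuper is a backuper instance for Jira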
+type JiraBackuper struct {
+ config *backuper.Config
+ dispatcher *events.Dispatcher
+}
+
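+// BackupPrefs contains backup preferences sent to the Jira API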
+type BackupPrefs struct {
+ WithAttachments bool `json:"cbAttachments"`
+ ForCloud bool `json:"exportToCloud"`
+}
+
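+// BackupTaskInfo contains the ID of a backup task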
+type BackupTaskInfo struct {
+ TaskID string `json:"taskId"`
+}
+
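+// BackupProgressInfo contains backup progress info returned by the Jira API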
+type BackupProgressInfo struct {
+ Status string `json:"status"`
+ Desc string `json:"description"`
+ Message string `json:"message"`
+ Result string `json:"result"`
+ ExportType string `json:"exportType"`
+ Progress int `json:"progress"`
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
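+// NewBackuper creates a new Jira backuper instance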
+func NewBackuper(config *backuper.Config) (*JiraBackuper, error) {
+ err := config.Validate()
+
+ if err != nil {
+ return nil, err
+ }
+
+ return &JiraBackuper{config, nil}, nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// SetDispatcher sets events dispatcher
+func (b *JiraBackuper) SetDispatcher(d *events.Dispatcher) {
+ if b != nil {
+ b.dispatcher = d
+ }
+}
+
+// Backup starts backup process
+func (b *JiraBackuper) Backup() error {
+ var err error
+ var backupTaskID, backupFile string
+
+ log.Info("Starting Jira backup process for account %sā¦", b.config.Account)
+ log.Info("Checking for existing backup taskā¦")
+
+ start := time.Now()
+ backupTaskID, _ = b.getLastTaskID()
+
+ if backupTaskID != "" {
+ log.Info("Found previously created backup task with ID %s", backupTaskID)
+ } else {
+ log.Info("No previously created task found, run backupā¦")
+
+ backupTaskID, err = b.startBackup()
+
+ if err != nil {
+ return fmt.Errorf("Can't start backup: %w", err)
+ }
+ }
+
+ b.dispatcher.DispatchAndWait(backuper.EVENT_BACKUP_STARTED, nil)
+
+ errNum := 0
+ lastProgress := -1
+
+ for range time.NewTicker(15 * time.Second).C {
+ progressInfo, err := b.getTaskProgress(backupTaskID)
+
+ if err != nil {
+ log.Error("Got error while checking progress: %v", err)
+ errNum++
+
+ if errNum > 10 {
+ return fmt.Errorf("Can't download backup: too many errors")
+ }
+
+ // progressInfo is nil when an error occurred, so skip the rest of this iteration
+ continue
+ }
+
+ errNum = 0
+
+ if time.Since(start) > 6*time.Hour {
+ return fmt.Errorf("Can't download backup: backup task took too much time")
+ }
+
+ b.dispatcher.Dispatch(
+ backuper.EVENT_BACKUP_PROGRESS,
+ &backuper.ProgressInfo{Message: progressInfo.Message, Progress: progressInfo.Progress},
+ )
+
+ if progressInfo.Progress < 100 && progressInfo.Progress >= lastProgress {
+ log.Info("(%d%%) Backup in progress: %s", progressInfo.Progress, progressInfo.Message)
+ lastProgress = progressInfo.Progress
+ }
+
+ if progressInfo.Progress >= 100 && progressInfo.Result != "" {
+ backupFile = progressInfo.Result
+ break
+ }
+ }
+
+ log.Info("Backup is ready for download, fetching fileā¦")
+ log.Info("Writing backup file into %s", b.config.OutputFile)
+
+ b.dispatcher.DispatchAndWait(backuper.EVENT_BACKUP_SAVING, nil)
+
+ err = b.downloadBackup(backupFile)
+
+ if err != nil {
+ return err
+ }
+
+ b.dispatcher.DispatchAndWait(backuper.EVENT_BACKUP_DONE, nil)
+
+ log.Info(
+ "Backup successfully saved (size: %s)",
+ fmtutil.PrettySize(fsutil.GetSize(b.config.OutputFile)),
+ )
+
+ return nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// startBackup starts backup process
+func (b *JiraBackuper) startBackup() (string, error) {
+ resp, err := req.Request{
+ URL: b.config.AccountURL() + "/rest/backup/1/export/runbackup",
+ BasicAuthUsername: b.config.Email,
+ BasicAuthPassword: b.config.APIKey,
+ Accept: req.CONTENT_TYPE_JSON,
+ ContentType: req.CONTENT_TYPE_JSON,
+ Body: &BackupPrefs{
+ WithAttachments: b.config.WithAttachments,
+ ForCloud: b.config.ForCloud,
+ },
+ }.Post()
+
+ if err != nil {
+ return "", err
+ }
+
+ if resp.StatusCode != 200 {
+ return "", fmt.Errorf("API returned non-ok status code (%d)", resp.StatusCode)
+ }
+
+ backupInfo := &BackupTaskInfo{}
+ err = resp.JSON(backupInfo)
+
+ if err != nil {
+ return "", fmt.Errorf("Can't decode API response: %v", err)
+ }
+
+ return backupInfo.TaskID, nil
+}
+
+// getLastTaskID returns ID of the last task for backup
+func (b *JiraBackuper) getLastTaskID() (string, error) {
+ resp, err := req.Request{
+ URL: b.config.AccountURL() + "/rest/backup/1/export/lastTaskId",
+ BasicAuthUsername: b.config.Email,
+ BasicAuthPassword: b.config.APIKey,
+ Accept: req.CONTENT_TYPE_JSON,
+ ContentType: req.CONTENT_TYPE_JSON,
+ AutoDiscard: true,
+ }.Get()
+
+ if err != nil {
+ return "", err
+ }
+
+ if resp.StatusCode != 200 {
+ return "", fmt.Errorf("API returned non-ok status code (%d)", resp.StatusCode)
+ }
+
+ return resp.String(), nil
+}
+
+// getTaskProgress returns progress for task
+func (b *JiraBackuper) getTaskProgress(taskID string) (*BackupProgressInfo, error) {
+ resp, err := req.Request{
+ URL: b.config.AccountURL() + "/rest/backup/1/export/getProgress",
+ BasicAuthUsername: b.config.Email,
+ BasicAuthPassword: b.config.APIKey,
+ Accept: req.CONTENT_TYPE_JSON,
+ ContentType: req.CONTENT_TYPE_JSON,
+ Query: req.Query{"taskId": taskID},
+ AutoDiscard: true,
+ }.Get()
+
+ if err != nil {
+ return nil, err
+ }
+
+ if resp.StatusCode != 200 {
+ return nil, fmt.Errorf("API returned non-ok status code (%d)", resp.StatusCode)
+ }
+
+ progressInfo := &BackupProgressInfo{}
+ err = resp.JSON(progressInfo)
+
+ if err != nil {
+ return nil, fmt.Errorf("Can't decode API response: %v", err)
+ }
+
+ return progressInfo, nil
+}
+
+// downloadBackup downloads backup and saves it as a file
+func (b *JiraBackuper) downloadBackup(backupFile string) error {
+ backupFileURL := b.config.AccountURL() + "/plugins/servlet/" + backupFile
+
+ log.Debug("Downloading file from %s", backupFileURL)
+
+ resp, err := req.Request{
+ URL: backupFileURL,
+ BasicAuthUsername: b.config.Email,
+ BasicAuthPassword: b.config.APIKey,
+ AutoDiscard: true,
+ }.Get()
+
+ if err != nil {
+ return err
+ }
+
+ if resp.StatusCode != 200 {
+ return fmt.Errorf("API returned non-ok status code (%d)", resp.StatusCode)
+ }
+
+ fd, err := os.OpenFile(b.config.OutputFile, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0600)
+
+ if err != nil {
+ return fmt.Errorf("Can't open file for saving data: %w", err)
+ }
+
+ defer fd.Close()
+
+ w := bufio.NewWriter(fd)
+ _, err = io.Copy(w, resp.Body)
+
+ if err != nil {
+ return fmt.Errorf("File writing error: %w", err)
+ }
+
+ return w.Flush()
+}
diff --git a/cli/cli.go b/cli/cli.go
new file mode 100644
index 0000000..c930cfd
--- /dev/null
+++ b/cli/cli.go
@@ -0,0 +1,758 @@
+package app
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+// //
+// Copyright (c) 2024 ESSENTIAL KAOS //
+// Apache License, Version 2.0 //
+// //
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+import (
+ "encoding/base64"
+ "fmt"
+ "os"
+ "strings"
+ "time"
+
+ "github.com/essentialkaos/ek/v12/errutil"
+ "github.com/essentialkaos/ek/v12/events"
+ "github.com/essentialkaos/ek/v12/fmtc"
+ "github.com/essentialkaos/ek/v12/fmtutil"
+ "github.com/essentialkaos/ek/v12/fsutil"
+ "github.com/essentialkaos/ek/v12/knf"
+ "github.com/essentialkaos/ek/v12/log"
+ "github.com/essentialkaos/ek/v12/options"
+ "github.com/essentialkaos/ek/v12/path"
+ "github.com/essentialkaos/ek/v12/req"
+ "github.com/essentialkaos/ek/v12/spinner"
+ "github.com/essentialkaos/ek/v12/support"
+ "github.com/essentialkaos/ek/v12/support/deps"
+ "github.com/essentialkaos/ek/v12/system/container"
+ "github.com/essentialkaos/ek/v12/terminal/tty"
+ "github.com/essentialkaos/ek/v12/timeutil"
+ "github.com/essentialkaos/ek/v12/tmp"
+ "github.com/essentialkaos/ek/v12/usage"
+ "github.com/essentialkaos/ek/v12/usage/completion/bash"
+ "github.com/essentialkaos/ek/v12/usage/completion/fish"
+ "github.com/essentialkaos/ek/v12/usage/completion/zsh"
+ "github.com/essentialkaos/ek/v12/usage/man"
+ "github.com/essentialkaos/ek/v12/usage/update"
+
+ knfu "github.com/essentialkaos/ek/v12/knf/united"
+ knfv "github.com/essentialkaos/ek/v12/knf/validators"
+ knff "github.com/essentialkaos/ek/v12/knf/validators/fs"
+ knfn "github.com/essentialkaos/ek/v12/knf/validators/network"
+
+ "github.com/essentialkaos/atlassian-cloud-backuper/backuper"
+ "github.com/essentialkaos/atlassian-cloud-backuper/backuper/confluence"
+ "github.com/essentialkaos/atlassian-cloud-backuper/backuper/jira"
+
+ "github.com/essentialkaos/atlassian-cloud-backuper/uploader"
+ "github.com/essentialkaos/atlassian-cloud-backuper/uploader/fs"
+ "github.com/essentialkaos/atlassian-cloud-backuper/uploader/s3"
+ "github.com/essentialkaos/atlassian-cloud-backuper/uploader/sftp"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Basic utility info
+const (
+ APP = "Atlassian Cloud Backuper"
+ VER = "0.0.1"
+ DESC = "Tool for backing up Atlassian cloud services (Jira and Confluence)"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Options
+const (
+ OPT_CONFIG = "c:config"
+ OPT_INTERACTIVE = "I:interactive"
+ OPT_NO_COLOR = "nc:no-color"
+ OPT_HELP = "h:help"
+ OPT_VER = "v:version"
+
+ OPT_VERB_VER = "vv:verbose-version"
+ OPT_COMPLETION = "completion"
+ OPT_GENERATE_MAN = "generate-man"
+)
+
+const (
+ ACCESS_ACCOUNT = "access:account"
+ ACCESS_EMAIL = "access:email"
+ ACCESS_API_KEY = "access:api-key"
+ STORAGE_TYPE = "storage:type"
+ STORAGE_FS_PATH = "storage-fs:path"
+ STORAGE_FS_MODE = "storage-fs:mode"
+ STORAGE_SFTP_HOST = "storage-sftp:host"
+ STORAGE_SFTP_USER = "storage-sftp:user"
+ STORAGE_SFTP_KEY = "storage-sftp:key"
+ STORAGE_SFTP_PATH = "storage-sftp:path"
+ STORAGE_SFTP_MODE = "storage-sftp:mode"
+ STORAGE_S3_HOST = "storage-s3:host"
+ STORAGE_S3_REGION = "storage-s3:region"
+ STORAGE_S3_ACCESS_KEY = "storage-s3:access-key"
+ STORAGE_S3_SECRET_KEY = "storage-s3:secret-key"
+ STORAGE_S3_BUCKET = "storage-s3:bucket"
+ STORAGE_S3_PATH = "storage-s3:path"
+ JIRA_OUTPUT_FILE = "jira:output-file"
+ JIRA_INCLUDE_ATTACHMENTS = "jira:include-attachments"
+ JIRA_CLOUD_FORMAT = "jira:cloud-format"
+ CONFLUENCE_OUTPUT_FILE = "confluence:output-file"
+ CONFLUENCE_INCLUDE_ATTACHMENTS = "confluence:include-attachments"
+ CONFLUENCE_CLOUD_FORMAT = "confluence:cloud-format"
+ TEMP_DIR = "temp:dir"
+ LOG_DIR = "log:dir"
+ LOG_FILE = "log:file"
+ LOG_FORMAT = "log:format"
+ LOG_MODE = "log:mode"
+ LOG_LEVEL = "log:level"
+)
+
+const (
+ TARGET_JIRA = "jira"
+ TARGET_CONFLUENCE = "confluence"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// optMap contains information about all supported options
+var optMap = options.Map{
+ OPT_CONFIG: {Value: "/etc/atlassian-cloud-backuper.knf"},
+ OPT_INTERACTIVE: {Type: options.BOOL},
+ OPT_NO_COLOR: {Type: options.BOOL},
+ OPT_HELP: {Type: options.BOOL},
+ OPT_VER: {Type: options.MIXED},
+
+ OPT_VERB_VER: {Type: options.BOOL},
+ OPT_COMPLETION: {},
+ OPT_GENERATE_MAN: {Type: options.BOOL},
+}
+
+// temp is temp data manager
+var temp *tmp.Temp
+
+// color tags for app name and version
+var colorTagApp, colorTagVer string
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Run is main utility function
+func Run(gitRev string, gomod []byte) {
+ preConfigureUI()
+ addExtraOptions(optMap)
+
+ args, errs := options.Parse(optMap)
+
+ if len(errs) != 0 {
+ printError(errs[0].Error())
+ os.Exit(1)
+ }
+
+ configureUI()
+
+ switch {
+ case options.Has(OPT_COMPLETION):
+ os.Exit(printCompletion())
+ case options.Has(OPT_GENERATE_MAN):
+ printMan()
+ os.Exit(0)
+ case options.GetB(OPT_VER):
+ genAbout(gitRev).Print(options.GetS(OPT_VER))
+ os.Exit(0)
+ case options.GetB(OPT_VERB_VER):
+ support.Collect(APP, VER).
+ WithRevision(gitRev).
+ WithDeps(deps.Extract(gomod)).
+ WithChecks(getServiceStatus("Jira Software")).
+ WithChecks(getServiceStatus("Jira Service Management")).
+ WithChecks(getServiceStatus("Jira Work Management")).
+ WithChecks(getServiceStatus("Confluence")).
+ Print()
+ os.Exit(0)
+ case options.GetB(OPT_HELP) || len(args) == 0:
+ genUsage().Print()
+ os.Exit(0)
+ }
+
+ err := errutil.Chain(
+ loadConfig,
+ validateConfig,
+ setupLogger,
+ setupTemp,
+ )
+
+ if err != nil {
+ printError(err.Error())
+ os.Exit(1)
+ }
+
+ log.Divider()
+ log.Aux("%s %s starting…", APP, VER)
+
+ if !process(args.Get(0).String()) {
+ os.Exit(1)
+ }
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// preConfigureUI preconfigures UI based on information about user terminal
+func preConfigureUI() {
+ if !tty.IsTTY() {
+ fmtc.DisableColors = true
+ }
+
+ switch {
+ case fmtc.IsTrueColorSupported():
+ colorTagApp, colorTagVer = "{*}{#00AFFF}", "{#00AFFF}"
+ case fmtc.Is256ColorsSupported():
+ colorTagApp, colorTagVer = "{*}{#39}", "{#39}"
+ default:
+ colorTagApp, colorTagVer = "{*}{c}", "{c}"
+ }
+}
+
+// addExtraOptions adds additional options for configuration when the application
+// is running inside a container
+func addExtraOptions(m options.Map) {
+ if !container.IsContainer() {
+ return
+ }
+
+ knfu.AddOptions(m,
+ ACCESS_ACCOUNT,
+ ACCESS_EMAIL,
+ ACCESS_API_KEY,
+ STORAGE_TYPE,
+ STORAGE_FS_PATH,
+ STORAGE_FS_MODE,
+ STORAGE_SFTP_HOST,
+ STORAGE_SFTP_USER,
+ STORAGE_SFTP_KEY,
+ STORAGE_SFTP_PATH,
+ STORAGE_SFTP_MODE,
+ STORAGE_S3_HOST,
+ STORAGE_S3_REGION,
+ STORAGE_S3_ACCESS_KEY,
+ STORAGE_S3_SECRET_KEY,
+ STORAGE_S3_BUCKET,
+ STORAGE_S3_PATH,
+ JIRA_OUTPUT_FILE,
+ JIRA_INCLUDE_ATTACHMENTS,
+ JIRA_CLOUD_FORMAT,
+ CONFLUENCE_OUTPUT_FILE,
+ CONFLUENCE_INCLUDE_ATTACHMENTS,
+ CONFLUENCE_CLOUD_FORMAT,
+ TEMP_DIR,
+ LOG_FORMAT,
+ LOG_LEVEL,
+ )
+}
+
+// configureUI configures user interface
+func configureUI() {
+ if options.GetB(OPT_NO_COLOR) {
+ fmtc.DisableColors = true
+ }
+
+ req.SetUserAgent("AtlassianCloudBackuper", VER)
+}
+
+// loadConfig loads configuration file
+func loadConfig() error {
+ config, err := knf.Read(options.GetS(OPT_CONFIG))
+
+ if err != nil {
+ return fmt.Errorf("Can't load configuration: %w", err)
+ }
+
+ if !container.IsContainer() {
+ knfu.Combine(config)
+ } else {
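+ // In container mode every property can also be provided through the united
+ // configuration (command-line options and environment variables)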
+ knfu.Combine(
+ config,
+ knfu.Simple(ACCESS_ACCOUNT),
+ knfu.Simple(ACCESS_EMAIL),
+ knfu.Simple(ACCESS_API_KEY),
+ knfu.Simple(STORAGE_TYPE),
+ knfu.Simple(STORAGE_FS_PATH),
+ knfu.Simple(STORAGE_FS_MODE),
+ knfu.Simple(STORAGE_SFTP_HOST),
+ knfu.Simple(STORAGE_SFTP_USER),
+ knfu.Simple(STORAGE_SFTP_KEY),
+ knfu.Simple(STORAGE_SFTP_PATH),
+ knfu.Simple(STORAGE_SFTP_MODE),
+ knfu.Simple(STORAGE_S3_HOST),
+ knfu.Simple(STORAGE_S3_REGION),
+ knfu.Simple(STORAGE_S3_ACCESS_KEY),
+ knfu.Simple(STORAGE_S3_SECRET_KEY),
+ knfu.Simple(STORAGE_S3_BUCKET),
+ knfu.Simple(STORAGE_S3_PATH),
+ knfu.Simple(JIRA_OUTPUT_FILE),
+ knfu.Simple(JIRA_INCLUDE_ATTACHMENTS),
+ knfu.Simple(JIRA_CLOUD_FORMAT),
+ knfu.Simple(CONFLUENCE_OUTPUT_FILE),
+ knfu.Simple(CONFLUENCE_INCLUDE_ATTACHMENTS),
+ knfu.Simple(CONFLUENCE_CLOUD_FORMAT),
+ knfu.Simple(TEMP_DIR),
+ knfu.Simple(LOG_DIR),
+ knfu.Simple(LOG_FILE),
+ knfu.Simple(LOG_FORMAT),
+ knfu.Simple(LOG_MODE),
+ knfu.Simple(LOG_LEVEL),
+ )
+ }
+
+ return nil
+}
+
+// validateConfig validates configuration file values
+func validateConfig() error {
+ validators := []*knf.Validator{
+ {ACCESS_ACCOUNT, knfv.Empty, nil},
+ {ACCESS_EMAIL, knfv.Empty, nil},
+ {ACCESS_API_KEY, knfv.Empty, nil},
+ {ACCESS_EMAIL, knfn.Mail, nil},
+ {STORAGE_TYPE, knfv.NotContains, []string{
+ "fs", "sftp", "s3",
+ }},
+ {LOG_FORMAT, knfv.NotContains, []string{
+ "", "text", "json",
+ }},
+ {LOG_LEVEL, knfv.NotContains, []string{
+ "", "debug", "info", "warn", "error", "crit",
+ }},
+ {TEMP_DIR, knff.Perms, "DW"},
+ }
+
+ switch knfu.GetS(STORAGE_TYPE) {
+ case "fs":
+ validators = append(validators,
+ &knf.Validator{STORAGE_FS_PATH, knff.Perms, "DRW"},
+ )
+
+ case "sftp":
+ validators = append(validators,
+ &knf.Validator{STORAGE_SFTP_HOST, knfv.Empty, nil},
+ &knf.Validator{STORAGE_SFTP_USER, knfv.Empty, nil},
+ &knf.Validator{STORAGE_SFTP_KEY, knfv.Empty, nil},
+ &knf.Validator{STORAGE_SFTP_PATH, knfv.Empty, nil},
+ )
+
+ case "s3":
+ validators = append(validators,
+ &knf.Validator{STORAGE_S3_HOST, knfv.Empty, nil},
+ &knf.Validator{STORAGE_S3_ACCESS_KEY, knfv.Empty, nil},
+ &knf.Validator{STORAGE_S3_SECRET_KEY, knfv.Empty, nil},
+ &knf.Validator{STORAGE_S3_BUCKET, knfv.Empty, nil},
+ &knf.Validator{STORAGE_S3_PATH, knfv.Empty, nil},
+ )
+ }
+
+ errs := knfu.Validate(validators)
+
+ if len(errs) > 0 {
+ return errs[0]
+ }
+
+ return nil
+}
+
+// setupLogger configures logger subsystem
+func setupLogger() error {
+ var err error
+
+ if knfu.GetS(LOG_FILE) != "" {
+ err = log.Set(knfu.GetS(LOG_FILE), knfu.GetM(LOG_MODE, 0640))
+
+ if err != nil {
+ return err
+ }
+ }
+
+ err = log.MinLevel(knfu.GetS(LOG_LEVEL, "info"))
+
+ if err != nil {
+ return err
+ }
+
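+ // Use JSON log format by default when running inside a container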
+ if knfu.GetS(LOG_FORMAT) == "" && container.IsContainer() {
+ log.Global.UseJSON = true
+ } else {
+ switch knfu.GetS(LOG_FORMAT) {
+ case "json":
+ log.Global.UseJSON = true
+ case "text", "":
+ // default
+ default:
+ return fmt.Errorf("Unknown log format %q", knfu.GetS(LOG_FORMAT))
+ }
+ }
+
+ return nil
+}
+
+// setupTemp configures temporary directory
+func setupTemp() error {
+ var err error
+
+ temp, err = tmp.NewTemp(knfu.GetS(TEMP_DIR, "/tmp"))
+
+ return err
+}
+
+// process starts backup creation
+func process(target string) bool {
+ var dispatcher *events.Dispatcher
+
+ if options.GetB(OPT_INTERACTIVE) {
+ dispatcher = events.NewDispatcher()
+ addEventsHandlers(dispatcher)
+ }
+
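+ // Remove temporary backup data once processing is finished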
+ defer temp.Clean()
+
+ bkpr, outputFile, err := getBackuper(target)
+
+ if err != nil {
+ log.Crit("Can't start backuping process: %v", err)
+ return false
+ }
+
+ bkpr.SetDispatcher(dispatcher)
+
+ err = bkpr.Backup()
+
+ if err != nil {
+ spinner.Done(false)
+ log.Crit("Error while backuping process: %v", err)
+ return false
+ }
+
+ log.Info("Backup process successfully finished!")
+
+ updr, err := getUploader(target)
+
+ if err != nil {
+ log.Crit("Can't start uploading process: %v", err)
+ return false
+ }
+
+ updr.SetDispatcher(dispatcher)
+
+ err = updr.Upload(outputFile)
+
+ if err != nil {
+ spinner.Done(false)
+ log.Crit("Error while uploading process: %v", err)
+ return false
+ }
+
+ return true
+}
+
+// getBackuper returns backuper instance and output file path
+func getBackuper(target string) (backuper.Backuper, string, error) {
+ var err error
+ var bkpr backuper.Backuper
+
+ bkpConfig, err := getBackuperConfig(target)
+
+ if err != nil {
+ return nil, "", err
+ }
+
+ switch target {
+ case TARGET_JIRA:
+ bkpr, err = jira.NewBackuper(bkpConfig)
+ case TARGET_CONFLUENCE:
+ bkpr, err = confluence.NewBackuper(bkpConfig)
+ }
+
+ return bkpr, bkpConfig.OutputFile, err
+}
+
+// getBackuperConfig returns configuration for backuper
+func getBackuperConfig(target string) (*backuper.Config, error) {
+ tmpDir, err := temp.MkDir()
+
+ if err != nil {
+ return nil, fmt.Errorf("Can't create directory for temporary data: %v", err.Error())
+ }
+
+ switch target {
+ case TARGET_JIRA:
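+ // timeutil.Format expands date tags (%Y, %m, %d) in the output file name template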
+ fileNameTemplate := knfu.GetS(JIRA_OUTPUT_FILE, `jira-backup-%Y-%m-%d`) + ".zip"
+ tmpFile := path.Join(path.Clean(tmpDir), timeutil.Format(time.Now(), fileNameTemplate))
+
+ return &backuper.Config{
+ Account: knfu.GetS(ACCESS_ACCOUNT),
+ Email: knfu.GetS(ACCESS_EMAIL),
+ APIKey: knfu.GetS(ACCESS_API_KEY),
+ OutputFile: tmpFile,
+ WithAttachments: knfu.GetB(JIRA_INCLUDE_ATTACHMENTS),
+ ForCloud: knfu.GetB(JIRA_CLOUD_FORMAT),
+ }, nil
+
+ case TARGET_CONFLUENCE:
+ fileNameTemplate := knfu.GetS(CONFLUENCE_OUTPUT_FILE, `confluence-backup-%Y-%m-%d`) + ".zip"
+ tmpFile := path.Join(path.Clean(tmpDir), timeutil.Format(time.Now(), fileNameTemplate))
+
+ return &backuper.Config{
+ Account: knfu.GetS(ACCESS_ACCOUNT),
+ Email: knfu.GetS(ACCESS_EMAIL),
+ APIKey: knfu.GetS(ACCESS_API_KEY),
+ OutputFile: tmpFile,
+ WithAttachments: knfu.GetB(CONFLUENCE_INCLUDE_ATTACHMENTS),
+ ForCloud: knfu.GetB(CONFLUENCE_CLOUD_FORMAT),
+ }, nil
+ }
+
+ return nil, fmt.Errorf("Unknown target %q", target)
+}
+
+// getUploader returns uploader instance
+func getUploader(target string) (uploader.Uploader, error) {
+ var err error
+ var updr uploader.Uploader
+
+ switch knfu.GetS(STORAGE_TYPE) {
+ case "fs":
+ updr, err = fs.NewUploader(&fs.Config{
+ Path: path.Join(knfu.GetS(STORAGE_FS_PATH), target),
+ Mode: knfu.GetM(STORAGE_FS_MODE, 0600),
+ })
+
+ case "sftp":
+ var keyData []byte
+
+ keyData, err = readPrivateKeyData()
+
+ if err != nil {
+ return nil, err
+ }
+
+ updr, err = sftp.NewUploader(&sftp.Config{
+ Host: knfu.GetS(STORAGE_SFTP_HOST),
+ User: knfu.GetS(STORAGE_SFTP_USER),
+ Key: keyData,
+ Path: path.Join(knfu.GetS(STORAGE_SFTP_PATH), target),
+ Mode: knfu.GetM(STORAGE_SFTP_MODE, 0600),
+ })
+
+ case "s3":
+ updr, err = s3.NewUploader(&s3.Config{
+ Host: knfu.GetS(STORAGE_S3_HOST),
+ Region: knfu.GetS(STORAGE_S3_REGION),
+ AccessKeyID: knfu.GetS(STORAGE_S3_ACCESS_KEY),
+ SecretKey: knfu.GetS(STORAGE_S3_SECRET_KEY),
+ Bucket: knfu.GetS(STORAGE_S3_BUCKET),
+ Path: path.Join(knfu.GetS(STORAGE_S3_PATH), target),
+ })
+
+ default:
+ return nil, fmt.Errorf("Unknown storage type %q", knfu.GetS(STORAGE_TYPE))
+ }
+
+ return updr, err
+}
+
+// readPrivateKeyData reads private key data from a file or from a base64-encoded string
+func readPrivateKeyData() ([]byte, error) {
+ if fsutil.IsExist(knfu.GetS(STORAGE_SFTP_KEY)) {
+ return os.ReadFile(knfu.GetS(STORAGE_SFTP_KEY))
+ }
+
+ return base64.StdEncoding.DecodeString(knfu.GetS(STORAGE_SFTP_KEY))
+}
+
+// addEventsHandlers registers events handlers
+func addEventsHandlers(dispatcher *events.Dispatcher) {
+ dispatcher.AddHandler(backuper.EVENT_BACKUP_STARTED, func(payload any) {
+ fmtc.NewLine()
+ spinner.Show("Starting downloading process")
+ })
+
+ dispatcher.AddHandler(backuper.EVENT_BACKUP_PROGRESS, func(payload any) {
+ p := payload.(*backuper.ProgressInfo)
+ spinner.Update("[%d%%] %s", p.Progress, p.Message)
+ })
+
+ dispatcher.AddHandler(backuper.EVENT_BACKUP_SAVING, func(payload any) {
+ spinner.Done(true)
+ spinner.Show("Fetching backup file")
+ })
+
+ dispatcher.AddHandler(backuper.EVENT_BACKUP_DONE, func(payload any) {
+ spinner.Done(true)
+ })
+
+ dispatcher.AddHandler(uploader.EVENT_UPLOAD_STARTED, func(payload any) {
+ spinner.Show("Uploading backup file to %s storage", payload)
+ })
+
+ dispatcher.AddHandler(uploader.EVENT_UPLOAD_PROGRESS, func(payload any) {
+ p := payload.(*uploader.ProgressInfo)
+ spinner.Update(
+ "[%s] Uploading file (%s/%s)",
+ fmtutil.PrettyPerc(p.Progress),
+ fmtutil.PrettySize(p.Current),
+ fmtutil.PrettySize(p.Total),
+ )
+ })
+
+ dispatcher.AddHandler(uploader.EVENT_UPLOAD_DONE, func(payload any) {
+ spinner.Update("Uploading file")
+ spinner.Done(true)
+ fmtc.NewLine()
+ })
+}
+
+// printError prints error message to console
+func printError(f string, a ...interface{}) {
+ if len(a) == 0 {
+ fmtc.Fprintln(os.Stderr, "{r}"+f+"{!}")
+ } else {
+ fmtc.Fprintf(os.Stderr, "{r}"+f+"{!}\n", a...)
+ }
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// getServiceStatus returns service status from status API
+func getServiceStatus(service string) support.Check {
+ chk := support.Check{support.CHECK_ERROR, service, ""}
+ serviceName := strings.ReplaceAll(strings.ToLower(service), " ", "-")
+
+ resp, err := req.Request{
+ URL: fmt.Sprintf("https://%s.status.atlassian.com/api/v2/status.json", serviceName),
+ AutoDiscard: true,
+ }.Get()
+
+ if err != nil {
+ chk.Message = "Can't send request to status API"
+ return chk
+ }
+
+ if resp.StatusCode != 200 {
+ chk.Message = fmt.Sprintf("Status API returned non-ok status code (%d)", resp.StatusCode)
+ return chk
+ }
+
+ type StatusInfo struct {
+ Desc string `json:"description"`
+ Indicator string `json:"indicator"`
+ }
+
+ type StatusResp struct {
+ Status *StatusInfo `json:"status"`
+ }
+
+ status := &StatusResp{}
+ err = resp.JSON(status)
+
+ if err != nil {
+ chk.Message = err.Error()
+ return chk
+ }
+
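+ // Map status indicator to check status: "none" → OK, "minor" → warning, anything else stays an error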
+ switch status.Status.Indicator {
+ case "minor":
+ chk.Status = support.CHECK_WARN
+ case "none":
+ chk.Status = support.CHECK_OK
+ }
+
+ chk.Message = status.Status.Desc
+
+ return chk
+}
+
+// printCompletion prints completion for given shell
+func printCompletion() int {
+ info := genUsage()
+
+ switch options.GetS(OPT_COMPLETION) {
+ case "bash":
+ fmt.Print(bash.Generate(info, "atlassian-cloud-backuper"))
+ case "fish":
+ fmt.Print(fish.Generate(info, "atlassian-cloud-backuper"))
+ case "zsh":
+ fmt.Print(zsh.Generate(info, optMap, "atlassian-cloud-backuper"))
+ default:
+ return 1
+ }
+
+ return 0
+}
+
+// printMan prints man page
+func printMan() {
+ fmt.Println(man.Generate(genUsage(), genAbout("")))
+}
+
+// addUnitedOption adds info about option from united config
+func addUnitedOption(info *usage.Info, prop, desc, value string) {
+ info.AddOption(knfu.O(prop), desc+" {s-}("+knfu.E(prop)+"){!}", value).ColorTag = "{b}"
+}
+
+// genUsage generates usage info
+func genUsage() *usage.Info {
+ info := usage.NewInfo("", "target")
+
+ info.AddOption(OPT_CONFIG, "Path to configuration file", "file")
+ info.AddOption(OPT_INTERACTIVE, "Interactive mode")
+ info.AddOption(OPT_NO_COLOR, "Disable colors in output")
+ info.AddOption(OPT_HELP, "Show this help message")
+ info.AddOption(OPT_VER, "Show version")
+
+ if container.IsContainer() {
+ addUnitedOption(info, ACCESS_ACCOUNT, "Account name", "name")
+ addUnitedOption(info, ACCESS_EMAIL, "User email with access to API", "email")
+ addUnitedOption(info, ACCESS_API_KEY, "API key", "key")
+ addUnitedOption(info, STORAGE_TYPE, "Storage type", "fs/sftp/s3")
+ addUnitedOption(info, STORAGE_FS_PATH, "Path on system for backups", "path")
+ addUnitedOption(info, STORAGE_FS_MODE, "File mode on system", "mode")
+ addUnitedOption(info, STORAGE_SFTP_HOST, "SFTP host", "host")
+ addUnitedOption(info, STORAGE_SFTP_USER, "SFTP user name", "name")
+ addUnitedOption(info, STORAGE_SFTP_KEY, "Base64-encoded private key", "key")
+ addUnitedOption(info, STORAGE_SFTP_PATH, "Path on SFTP", "path")
+ addUnitedOption(info, STORAGE_SFTP_MODE, "File mode on SFTP", "mode")
+ addUnitedOption(info, STORAGE_S3_HOST, "S3 host", "host")
+ addUnitedOption(info, STORAGE_S3_REGION, "S3 region", "region")
+ addUnitedOption(info, STORAGE_S3_ACCESS_KEY, "S3 access key ID", "id")
+ addUnitedOption(info, STORAGE_S3_SECRET_KEY, "S3 access secret key", "key")
+ addUnitedOption(info, STORAGE_S3_BUCKET, "S3 bucket", "name")
+ addUnitedOption(info, STORAGE_S3_PATH, "Path for backups", "path")
+ addUnitedOption(info, JIRA_OUTPUT_FILE, "Jira backup output file name template", "template")
+ addUnitedOption(info, JIRA_INCLUDE_ATTACHMENTS, "Include attachments in Jira backup", "yes/no")
+ addUnitedOption(info, JIRA_CLOUD_FORMAT, "Create Jira backup for Cloud", "yes/no")
+ addUnitedOption(info, CONFLUENCE_OUTPUT_FILE, "Confluence backup output file name template", "template")
+ addUnitedOption(info, CONFLUENCE_INCLUDE_ATTACHMENTS, "Include attachments in Confluence backup", "yes/no")
+ addUnitedOption(info, CONFLUENCE_CLOUD_FORMAT, "Create Confluence backup for Cloud", "yes/no")
+ addUnitedOption(info, TEMP_DIR, "Path to directory for temporary data", "path")
+ addUnitedOption(info, LOG_FORMAT, "Log format", "text/json")
+ addUnitedOption(info, LOG_LEVEL, "Log level", "level")
+ }
+
+ return info
+}
+
+// genAbout generates info about version
+func genAbout(gitRev string) *usage.About {
+ about := &usage.About{
+ App: APP,
+ Version: VER,
+ Desc: DESC,
+ Year: 2009,
+ Owner: "ESSENTIAL KAOS",
+
+ AppNameColorTag: colorTagApp,
+ VersionColorTag: colorTagVer,
+ DescSeparator: "{s}—{!}",
+
+ License: "Apache License, Version 2.0 ",
+ BugTracker: "https://github.com/essentialkaos/atlassian-cloud-backuper/issues",
+ UpdateChecker: usage.UpdateChecker{"essentialkaos/atlassian-cloud-backuper", update.GitHubChecker},
+ }
+
+ if gitRev != "" {
+ about.Build = "git:" + gitRev
+ }
+
+ return about
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
diff --git a/common/atlassian-cloud-backuper-confluence.service b/common/atlassian-cloud-backuper-confluence.service
new file mode 100644
index 0000000..b740859
--- /dev/null
+++ b/common/atlassian-cloud-backuper-confluence.service
@@ -0,0 +1,9 @@
+[Unit]
+Description=Backup Confluence data using Atlassian Cloud Backuper
+
+[Service]
+Type=oneshot
+ExecStart=/usr/bin/atlassian-cloud-backuper confluence
+
+[Install]
+WantedBy=multi-user.target
diff --git a/common/atlassian-cloud-backuper-confluence.timer b/common/atlassian-cloud-backuper-confluence.timer
new file mode 100644
index 0000000..e366eb2
--- /dev/null
+++ b/common/atlassian-cloud-backuper-confluence.timer
@@ -0,0 +1,10 @@
+[Unit]
+Description=Backup Confluence data using Atlassian Cloud Backuper
+
+[Timer]
+OnCalendar=Sun *-*-* 09:00:00
+RandomizedDelaySec=1h
+Persistent=true
+
+[Install]
+WantedBy=timers.target
diff --git a/common/atlassian-cloud-backuper-container.knf b/common/atlassian-cloud-backuper-container.knf
new file mode 100644
index 0000000..384dd85
--- /dev/null
+++ b/common/atlassian-cloud-backuper-container.knf
@@ -0,0 +1,104 @@
+[access]
+
+ # Account name
+ account:
+
+ # User email with access to API
+ email:
+
+ # API key
+ api-key:
+
+[storage]
+
+ # Storage type (fs/sftp/s3)
+ type:
+
+[storage-fs]
+
+ # Path to directory with backups
+ path:
+
+ # Mode for all files
+ mode:
+
+[storage-sftp]
+
+ # SFTP Hostname or IP with port
+ host:
+
+ # Name of user on SFTP storage
+ user:
+
+ # Path to private key
+ key:
+
+ # Path to directory with backups
+ path:
+
+ # Mode for all files
+ mode:
+
+[storage-s3]
+
+ # Name of host with Amazon S3 HTTP API compatible endpoint
+ host:
+
+ # S3 region
+ region:
+
+ # Access key ID
+ access-key:
+
+ # Secret access key
+ secret-key:
+
+ # Name of bucket
+ bucket:
+
+ # Path to directory with backups
+ path:
+
+[jira]
+
+ # Backup file name with date tags (default: jira-backup-%Y-%m-%d.zip)
+ output-file:
+
+ # Include attachments in the backup
+ include-attachments: true
+
+ # Export to the cloud format
+ cloud-format: true
+
+[confluence]
+
+ # Backup file name with date tags (default: confluence-backup-%Y-%m-%d.zip)
+ output-file:
+
+ # Include attachments in the backup
+ include-attachments: true
+
+ # Export to the cloud format
+ cloud-format: true
+
+[temp]
+
+ # Path to directory for temporary data
+ dir: /tmp
+
+[log]
+
+ # Log file dir
+ dir:
+
+ # Path to log file
+ file:
+
+ # Log format (text/json)
+ format: json
+
+ # Log file mode
+ mode: 600
+
+ # Default log level (debug/info/warn/error/crit)
+ level: info
diff --git a/common/atlassian-cloud-backuper-jira.service b/common/atlassian-cloud-backuper-jira.service
new file mode 100644
index 0000000..832b36d
--- /dev/null
+++ b/common/atlassian-cloud-backuper-jira.service
@@ -0,0 +1,9 @@
+[Unit]
+Description=Backup Jira data using Atlassian Cloud Backuper
+
+[Service]
+Type=oneshot
+ExecStart=/usr/bin/atlassian-cloud-backuper jira
+
+[Install]
+WantedBy=multi-user.target
diff --git a/common/atlassian-cloud-backuper-jira.timer b/common/atlassian-cloud-backuper-jira.timer
new file mode 100644
index 0000000..72d7ef7
--- /dev/null
+++ b/common/atlassian-cloud-backuper-jira.timer
@@ -0,0 +1,10 @@
+[Unit]
+Description=Backup Jira data using Atlassian Cloud Backuper
+
+[Timer]
+OnCalendar=Sun *-*-* 03:00:00
+RandomizedDelaySec=1h
+Persistent=true
+
+[Install]
+WantedBy=timers.target
diff --git a/common/atlassian-cloud-backuper.cron b/common/atlassian-cloud-backuper.cron
new file mode 100644
index 0000000..daea8bc
--- /dev/null
+++ b/common/atlassian-cloud-backuper.cron
@@ -0,0 +1,14 @@
+################################################################################
+
+SHELL=/bin/bash
+PATH=/sbin:/bin:/usr/sbin:/usr/bin
+MAILTO=root
+HOME=/srv/atlassian-backups
+
+################################################################################
+
+# Backup Jira data
+# 0 3 * * Sun root atlassian-cloud-backuper jira &> /dev/null || :
+
+# Backup Confluence data
+# 0 9 * * Sun root atlassian-cloud-backuper confluence &> /dev/null || :
diff --git a/common/atlassian-cloud-backuper.knf b/common/atlassian-cloud-backuper.knf
new file mode 100644
index 0000000..87bcd2a
--- /dev/null
+++ b/common/atlassian-cloud-backuper.knf
@@ -0,0 +1,104 @@
+[access]
+
+ # Account name
+ account:
+
+ # User email with access to API
+ email:
+
+ # API key
+ api-key:
+
+[storage]
+
+ # Storage type (fs/sftp/s3)
+ type:
+
+[storage-fs]
+
+ # Path to directory with backups
+ path:
+
+ # Mode for all files
+ mode:
+
+[storage-sftp]
+
+ # SFTP Hostname or IP with port
+ host:
+
+ # Name of user on SFTP storage
+ user:
+
+ # Path to private key
+ key:
+
+ # Path to directory with backups
+ path:
+
+ # Mode for all files
+ mode:
+
+[storage-s3]
+
+ # Name of host with Amazon S3 HTTP API compatible endpoint
+ host:
+
+ # S3 region
+ region:
+
+ # Access key ID
+ access-key:
+
+ # Secret access key
+ secret-key:
+
+ # Name of bucket
+ bucket:
+
+ # Path to directory with backups
+ path:
+
+[jira]
+
+ # Backup file name with date tags (default: jira-backup-%Y-%m-%d.zip)
+ output-file:
+
+ # Include attachments in the backup
+ include-attachments: true
+
+ # Export to the cloud format
+ cloud-format: true
+
+[confluence]
+
+ # Backup file name with date tags (default: confluence-backup-%Y-%m-%d.zip)
+ output-file:
+
+ # Include attachments in the backup
+ include-attachments: true
+
+ # Export to the cloud format
+ cloud-format: true
+
+[temp]
+
+ # Path to directory for temporary data
+ dir: /tmp
+
+[log]
+
+ # Log file dir
+ dir: /var/log/atlassian-cloud-backuper
+
+ # Path to log file
+ file: {log:dir}/backup.log
+
+ # Log format (text/json)
+ format:
+
+ # Log file mode
+ mode: 600
+
+ # Default log level (debug/info/warn/error/crit)
+ level: info
diff --git a/common/atlassian-cloud-backuper.logrotate b/common/atlassian-cloud-backuper.logrotate
new file mode 100644
index 0000000..f28fcc9
--- /dev/null
+++ b/common/atlassian-cloud-backuper.logrotate
@@ -0,0 +1,9 @@
+/var/log/atlassian-cloud-backuper/*.log {
+ weekly
+ rotate 8
+ copytruncate
+ delaycompress
+ compress
+ notifempty
+ missingok
+}
diff --git a/common/atlassian-cloud-backuper.spec b/common/atlassian-cloud-backuper.spec
new file mode 100644
index 0000000..d79adbc
--- /dev/null
+++ b/common/atlassian-cloud-backuper.spec
@@ -0,0 +1,114 @@
+################################################################################
+
+%global crc_check pushd ../SOURCES ; sha512sum -c %{SOURCE100} ; popd
+
+################################################################################
+
+%define debug_package %{nil}
+
+################################################################################
+
+Summary: Tool for backing up Atlassian cloud services
+Name: atlassian-cloud-backuper
+Version: 0.0.1
+Release: 0%{?dist}
+Group: Applications/System
+License: Apache License, Version 2.0
+URL: https://kaos.sh/atlassian-cloud-backuper
+
+Source0: https://source.kaos.st/%{name}/%{name}-%{version}.tar.bz2
+
+Source100: checksum.sha512
+
+BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
+
+BuildRequires: golang >= 1.21
+
+Provides: %{name} = %{version}-%{release}
+
+################################################################################
+
+%description
+Tool for backing up Atlassian cloud services (Jira and Confluence).
+
+################################################################################
+
+%prep
+%{crc_check}
+
+%setup -q
+
+%build
+if [[ ! -d "%{name}/vendor" ]] ; then
+ echo "This package requires vendored dependencies"
+ exit 1
+fi
+
+pushd %{name}
+ %{__make} %{?_smp_mflags} all
+popd
+
+%install
+rm -rf %{buildroot}
+
+install -dDm 755 %{buildroot}%{_bindir}
+install -dDm 755 %{buildroot}%{_sysconfdir}/logrotate.d
+install -dDm 755 %{buildroot}%{_localstatedir}/log/%{name}
+
+install -pm 755 %{name}/%{name} \
+ %{buildroot}%{_bindir}/
+
+install -pm 644 %{name}/common/%{name}.knf \
+ %{buildroot}%{_sysconfdir}/
+
+install -pm 644 %{name}/common/%{name}.logrotate \
+ %{buildroot}%{_sysconfdir}/logrotate.d/%{name}
+
+install -pDm 644 %{name}/common/%{name}.cron \
+ %{buildroot}%{_sysconfdir}/cron.d/%{name}
+
+install -pDm 644 %{name}/common/%{name}-confluence.service \
+ %{buildroot}%{_unitdir}/%{name}-confluence.service
+install -pDm 644 %{name}/common/%{name}-confluence.timer \
+ %{buildroot}%{_unitdir}/%{name}-confluence.timer
+install -pDm 644 %{name}/common/%{name}-jira.service \
+ %{buildroot}%{_unitdir}/%{name}-jira.service
+install -pDm 644 %{name}/common/%{name}-jira.timer \
+ %{buildroot}%{_unitdir}/%{name}-jira.timer
+
+# Generate man page
+install -dDm 755 %{buildroot}%{_mandir}/man1
+./%{name}/%{name} --generate-man > %{buildroot}%{_mandir}/man1/%{name}.1
+
+# Generate completions
+install -dDm 755 %{buildroot}%{_sysconfdir}/bash_completion.d
+install -dDm 755 %{buildroot}%{_datadir}/zsh/site-functions
+install -dDm 755 %{buildroot}%{_datarootdir}/fish/vendor_completions.d
+./%{name}/%{name} --completion=bash 1> %{buildroot}%{_sysconfdir}/bash_completion.d/%{name}
+./%{name}/%{name} --completion=zsh 1> %{buildroot}%{_datadir}/zsh/site-functions/_%{name}
+./%{name}/%{name} --completion=fish 1> %{buildroot}%{_datarootdir}/fish/vendor_completions.d/%{name}.fish
+
+%clean
+rm -rf %{buildroot}
+
+################################################################################
+
+%files
+%defattr(-,root,root,-)
+%doc %{name}/LICENSE
+%dir %{_localstatedir}/log/%{name}
+%config(noreplace) %{_sysconfdir}/%{name}.knf
+%config(noreplace) %{_sysconfdir}/logrotate.d/%{name}
+%config(noreplace) %{_unitdir}/%{name}-*
+%config(noreplace) %{_sysconfdir}/cron.d/%{name}
+%{_bindir}/%{name}
+%{_mandir}/man1/%{name}.1.*
+%{_sysconfdir}/bash_completion.d/%{name}
+%{_datadir}/zsh/site-functions/_%{name}
+%{_datarootdir}/fish/vendor_completions.d/%{name}.fish
+
+################################################################################
+
+%changelog
+* Tue Mar 26 2024 Anton Novojilov - 0.0.1-0
+- Initial build for kaos-repo
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..3e3d22f
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,27 @@
+module github.com/essentialkaos/atlassian-cloud-backuper
+
+go 1.18
+
+require (
+ github.com/aws/aws-sdk-go-v2 v1.26.0
+ github.com/aws/aws-sdk-go-v2/credentials v1.17.9
+ github.com/aws/aws-sdk-go-v2/service/s3 v1.53.0
+ github.com/essentialkaos/ek/v12 v12.111.1
+ github.com/pkg/sftp v1.13.6
+ golang.org/x/crypto v0.21.0
+)
+
+require (
+ github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.4 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.4 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.4 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.6 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.6 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.4 // indirect
+ github.com/aws/smithy-go v1.20.1 // indirect
+ github.com/essentialkaos/depsy v1.1.0 // indirect
+ github.com/kr/fs v0.1.0 // indirect
+ golang.org/x/sys v0.18.0 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..f4a18b5
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,84 @@
+github.com/aws/aws-sdk-go-v2 v1.26.0 h1:/Ce4OCiM3EkpW7Y+xUnfAFpchU78K7/Ug01sZni9PgA=
+github.com/aws/aws-sdk-go-v2 v1.26.0/go.mod h1:35hUlJVYd+M++iLI3ALmVwMOyRYMmRqUXpTtRGW+K9I=
+github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 h1:gTK2uhtAPtFcdRRJilZPx8uJLL2J85xK11nKtWL0wfU=
+github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1/go.mod h1:sxpLb+nZk7tIfCWChfd+h4QwHNUR57d8hA1cleTkjJo=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.9 h1:N8s0/7yW+h8qR8WaRlPQeJ6czVMNQVNtNdUqf6cItao=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.9/go.mod h1:446YhIdmSV0Jf/SLafGZalQo+xr2iw7/fzXGDPTU1yQ=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.4 h1:0ScVK/4qZ8CIW0k8jOeFVsyS/sAiXpYxRBLolMkuLQM=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.4/go.mod h1:84KyjNZdHC6QZW08nfHI6yZgPd+qRgaWcYsyLUo3QY8=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.4 h1:sHmMWWX5E7guWEFQ9SVo6A3S4xpPrWnd77a6y4WM6PU=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.4/go.mod h1:WjpDrhWisWOIoS9n3nk67A3Ll1vfULJ9Kq6h29HTD48=
+github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.4 h1:SIkD6T4zGQ+1YIit22wi37CGNkrE7mXV1vNA5VpI3TI=
+github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.4/go.mod h1:XfeqbsG0HNedNs0GT+ju4Bs+pFAwsrlzcRdMvdNVf5s=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 h1:EyBZibRTVAs6ECHZOw5/wlylS9OcTzwyjeQMudmREjE=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1/go.mod h1:JKpmtYhhPs7D97NL/ltqz7yCkERFW5dOlHyVl66ZYF8=
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.6 h1:NkHCgg0Ck86c5PTOzBZ0JRccI51suJDg5lgFtxBu1ek=
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.6/go.mod h1:mjTpxjC8v4SeINTngrnKFgm2QUi+Jm+etTbCxh8W4uU=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.6 h1:b+E7zIUHMmcB4Dckjpkapoy47W6C9QBv/zoUP+Hn8Kc=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.6/go.mod h1:S2fNV0rxrP78NhPbCZeQgY8H9jdDMeGtwcfZIRxzBqU=
+github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.4 h1:uDj2K47EM1reAYU9jVlQ1M5YENI1u6a/TxJpf6AeOLA=
+github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.4/go.mod h1:XKCODf4RKHppc96c2EZBGV/oCUC7OClxAo2MEyg4pIk=
+github.com/aws/aws-sdk-go-v2/service/s3 v1.53.0 h1:r3o2YsgW9zRcIP3Q0WCmttFVhTuugeKIvT5z9xDspc0=
+github.com/aws/aws-sdk-go-v2/service/s3 v1.53.0/go.mod h1:w2E4f8PUfNtyjfL6Iu+mWI96FGttE03z3UdNcUEC4tA=
+github.com/aws/smithy-go v1.20.1 h1:4SZlSlMr36UEqC7XOyRVb27XMeZubNcBNN+9IgEPIQw=
+github.com/aws/smithy-go v1.20.1/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/essentialkaos/check v1.4.0 h1:kWdFxu9odCxUqo1NNFNJmguGrDHgwi3A8daXX1nkuKk=
+github.com/essentialkaos/depsy v1.1.0 h1:U6dp687UkQwXlZU17Hg2KMxbp3nfZAoZ8duaeUFYvJI=
+github.com/essentialkaos/depsy v1.1.0/go.mod h1:kpiTAV17dyByVnrbNaMcZt2jRwvuXClUYOzpyJQwtG8=
+github.com/essentialkaos/ek/v12 v12.111.1 h1:s9vi+ydPmt1MI/JtABqmfD32j1VMFsZHe/45eAC+XYU=
+github.com/essentialkaos/ek/v12 v12.111.1/go.mod h1:SslW97Se34YQKc08Ume2V/8h/HPTgLS1+Iok64cNF/U=
+github.com/essentialkaos/go-linenoise/v3 v3.4.0 h1:g72w8x+/HIwOMBVvNaPYp+wMWVHrYZwzFAF7OfZR5Ts=
+github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
+github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/pkg/sftp v1.13.6 h1:JFZT4XbOU7l77xGSpOdW+pwIMqP044IyjXX6FGyEKFo=
+github.com/pkg/sftp v1.13.6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
+golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
+golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
+golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/uploader/fs/fs.go b/uploader/fs/fs.go
new file mode 100644
index 0000000..4bca8da
--- /dev/null
+++ b/uploader/fs/fs.go
@@ -0,0 +1,103 @@
+package fs
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+// //
+// Copyright (c) 2024 ESSENTIAL KAOS //
+// Apache License, Version 2.0 //
+// //
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/essentialkaos/ek/v12/events"
+ "github.com/essentialkaos/ek/v12/fsutil"
+ "github.com/essentialkaos/ek/v12/log"
+ "github.com/essentialkaos/ek/v12/path"
+
+ "github.com/essentialkaos/atlassian-cloud-backuper/uploader"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Config is configuration for FS uploader
+type Config struct {
+ Path string
+ Mode os.FileMode
+}
+
+// FSUploader is FS uploader instance
+type FSUploader struct {
+ config *Config
+ dispatcher *events.Dispatcher
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// NewUploader creates new FS uploader instance
+func NewUploader(config *Config) (*FSUploader, error) {
+ err := config.Validate()
+
+ if err != nil {
+ return nil, err
+ }
+
+ return &FSUploader{config, nil}, nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// SetDispatcher sets events dispatcher
+func (u *FSUploader) SetDispatcher(d *events.Dispatcher) {
+ if u != nil {
+ u.dispatcher = d
+ }
+}
+
+// Upload uploads given file to storage
+func (u *FSUploader) Upload(file string) error {
+ log.Info("Copying backup file to %sā¦", u.config.Path)
+
+ u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_STARTED, "FS")
+
+ err := fsutil.ValidatePerms("FRS", file)
+
+ if err != nil {
+ return err
+ }
+
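+ // Create the target directory if it doesn't exist yet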
+ if !fsutil.IsExist(u.config.Path) {
+ err = os.MkdirAll(u.config.Path, 0750)
+
+ if err != nil {
+ return fmt.Errorf("Can't create directory for backup: %v", err)
+ }
+ }
+
+ fileName := path.Base(file)
+
+ err = fsutil.CopyFile(file, path.Join(u.config.Path, fileName), u.config.Mode)
+
+ if err != nil {
+ return fmt.Errorf("Can't copy backup file: %v", err)
+ }
+
+ u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_DONE, "FS")
+
+ log.Info("Backup successfully copied to %s", u.config.Path)
+
+ return nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Validate validates configuration
+func (c *Config) Validate() error {
+ switch {
+ case c == nil:
+ return fmt.Errorf("Configuration validation error: config is nil")
+ case c.Path == "":
+ return fmt.Errorf("Configuration validation error: path is empty")
+ case c.Mode == 0:
+ return fmt.Errorf("Configuration validation error: invalid file mode %v", c.Mode)
+ }
+
+ return nil
+}
diff --git a/uploader/s3/s3.go b/uploader/s3/s3.go
new file mode 100644
index 0000000..e0778df
--- /dev/null
+++ b/uploader/s3/s3.go
@@ -0,0 +1,163 @@
+package s3
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+// //
+// Copyright (c) 2024 ESSENTIAL KAOS //
+// Apache License, Version 2.0 //
+// //
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path"
+ "strings"
+ "time"
+
+ "github.com/essentialkaos/ek/v12/events"
+ "github.com/essentialkaos/ek/v12/fsutil"
+ "github.com/essentialkaos/ek/v12/log"
+ "github.com/essentialkaos/ek/v12/passthru"
+
+ "github.com/aws/aws-sdk-go-v2/aws"
+ "github.com/aws/aws-sdk-go-v2/credentials"
+ "github.com/aws/aws-sdk-go-v2/service/s3"
+
+ "github.com/essentialkaos/atlassian-cloud-backuper/uploader"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Config is configuration for S3 uploader
+type Config struct {
+ Host string
+ Region string
+ AccessKeyID string
+ SecretKey string
+ Bucket string
+ Path string
+}
+
+// S3Uploader is S3 uploader instance
+type S3Uploader struct {
+ config *Config
+ dispatcher *events.Dispatcher
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// NewUploader creates new S3 uploader instance
+func NewUploader(config *Config) (*S3Uploader, error) {
+ err := config.Validate()
+
+ if err != nil {
+ return nil, err
+ }
+
+ return &S3Uploader{config, nil}, nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// SetDispatcher sets events dispatcher
+func (u *S3Uploader) SetDispatcher(d *events.Dispatcher) {
+ if u != nil {
+ u.dispatcher = d
+ }
+}
+
+// Upload uploads given file to S3 storage
+func (u *S3Uploader) Upload(file string) error {
+ u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_STARTED, "S3")
+
+ lastUpdate := time.Now()
+ fileName := path.Base(file)
+ fileSize := fsutil.GetSize(file)
+ outputFile := path.Join(u.config.Path, fileName)
+
+ log.Info(
+ "Uploading backup file to %s:%s (%s/%s)",
+ u.config.Bucket, u.config.Path, u.config.Host, u.config.Region,
+ )
+
+ client := s3.New(s3.Options{
+ Region: "ru-central1",
+ BaseEndpoint: aws.String("https://storage.yandexcloud.net"),
+ Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(
+ u.config.AccessKeyID, u.config.SecretKey, "",
+ )),
+ })
+
+ inputFD, err := os.OpenFile(file, os.O_RDONLY, 0)
+
+ if err != nil {
+ return fmt.Errorf("Can't open backup file for reading: %v", err)
+ }
+
+ defer inputFD.Close()
+
+ r := passthru.NewReader(inputFD, fileSize)
+
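+ // Report upload progress, throttled to at most one event every 3 seconds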
+ r.Update = func(n int) {
+ if time.Since(lastUpdate) < 3*time.Second {
+ return
+ }
+
+ u.dispatcher.Dispatch(
+ uploader.EVENT_UPLOAD_PROGRESS,
+ &uploader.ProgressInfo{Progress: r.Progress(), Current: r.Current(), Total: r.Total()},
+ )
+
+ lastUpdate = time.Now()
+ }
+
+ _, err = client.PutObject(context.TODO(), &s3.PutObjectInput{
+ Bucket: aws.String(u.config.Bucket),
+ Key: aws.String(outputFile),
+ Body: r,
+ })
+
+ if err != nil {
+ return fmt.Errorf("Can't upload file to S3: %v", err)
+ }
+
+ log.Info("File successfully uploaded to S3!")
+ u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_DONE, "S3")
+
+ return nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Validate validates configuration
+func (c *Config) Validate() error {
+ switch {
+ case c == nil:
+ return fmt.Errorf("Configuration validation error: config is nil")
+
+ case c.Host == "":
+ return fmt.Errorf("Configuration validation error: host is empty")
+
+ case c.Region == "":
+ return fmt.Errorf("Configuration validation error: region is empty")
+
+ case c.AccessKeyID == "":
+ return fmt.Errorf("Configuration validation error: access key is empty")
+
+ case c.SecretKey == "":
+ return fmt.Errorf("Configuration validation error: secret key is empty")
+
+ case c.Bucket == "":
+ return fmt.Errorf("Configuration validation error: bucket is empty")
+
+ case c.Path == "":
+ return fmt.Errorf("Configuration validation error: path is empty")
+
+ case strings.HasPrefix(c.Host, "https://"),
+ strings.HasPrefix(c.Host, "http://"):
+ return fmt.Errorf("Configuration validation error: host must not contain scheme")
+ }
+
+ return nil
+}
diff --git a/uploader/sftp/sftp.go b/uploader/sftp/sftp.go
new file mode 100644
index 0000000..efa2077
--- /dev/null
+++ b/uploader/sftp/sftp.go
@@ -0,0 +1,203 @@
+package sftp
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+// //
+// Copyright (c) 2024 ESSENTIAL KAOS //
+// Apache License, Version 2.0 //
+// //
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "strings"
+ "time"
+
+ "github.com/pkg/sftp"
+ "golang.org/x/crypto/ssh"
+
+ "github.com/essentialkaos/ek/v12/events"
+ "github.com/essentialkaos/ek/v12/fsutil"
+ "github.com/essentialkaos/ek/v12/log"
+ "github.com/essentialkaos/ek/v12/passthru"
+ "github.com/essentialkaos/ek/v12/path"
+
+ "github.com/essentialkaos/atlassian-cloud-backuper/uploader"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Config is configuration for SFTP uploader
+type Config struct {
+ Host string
+ User string
+ Key []byte
+ Path string
+ Mode os.FileMode
+}
+
+// SFTPUploader is SFTP uploader instance
+type SFTPUploader struct {
+ config *Config
+ dispatcher *events.Dispatcher
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// NewUploader creates new SFTP uploader instance
+func NewUploader(config *Config) (*SFTPUploader, error) {
+ err := config.Validate()
+
+ if err != nil {
+ return nil, err
+ }
+
+ return &SFTPUploader{config, nil}, nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// SetDispatcher sets events dispatcher
+func (u *SFTPUploader) SetDispatcher(d *events.Dispatcher) {
+ if u != nil {
+ u.dispatcher = d
+ }
+}
+
+// Upload uploads given file to SFTP storage
+func (u *SFTPUploader) Upload(file string) error {
+ u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_STARTED, "SFTP")
+
+ lastUpdate := time.Now()
+ fileName := path.Base(file)
+ fileSize := fsutil.GetSize(file)
+ outputFile := path.Join(u.config.Path, fileName)
+
+ log.Info(
+ "Uploading backup file to %s@%s~%s/%sā¦",
+ u.config.User, u.config.Host, u.config.Path, fileName,
+ )
+
+ sftpClient, err := u.connectToSFTP()
+
+ if err != nil {
+ return fmt.Errorf("Can't connect to SFTP: %v", err)
+ }
+
+ defer sftpClient.Close()
+
+ _, err = sftpClient.Stat(u.config.Path)
+
+ if err != nil {
+ err = sftpClient.MkdirAll(u.config.Path)
+
+ if err != nil {
+ return fmt.Errorf("Can't create directory for backup: %v", err)
+ }
+ }
+
+ outputFD, err := sftpClient.OpenFile(outputFile, os.O_CREATE|os.O_TRUNC|os.O_WRONLY)
+
+ if err != nil {
+ return fmt.Errorf("Can't create file of SFTP: %v", err)
+ }
+
+ defer outputFD.Close()
+
+ inputFD, err := os.OpenFile(file, os.O_RDONLY, 0)
+
+ if err != nil {
+ return fmt.Errorf("Can't open backup file for reading: %v", err)
+ }
+
+ defer inputFD.Close()
+
+ w := passthru.NewWriter(outputFD, fileSize)
+
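+ // Report upload progress, throttled to at most one event every 3 seconds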
+ w.Update = func(n int) {
+ if time.Since(lastUpdate) < 3*time.Second {
+ return
+ }
+
+ u.dispatcher.Dispatch(
+ uploader.EVENT_UPLOAD_PROGRESS,
+ &uploader.ProgressInfo{Progress: w.Progress(), Current: w.Current(), Total: w.Total()},
+ )
+
+ lastUpdate = time.Now()
+ }
+
+ _, err = io.Copy(w, inputFD)
+
+ if err != nil {
+ return fmt.Errorf("Can't upload file to SFTP: %v", err)
+ }
+
+ err = sftpClient.Chmod(outputFile, u.config.Mode)
+
+ if err != nil {
+ log.Error("Can't change file mode for uploaded file: %v", err)
+ }
+
+ log.Info("File successfully uploaded to SFTP!")
+ u.dispatcher.DispatchAndWait(uploader.EVENT_UPLOAD_DONE, "SFTP")
+
+ return nil
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// connectToSFTP connects to SFTP storage
+func (u *SFTPUploader) connectToSFTP() (*sftp.Client, error) {
+ signer, err := ssh.ParsePrivateKey(u.config.Key)
+
+ if err != nil {
+ return nil, fmt.Errorf("Can't parse private key: %v", err)
+ }
+
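+ // Host key verification is skipped (InsecureIgnoreHostKey), so the remote host identity is not checked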
+ sshClient, err := ssh.Dial("tcp", u.config.Host, &ssh.ClientConfig{
+ User: u.config.User,
+ Auth: []ssh.AuthMethod{ssh.PublicKeys(signer)},
+ Timeout: 5 * time.Second,
+ HostKeyCallback: ssh.InsecureIgnoreHostKey(),
+ })
+
+ if err != nil {
+ return nil, fmt.Errorf("Can't connect to SSH: %v", err)
+ }
+
+ return sftp.NewClient(sshClient, sftp.UseConcurrentWrites(true))
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Validate validates configuration
+func (c *Config) Validate() error {
+ switch {
+ case c == nil:
+ return fmt.Errorf("Configuration validation error: config is nil")
+
+ case c.Host == "":
+ return fmt.Errorf("Configuration validation error: host is empty")
+
+ case !strings.Contains(c.Host, ":"):
+ return fmt.Errorf("Configuration validation error: host doesn't contain port number")
+
+ case c.User == "":
+ return fmt.Errorf("Configuration validation error: user is empty")
+
+ case c.Path == "":
+ return fmt.Errorf("Configuration validation error: path is empty")
+
+ case len(c.Key) == 0:
+ return fmt.Errorf("Configuration validation error: key is empty")
+
+ case c.Mode == 0:
+ return fmt.Errorf("Configuration validation error: invalid file mode %v", c.Mode)
+ }
+
+ _, err := ssh.ParsePrivateKey(c.Key)
+
+ if err != nil {
+ return fmt.Errorf("Configuration validation error: invalid key: %v", err)
+ }
+
+ return nil
+}
diff --git a/uploader/uploader.go b/uploader/uploader.go
new file mode 100644
index 0000000..75fc390
--- /dev/null
+++ b/uploader/uploader.go
@@ -0,0 +1,37 @@
+package uploader
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+// //
+// Copyright (c) 2024 ESSENTIAL KAOS //
+// Apache License, Version 2.0 //
+// //
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+import "github.com/essentialkaos/ek/v12/events"
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
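+// Uploader events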
+const (
+ EVENT_UPLOAD_STARTED = "upload-started"
+ EVENT_UPLOAD_PROGRESS = "upload-progress"
+ EVENT_UPLOAD_DONE = "upload-done"
+)
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
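+// ProgressInfo contains information about upload progress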
+type ProgressInfo struct {
+ Progress float64
+ Current int64
+ Total int64
+}
+
+// ////////////////////////////////////////////////////////////////////////////////// //
+
+// Uploader is generic uploader interface
+type Uploader interface {
+ // Upload uploads given file to storage
+ Upload(file string) error
+
+ // SetDispatcher sets events dispatcher
+ SetDispatcher(d *events.Dispatcher)
+}